Upgrade Iceberg 1.7.1 (#442)
Co-authored-by: Prashant Singh <[email protected]>
singhpk234 and Prashant Singh authored Dec 9, 2024
1 parent edd09f0 · commit 8566e74
Showing 14 changed files with 54 additions and 82 deletions.
1 change: 1 addition & 0 deletions LICENSE
@@ -277,6 +277,7 @@ commons-collections:commons-collections
 commons-io:commons-io
 commons-logging:commons-logging
 commons-net:commons-net
+dev.failsafe:failsafe
 io.airlift:aircompressor
 io.dropwizard.logback:logback-throttling-appender
 io.dropwizard.metrics:metrics-annotation
2 changes: 1 addition & 1 deletion gradle/libs.versions.toml
@@ -19,7 +19,7 @@

 [versions]
 hadoop = "3.4.0"
-iceberg = "1.6.1"
+iceberg = "1.7.1"
 dropwizard = "4.0.8"
 slf4j = "2.0.13"
 swagger = "1.6.14"

This file was deleted.

@@ -47,7 +47,6 @@
 import org.apache.iceberg.TableMetadata;
 import org.apache.iceberg.TableMetadataParser;
 import org.apache.iceberg.TableOperations;
-import org.apache.iceberg.aws.s3.S3FileIOProperties;
 import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.SupportsNamespaces;
 import org.apache.iceberg.catalog.TableIdentifier;
@@ -101,7 +100,6 @@
 import org.apache.polaris.core.storage.PolarisStorageConfigurationInfo;
 import org.apache.polaris.core.storage.PolarisStorageIntegration;
 import org.apache.polaris.core.storage.StorageLocation;
-import org.apache.polaris.core.storage.aws.PolarisS3FileIOClientFactory;
 import org.apache.polaris.service.catalog.io.FileIOFactory;
 import org.apache.polaris.service.exception.IcebergExceptionMapper;
 import org.apache.polaris.service.task.TaskExecutor;
@@ -2064,8 +2062,6 @@ private List<TableIdentifier> listTableLike(PolarisEntitySubType subType, Namesp
    */
   private FileIO loadFileIO(String ioImpl, Map<String, String> properties) {
     Map<String, String> propertiesWithS3CustomizedClientFactory = new HashMap<>(properties);
-    propertiesWithS3CustomizedClientFactory.put(
-        S3FileIOProperties.CLIENT_FACTORY, PolarisS3FileIOClientFactory.class.getName());
     return fileIOFactory.loadFileIO(ioImpl, propertiesWithS3CustomizedClientFactory);
   }

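Note on the loadFileIO change above: before this commit, Polaris pinned Iceberg's S3FileIOProperties.CLIENT_FACTORY to its own PolarisS3FileIOClientFactory for every FileIO it loaded; afterwards the properties pass through untouched and any client customization lives behind the FileIOFactory interface. As a minimal sketch of what an implementation of that interface can look like — assuming only the loadFileIO(String, Map) signature visible at the call site, with the class name PolarisDelegatingFileIOFactory invented for illustration — a factory can simply delegate to Iceberg's stock loader:

import java.util.Map;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.io.FileIO;
import org.apache.polaris.service.catalog.io.FileIOFactory;

// Hypothetical implementation for illustration only; the factories actually
// shipped with Polaris may differ.
public class PolarisDelegatingFileIOFactory implements FileIOFactory {
  @Override
  public FileIO loadFileIO(String ioImpl, Map<String, String> properties) {
    // CatalogUtil.loadFileIO reflectively instantiates the ioImpl class name
    // (e.g. "org.apache.iceberg.aws.s3.S3FileIO") and calls initialize(properties).
    // The third argument is an optional Hadoop Configuration; null skips injection.
    return CatalogUtil.loadFileIO(ioImpl, properties, null);
  }
}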
@@ -21,21 +21,26 @@
 import static org.apache.polaris.service.catalog.AccessDelegationMode.VENDED_CREDENTIALS;

 import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
 import jakarta.ws.rs.core.Response;
 import jakarta.ws.rs.core.SecurityContext;
 import java.net.URLEncoder;
 import java.nio.charset.Charset;
 import java.util.EnumSet;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import org.apache.iceberg.catalog.Catalog;
 import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.TableIdentifier;
 import org.apache.iceberg.exceptions.BadRequestException;
 import org.apache.iceberg.exceptions.NotAuthorizedException;
 import org.apache.iceberg.exceptions.NotFoundException;
+import org.apache.iceberg.rest.Endpoint;
 import org.apache.iceberg.rest.RESTUtil;
+import org.apache.iceberg.rest.ResourcePaths;
 import org.apache.iceberg.rest.requests.CommitTransactionRequest;
 import org.apache.iceberg.rest.requests.CreateNamespaceRequest;
 import org.apache.iceberg.rest.requests.CreateTableRequest;
@@ -71,6 +76,38 @@
 public class IcebergCatalogAdapter
     implements IcebergRestCatalogApiService, IcebergRestConfigurationApiService {

+  private static final Set<Endpoint> DEFAULT_ENDPOINTS =
+      ImmutableSet.<Endpoint>builder()
+          .add(Endpoint.V1_LIST_NAMESPACES)
+          .add(Endpoint.V1_LOAD_NAMESPACE)
+          .add(Endpoint.V1_CREATE_NAMESPACE)
+          .add(Endpoint.V1_UPDATE_NAMESPACE)
+          .add(Endpoint.V1_DELETE_NAMESPACE)
+          .add(Endpoint.V1_LIST_TABLES)
+          .add(Endpoint.V1_LOAD_TABLE)
+          .add(Endpoint.V1_CREATE_TABLE)
+          .add(Endpoint.V1_UPDATE_TABLE)
+          .add(Endpoint.V1_DELETE_TABLE)
+          .add(Endpoint.V1_RENAME_TABLE)
+          .add(Endpoint.V1_REGISTER_TABLE)
+          .add(Endpoint.V1_REPORT_METRICS)
+          .build();
+
+  private static final Set<Endpoint> VIEW_ENDPOINTS =
+      ImmutableSet.<Endpoint>builder()
+          .add(Endpoint.V1_LIST_VIEWS)
+          .add(Endpoint.V1_LOAD_VIEW)
+          .add(Endpoint.V1_CREATE_VIEW)
+          .add(Endpoint.V1_UPDATE_VIEW)
+          .add(Endpoint.V1_DELETE_VIEW)
+          .add(Endpoint.V1_RENAME_VIEW)
+          .build();
+
+  private static final Set<Endpoint> COMMIT_ENDPOINT =
+      ImmutableSet.<Endpoint>builder()
+          .add(Endpoint.create("POST", ResourcePaths.V1_TRANSACTIONS_COMMIT))
+          .build();
+
   private final CallContextCatalogFactory catalogFactory;
   private final MetaStoreManagerFactory metaStoreManagerFactory;
   private final RealmEntityManagerFactory entityManagerFactory;
@@ -466,6 +503,12 @@ public Response getConfig(String warehouse, SecurityContext securityContext) {
         ConfigResponse.builder()
             .withDefaults(properties) // catalog properties are defaults
             .withOverrides(ImmutableMap.of("prefix", warehouse))
+            .withEndpoints(
+                ImmutableList.<Endpoint>builder()
+                    .addAll(DEFAULT_ENDPOINTS)
+                    .addAll(VIEW_ENDPOINTS)
+                    .addAll(COMMIT_ENDPOINT)
+                    .build())
             .build())
         .build();
   }
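The endpoints wired in above implement the endpoint-discovery contract added to the Iceberg REST catalog spec with the 1.7 line: GET /v1/config now advertises every route the server supports, and each Endpoint serializes as an "HTTP_METHOD path" string, as the updated regtest .ref files below show. A sketch of how a client might consume that list — EndpointGuard is a hypothetical helper, and it assumes only Endpoint.create (used above) plus value-based set membership for Endpoint:

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.iceberg.rest.Endpoint;

// Hypothetical client-side guard for illustration; Iceberg's own REST catalog
// client performs an equivalent check internally before issuing requests.
public final class EndpointGuard {
  private final Set<Endpoint> supported;

  public EndpointGuard(List<String> advertised) {
    // Each advertised entry looks like "POST /v1/{prefix}/transactions/commit".
    this.supported =
        advertised.stream()
            .map(entry -> entry.split(" ", 2))
            .map(parts -> Endpoint.create(parts[0], parts[1]))
            .collect(Collectors.toSet());
  }

  // Throws if the server did not advertise support for the endpoint.
  public void require(Endpoint endpoint) {
    if (!supported.contains(endpoint)) {
      throw new UnsupportedOperationException("Server does not support endpoint: " + endpoint);
    }
  }
}

Against this server, for example, require(Endpoint.V1_CREATE_VIEW) passes, since getConfig unconditionally includes VIEW_ENDPOINTS in the response.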
2 changes: 1 addition & 1 deletion regtests/setup.sh
@@ -31,7 +31,7 @@ if [ -z "${SPARK_HOME}" ]; then
 fi
 SPARK_CONF="${SPARK_HOME}/conf/spark-defaults.conf"
 DERBY_HOME="/tmp/derby"
-ICEBERG_VERSION="1.6.1"
+ICEBERG_VERSION="1.7.1"
 export PYTHONPATH="${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"

 # Ensure binaries are downloaded locally
2 changes: 1 addition & 1 deletion regtests/t_pyspark/src/iceberg_spark.py
@@ -72,7 +72,7 @@ def __enter__(self):
         """Initial method for Iceberg Spark session. Creates a Spark session with specified configs.
         """
         packages = [
-            "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.6.1",
+            "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.7.1",
             "org.apache.hadoop:hadoop-aws:3.4.0",
             "software.amazon.awssdk:bundle:2.23.19",
             "software.amazon.awssdk:url-connection-client:2.23.19",
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_azure_blob.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"abfss://[email protected]/polaris-test/spark_sql_blob_catalog/"},"overrides":{"prefix":"spark_sql_azure_blob_catalog"}}
+{"defaults":{"default-base-location":"abfss://[email protected]/polaris-test/spark_sql_blob_catalog/"},"overrides":{"prefix":"spark_sql_azure_blob_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_azure_dfs.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"abfss://[email protected]/polaris-test/spark_sql_dfs_catalog/"},"overrides":{"prefix":"spark_sql_azure_dfs_catalog"}}
+{"defaults":{"default-base-location":"abfss://[email protected]/polaris-test/spark_sql_dfs_catalog/"},"overrides":{"prefix":"spark_sql_azure_dfs_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_basic_catalog"}}
+{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_basic_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_gcp.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"gs://polaris-test1/polaris_test/spark_sql_gcp_catalog/"},"overrides":{"prefix":"spark_sql_gcp_catalog"}}
+{"defaults":{"default-base-location":"gs://polaris-test1/polaris_test/spark_sql_gcp_catalog/"},"overrides":{"prefix":"spark_sql_gcp_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"s3://datalake-storage-team/polaris_test/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_s3_catalog"}}
+{"defaults":{"default-base-location":"s3://datalake-storage-team/polaris_test/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_s3_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"s3://sfc-role-stage-for-reg-test-do-not-modify-write-only/polaris_test/spark_sql_s3_cross_region_catalog/"},"overrides":{"prefix":"spark_sql_s3_cross_region_catalog"}}
+{"defaults":{"default-base-location":"s3://sfc-role-stage-for-reg-test-do-not-modify-write-only/polaris_test/spark_sql_s3_cross_region_catalog/"},"overrides":{"prefix":"spark_sql_s3_cross_region_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
2 changes: 1 addition & 1 deletion regtests/t_spark_sql/ref/spark_sql_views.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_views_catalog"}}
+{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_views_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
