From 947135ea397eb1bb45ee335fd7f0ae0900425ebb Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Wed, 18 Jun 2025 10:42:46 -0700 Subject: [PATCH 01/18] add change --- codestyle/checkstyle.xml | 8 - .../polaris/service/it/env/CatalogApi.java | 2 +- .../polaris/service/it/env/ManagementApi.java | 2 +- .../spark/v3.5/integration/build.gradle.kts | 17 +- .../quarkus/it/PolarisManagementClient.java | 112 +++++++++++ .../quarkus/it/SparkCatalogIcebergIT.java | 3 + .../quarkus/it/SparkIntegrationBase.java | 155 +++++++++++++- plugins/spark/v3.5/spark/build.gradle.kts | 42 +--- .../apache/polaris/spark/PolarisCatalog.java | 2 +- .../polaris/spark/PolarisRESTCatalog.java | 4 +- .../polaris/spark/PolarisSparkCatalog.java | 2 +- .../rest/CreateGenericTableRESTRequest.java | 5 +- .../spark/rest/CreateGenericTableRequest.java | 190 ++++++++++++++++++ .../polaris/spark/rest/GenericTable.java | 189 +++++++++++++++++ .../rest/ListGenericTablesRESTResponse.java | 5 +- .../spark/rest/ListGenericTablesResponse.java | 137 +++++++++++++ .../rest/LoadGenericTableRESTResponse.java | 6 +- .../spark/rest/LoadGenericTableResponse.java | 111 ++++++++++ .../spark/utils/PolarisCatalogUtils.java | 2 +- .../polaris/spark/PolarisInMemoryCatalog.java | 2 +- .../spark/rest/DeserializationTest.java | 29 ++- 21 files changed, 940 insertions(+), 85 deletions(-) create mode 100644 plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java create mode 100644 plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java create mode 100644 plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java create mode 100644 plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java create mode 100644 plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml 
index d3986dc3e7..5f102b9d74 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -39,13 +39,5 @@ - - - - - - - - diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java b/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java index 7be67f1947..0274d0ea81 100644 --- a/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java +++ b/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java @@ -50,7 +50,7 @@ * @see PolarisClient#catalogApi(ClientCredentials) */ public class CatalogApi extends RestApi { - CatalogApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { + public CatalogApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { super(client, endpoints, authToken, uri); } diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java b/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java index fb3019c3e2..f2adf30144 100644 --- a/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java +++ b/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java @@ -53,7 +53,7 @@ * @see PolarisClient#managementApi(ClientCredentials) */ public class ManagementApi extends RestApi { - ManagementApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { + public ManagementApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { super(client, endpoints, authToken, uri); } diff --git a/plugins/spark/v3.5/integration/build.gradle.kts b/plugins/spark/v3.5/integration/build.gradle.kts index 0a1a8087e7..78a4f15c2c 100644 --- a/plugins/spark/v3.5/integration/build.gradle.kts +++ b/plugins/spark/v3.5/integration/build.gradle.kts @@ -45,9 +45,16 @@ dependencies { implementation(project(":polaris-runtime-service")) - 
testImplementation(project(":polaris-api-management-model")) + testImplementation( + "org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}:${icebergVersion}" + ) testImplementation(project(":polaris-spark-${sparkMajorVersion}_${scalaVersion}")) + testImplementation(project(":polaris-api-management-model")) { + // exclude the iceberg + exclude("org.apache.iceberg", "iceberg-core") + } + testImplementation("org.apache.spark:spark-sql_${scalaVersion}:${spark35Version}") { // exclude log4j dependencies. Explicit dependencies for the log4j libraries are // enforced below to ensure the version compatibility @@ -64,13 +71,7 @@ dependencies { testImplementation("io.delta:delta-spark_${scalaVersion}:3.3.1") testImplementation(platform(libs.jackson.bom)) - testImplementation("com.fasterxml.jackson.core:jackson-annotations") - testImplementation("com.fasterxml.jackson.core:jackson-core") - testImplementation("com.fasterxml.jackson.core:jackson-databind") - - testImplementation( - "org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}:${icebergVersion}" - ) + testImplementation("com.fasterxml.jackson.jakarta.rs:jackson-jakarta-rs-json-provider") testImplementation(testFixtures(project(":polaris-runtime-service"))) diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java new file mode 100644 index 0000000000..6214d384f9 --- /dev/null +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.polaris.spark.quarkus.it; + +import static java.util.concurrent.TimeUnit.MINUTES; +import static org.apache.polaris.service.it.ext.PolarisServerManagerLoader.polarisServerManager; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jakarta.rs.json.JacksonJsonProvider; +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import java.util.Map; +import java.util.Random; +import org.apache.iceberg.rest.HTTPClient; +import org.apache.iceberg.rest.RESTClient; +import org.apache.iceberg.rest.auth.AuthSession; +import org.apache.iceberg.rest.auth.OAuth2Util; +import org.apache.iceberg.rest.responses.OAuthTokenResponse; +import org.apache.polaris.service.it.env.CatalogApi; +import org.apache.polaris.service.it.env.ClientCredentials; +import org.apache.polaris.service.it.env.ManagementApi; +import org.apache.polaris.service.it.env.PolarisApiEndpoints; +import org.apache.polaris.service.it.ext.PolarisServerManager; + +/** + * RestClient used + */ +public final class PolarisManagementClient implements AutoCloseable { + private final PolarisApiEndpoints endpoints; + private final Client client; + // Use an alphanumeric ID for widest compatibility in HTTP and SQL. + // Use MAX_RADIX for shorter output. 
+ private final String clientId = + Long.toString(Math.abs(new Random().nextLong()), Character.MAX_RADIX); + // initialization an Iceberg rest client for fetch token + private final RESTClient restClient; + + private PolarisManagementClient(PolarisApiEndpoints endpoints) { + this.endpoints = endpoints; + + this.client = + ClientBuilder.newBuilder() + .readTimeout(5, MINUTES) + .connectTimeout(1, MINUTES) + .register(new JacksonJsonProvider(new ObjectMapper())) + .build(); + + this.restClient = HTTPClient.builder(Map.of()).uri(endpoints.catalogApiEndpoint()).build(); + } + + public static PolarisManagementClient managementClient(PolarisApiEndpoints endpoints) { + return new PolarisManagementClient(endpoints); + } + + /** + * This method should be used by test code to make top-level entity names. The purpose of this + * method is two-fold: + *
  • Identify top-level entities for later clean-up by {@link #cleanUp(ClientCredentials)}.
  • Allow {@link PolarisServerManager}s to customize top-level entities per environment. + */ + public String newEntityName(String hint) { + return polarisServerManager().transformEntityName(hint + "_" + clientId); + } + + public ManagementApi managementApi(String authToken) { + return new ManagementApi(client, endpoints, authToken, endpoints.managementApiEndpoint()); + } + + public ManagementApi managementApi(ClientCredentials credentials) { + return managementApi(obtainToken(credentials)); + } + + public CatalogApi catalogApi(ClientCredentials credentials) { + return new CatalogApi( + client, endpoints, obtainToken(credentials), endpoints.catalogApiEndpoint()); + } + + /** Requests an access token from the Polaris server for the given {@link ClientCredentials}. */ + public String obtainToken(ClientCredentials credentials) { + OAuthTokenResponse response = + OAuth2Util.fetchToken( + restClient.withAuthSession(AuthSession.EMPTY), + Map.of(), + String.format("%s:%s", credentials.clientId(), credentials.clientSecret()), + "PRINCIPAL_ROLE:ALL", + endpoints.catalogApiEndpoint() + "/v1/oauth/tokens", + Map.of("grant_type", "client_credentials")); + return response.token(); + } + + @Override + public void close() throws Exception { + client.close(); + restClient.close(); + } +} diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java index f3c411df23..d9182e6e88 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java @@ -27,6 +27,9 @@ public class SparkCatalogIcebergIT extends SparkCatalogBaseIT { @Override protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) { return 
builder + .config( + "spark.sql.extensions", + "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") .config( String.format("spark.sql.catalog.%s", catalogName), "org.apache.iceberg.spark.SparkCatalog") diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java index be456716ca..54f18c4b61 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java @@ -18,23 +18,136 @@ */ package org.apache.polaris.spark.quarkus.it; +import com.adobe.testing.s3mock.testcontainers.S3MockContainer; import com.google.common.collect.ImmutableList; import com.google.errorprone.annotations.FormatMethod; import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Path; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.DirectoryFileFilter; import org.apache.commons.io.filefilter.FalseFileFilter; -import org.apache.polaris.service.it.ext.PolarisSparkIntegrationTestBase; +import org.apache.polaris.core.admin.model.AwsStorageConfigInfo; +import org.apache.polaris.core.admin.model.Catalog; +import org.apache.polaris.core.admin.model.CatalogProperties; +import org.apache.polaris.core.admin.model.PolarisCatalog; +import org.apache.polaris.core.admin.model.StorageConfigInfo; +import org.apache.polaris.service.it.env.CatalogApi; +import org.apache.polaris.service.it.env.ClientCredentials; +import org.apache.polaris.service.it.env.IntegrationTestsHelper; +import org.apache.polaris.service.it.env.ManagementApi; +import 
org.apache.polaris.service.it.env.PolarisApiEndpoints; +import org.apache.polaris.service.it.ext.PolarisIntegrationTestExtension; +import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; +import org.intellij.lang.annotations.Language; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.LoggerFactory; -public abstract class SparkIntegrationBase extends PolarisSparkIntegrationTestBase { +@ExtendWith(PolarisIntegrationTestExtension.class) +public abstract class SparkIntegrationBase { + protected static final S3MockContainer s3Container = + new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket"); + protected static SparkSession spark; + protected PolarisApiEndpoints endpoints; + protected PolarisManagementClient client; + protected ManagementApi managementApi; + protected CatalogApi catalogApi; + protected String catalogName; + protected String sparkToken; + + protected URI warehouseDir; + + @BeforeAll + public static void setup() throws IOException { + s3Container.start(); + } + + @AfterAll + public static void cleanup() { + s3Container.stop(); + } + + @BeforeEach + public void before( + PolarisApiEndpoints apiEndpoints, ClientCredentials credentials, @TempDir Path tempDir) { + endpoints = apiEndpoints; + client = PolarisManagementClient.managementClient(endpoints); + sparkToken = client.obtainToken(credentials); + managementApi = client.managementApi(credentials); + catalogApi = client.catalogApi(credentials); + + warehouseDir = IntegrationTestsHelper.getTemporaryDirectory(tempDir).resolve("spark-warehouse"); + + catalogName = client.newEntityName("spark_catalog"); + + AwsStorageConfigInfo awsConfigModel = + AwsStorageConfigInfo.builder() + 
.setRoleArn("arn:aws:iam::123456789012:role/my-role") + .setExternalId("externalId") + .setUserArn("userArn") + .setStorageType(StorageConfigInfo.StorageTypeEnum.S3) + .setAllowedLocations(List.of("s3://my-old-bucket/path/to/data")) + .build(); + CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data"); + props.putAll( + Map.of( + "table-default.s3.endpoint", + s3Container.getHttpEndpoint(), + "table-default.s3.path-style-access", + "true", + "table-default.s3.access-key-id", + "foo", + "table-default.s3.secret-access-key", + "bar", + "s3.endpoint", + s3Container.getHttpEndpoint(), + "s3.path-style-access", + "true", + "s3.access-key-id", + "foo", + "s3.secret-access-key", + "bar", + "polaris.config.drop-with-purge.enabled", + "true")); + Catalog catalog = + PolarisCatalog.builder() + .setType(Catalog.TypeEnum.INTERNAL) + .setName(catalogName) + .setProperties(props) + .setStorageConfigInfo(awsConfigModel) + .build(); + + managementApi.createCatalog(catalog); + + SparkSession.Builder sessionBuilder = + SparkSession.builder() + .master("local[1]") + .config("spark.hadoop.fs.s3.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem") + .config( + "spark.hadoop.fs.s3.aws.credentials.provider", + "org.apache.hadoop.fs.s3.TemporaryAWSCredentialsProvider") + .config("spark.hadoop.fs.s3.access.key", "foo") + .config("spark.hadoop.fs.s3.secret.key", "bar") + .config("spark.ui.showConsoleProgress", false) + .config("spark.ui.enabled", "false"); + spark = withCatalog(sessionBuilder, catalogName).getOrCreate(); + + onSpark("USE " + catalogName); + } - @Override protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) { return builder .config( @@ -61,6 +174,38 @@ protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String .config(String.format("spark.sql.catalog.%s.s3.region", catalogName), "us-west-2"); } + @AfterEach + public void after() throws Exception { + cleanupCatalog(catalogName); + try { + 
SparkSession.clearDefaultSession(); + SparkSession.clearActiveSession(); + spark.close(); + } catch (Exception e) { + LoggerFactory.getLogger(getClass()).error("Unable to close spark session", e); + } + + client.close(); + } + + protected void cleanupCatalog(String catalogName) { + onSpark("USE " + catalogName); + List namespaces = onSpark("SHOW NAMESPACES").collectAsList(); + for (Row namespace : namespaces) { + List tables = onSpark("SHOW TABLES IN " + namespace.getString(0)).collectAsList(); + for (Row table : tables) { + onSpark("DROP TABLE " + namespace.getString(0) + "." + table.getString(1)); + } + List views = onSpark("SHOW VIEWS IN " + namespace.getString(0)).collectAsList(); + for (Row view : views) { + onSpark("DROP VIEW " + namespace.getString(0) + "." + view.getString(1)); + } + onSpark("DROP NAMESPACE " + namespace.getString(0)); + } + + managementApi.deleteCatalog(catalogName); + } + @FormatMethod protected List sql(String query, Object... args) { List rows = spark.sql(String.format(query, args)).collectAsList(); @@ -110,4 +255,8 @@ protected List listDirs(String path) { protected String generateName(String prefix) { return prefix + "_" + UUID.randomUUID().toString().replaceAll("-", ""); } + + protected static Dataset onSpark(@Language("SQL") String sql) { + return spark.sql(sql); + } } diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index a2a54e26be..b581ee6996 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -37,32 +37,6 @@ val scalaLibraryVersion = dependencies { // TODO: extract a polaris-rest module as a thin layer for // client to depends on. - implementation(project(":polaris-api-iceberg-service")) { - // exclude the iceberg dependencies, use the ones pulled - // by iceberg-core - exclude("org.apache.iceberg", "*") - // exclude all cloud and quarkus specific dependencies to avoid - // running into problems with signature files. 
- exclude("com.azure", "*") - exclude("software.amazon.awssdk", "*") - exclude("com.google.cloud", "*") - exclude("io.airlift", "*") - exclude("io.smallrye", "*") - exclude("io.smallrye.common", "*") - exclude("io.swagger", "*") - exclude("org.apache.commons", "*") - } - implementation(project(":polaris-api-catalog-service")) { - exclude("org.apache.iceberg", "*") - exclude("com.azure", "*") - exclude("software.amazon.awssdk", "*") - exclude("com.google.cloud", "*") - exclude("io.airlift", "*") - exclude("io.smallrye", "*") - exclude("io.smallrye.common", "*") - exclude("io.swagger", "*") - exclude("org.apache.commons", "*") - } implementation(project(":polaris-core")) { exclude("org.apache.iceberg", "*") exclude("com.azure", "*") @@ -75,15 +49,9 @@ dependencies { exclude("org.apache.commons", "*") } - implementation("org.apache.iceberg:iceberg-core:${icebergVersion}") - implementation( "org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}:${icebergVersion}" - ) { - // exclude the iceberg rest dependencies, use the ones pulled - // with iceberg-core dependency - exclude("org.apache.iceberg", "iceberg-core") - } + ) compileOnly("org.scala-lang:scala-library:${scalaLibraryVersion}") compileOnly("org.scala-lang:scala-reflect:${scalaLibraryVersion}") @@ -95,6 +63,9 @@ dependencies { exclude("org.slf4j", "jul-to-slf4j") } + compileOnly(libs.jakarta.annotation.api) + compileOnly(libs.jakarta.validation.api) + testImplementation(platform(libs.junit.bom)) testImplementation("org.junit.jupiter:junit-jupiter") testImplementation(libs.assertj.core) @@ -131,12 +102,7 @@ tasks.register("createPolarisSparkJar") { // therefore excluded from the optimization. 
minimize { exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) - exclude(dependency("org.apache.iceberg:iceberg-core*.*")) - exclude(dependency("org.apache.avro:avro*.*")) } - - relocate("com.fasterxml", "org.apache.polaris.shaded.com.fasterxml.jackson") - relocate("org.apache.avro", "org.apache.polaris.shaded.org.apache.avro") } tasks.withType(Jar::class).named("sourcesJar") { dependsOn("createPolarisSparkJar") } diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java index 31a6ac1897..99802d3cbb 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java @@ -22,7 +22,7 @@ import java.util.Map; import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.TableIdentifier; -import org.apache.polaris.service.types.GenericTable; +import org.apache.polaris.spark.rest.GenericTable; public interface PolarisCatalog { List listGenericTables(Namespace ns); diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java index d255c3c570..5dfd1f8981 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java @@ -46,9 +46,9 @@ import org.apache.iceberg.util.PropertyUtil; import org.apache.polaris.core.rest.PolarisEndpoints; import org.apache.polaris.core.rest.PolarisResourcePaths; -import org.apache.polaris.service.types.CreateGenericTableRequest; -import org.apache.polaris.service.types.GenericTable; import org.apache.polaris.spark.rest.CreateGenericTableRESTRequest; +import org.apache.polaris.spark.rest.CreateGenericTableRequest; 
+import org.apache.polaris.spark.rest.GenericTable; import org.apache.polaris.spark.rest.ListGenericTablesRESTResponse; import org.apache.polaris.spark.rest.LoadGenericTableRESTResponse; diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java index e1658312b6..8ce2bb0989 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java @@ -22,7 +22,7 @@ import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.exceptions.AlreadyExistsException; import org.apache.iceberg.spark.Spark3Util; -import org.apache.polaris.service.types.GenericTable; +import org.apache.polaris.spark.rest.GenericTable; import org.apache.polaris.spark.utils.PolarisCatalogUtils; import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException; import org.apache.spark.sql.catalyst.analysis.NoSuchTableException; diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java index 54ff841fc4..644fcc1c1d 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java @@ -18,11 +18,10 @@ */ package org.apache.polaris.spark.rest; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Map; import org.apache.iceberg.rest.RESTRequest; -import org.apache.polaris.service.types.CreateGenericTableRequest; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import 
org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; /** * RESTRequest definition for CreateGenericTable which extends the iceberg RESTRequest. This is diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java new file mode 100644 index 0000000000..e7b1d5a629 --- /dev/null +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java @@ -0,0 +1,190 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.polaris.spark.rest; + +import jakarta.validation.constraints.NotNull; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; + +@jakarta.annotation.Generated( + value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", + date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", + comments = "Generator version: 7.12.0") +public class CreateGenericTableRequest { + + @NotNull private final String name; + @NotNull private final String format; + private final String baseLocation; + private final String doc; + private final Map properties; + + /** */ + @JsonProperty(value = "name", required = true) + public String getName() { + return name; + } + + /** */ + @JsonProperty(value = "format", required = true) + public String getFormat() { + return format; + } + + /** */ + @JsonProperty(value = "base-location") + public String getBaseLocation() { + return baseLocation; + } + + /** */ + @JsonProperty(value = "doc") + public String getDoc() { + return doc; + } + + /** */ + @JsonProperty(value = "properties") + public Map getProperties() { + return properties; + } + + @JsonCreator + public CreateGenericTableRequest( + @JsonProperty(value = "name", required = true) String name, + @JsonProperty(value = "format", required = true) String format, + @JsonProperty(value = "base-location") String baseLocation, + @JsonProperty(value = "doc") String doc, + @JsonProperty(value = "properties") Map properties) { + this.name = name; + this.format = format; + this.baseLocation = baseLocation; + this.doc = doc; + this.properties = Objects.requireNonNullElse(properties, new HashMap<>()); + } + + public CreateGenericTableRequest(String name, String format) { + this.name = name; + this.format = format; + this.baseLocation = null; + this.doc = null; + this.properties = new 
HashMap<>(); + } + + public static Builder builder() { + return new Builder(); + } + + public static Builder builder(String name, String format) { + return new Builder(name, format); + } + + public static final class Builder { + private String name; + private String format; + private String baseLocation; + private String doc; + private Map properties; + + private Builder() {} + + private Builder(String name, String format) { + this.name = name; + this.format = format; + } + + public Builder setName(String name) { + this.name = name; + return this; + } + + public Builder setFormat(String format) { + this.format = format; + return this; + } + + public Builder setBaseLocation(String baseLocation) { + this.baseLocation = baseLocation; + return this; + } + + public Builder setDoc(String doc) { + this.doc = doc; + return this; + } + + public Builder setProperties(Map properties) { + this.properties = properties; + return this; + } + + public CreateGenericTableRequest build() { + CreateGenericTableRequest inst = + new CreateGenericTableRequest(name, format, baseLocation, doc, properties); + return inst; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + CreateGenericTableRequest createGenericTableRequest = (CreateGenericTableRequest) o; + return Objects.equals(this.name, createGenericTableRequest.name) + && Objects.equals(this.format, createGenericTableRequest.format) + && Objects.equals(this.baseLocation, createGenericTableRequest.baseLocation) + && Objects.equals(this.doc, createGenericTableRequest.doc) + && Objects.equals(this.properties, createGenericTableRequest.properties); + } + + @Override + public int hashCode() { + return Objects.hash(name, format, baseLocation, doc, properties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class CreateGenericTableRequest {\n"); + + sb.append(" name: 
").append(toIndentedString(name)).append("\n"); + sb.append(" format: ").append(toIndentedString(format)).append("\n"); + sb.append(" baseLocation: ").append(toIndentedString(baseLocation)).append("\n"); + sb.append(" doc: ").append(toIndentedString(doc)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java new file mode 100644 index 0000000000..37f90da97f --- /dev/null +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java @@ -0,0 +1,189 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.polaris.spark.rest; + +import jakarta.validation.constraints.NotNull; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; + +@jakarta.annotation.Generated( + value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", + date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", + comments = "Generator version: 7.12.0") +public class GenericTable { + + @NotNull private final String name; + @NotNull private final String format; + private final String baseLocation; + private final String doc; + private final Map properties; + + /** */ + @JsonProperty(value = "name", required = true) + public String getName() { + return name; + } + + /** */ + @JsonProperty(value = "format", required = true) + public String getFormat() { + return format; + } + + /** */ + @JsonProperty(value = "base-location") + public String getBaseLocation() { + return baseLocation; + } + + /** */ + @JsonProperty(value = "doc") + public String getDoc() { + return doc; + } + + /** */ + @JsonProperty(value = "properties") + public Map getProperties() { + return properties; + } + + @JsonCreator + public GenericTable( + @JsonProperty(value = "name", required = true) String name, + @JsonProperty(value = "format", required = true) String format, + @JsonProperty(value = "base-location") String baseLocation, + @JsonProperty(value = "doc") String doc, + @JsonProperty(value = "properties") Map properties) { + this.name = name; + this.format = format; + this.baseLocation = baseLocation; + this.doc = doc; + this.properties = Objects.requireNonNullElse(properties, new HashMap<>()); + } + + public GenericTable(String name, String format) { + this.name = name; + this.format = format; + this.baseLocation = null; + this.doc = null; + this.properties = new HashMap<>(); + } + + public static 
Builder builder() { + return new Builder(); + } + + public static Builder builder(String name, String format) { + return new Builder(name, format); + } + + public static final class Builder { + private String name; + private String format; + private String baseLocation; + private String doc; + private Map properties; + + private Builder() {} + + private Builder(String name, String format) { + this.name = name; + this.format = format; + } + + public Builder setName(String name) { + this.name = name; + return this; + } + + public Builder setFormat(String format) { + this.format = format; + return this; + } + + public Builder setBaseLocation(String baseLocation) { + this.baseLocation = baseLocation; + return this; + } + + public Builder setDoc(String doc) { + this.doc = doc; + return this; + } + + public Builder setProperties(Map properties) { + this.properties = properties; + return this; + } + + public GenericTable build() { + GenericTable inst = new GenericTable(name, format, baseLocation, doc, properties); + return inst; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GenericTable genericTable = (GenericTable) o; + return Objects.equals(this.name, genericTable.name) + && Objects.equals(this.format, genericTable.format) + && Objects.equals(this.baseLocation, genericTable.baseLocation) + && Objects.equals(this.doc, genericTable.doc) + && Objects.equals(this.properties, genericTable.properties); + } + + @Override + public int hashCode() { + return Objects.hash(name, format, baseLocation, doc, properties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class GenericTable {\n"); + + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" format: ").append(toIndentedString(format)).append("\n"); + sb.append(" baseLocation: 
").append(toIndentedString(baseLocation)).append("\n"); + sb.append(" doc: ").append(toIndentedString(doc)).append("\n"); + sb.append(" properties: ").append(toIndentedString(properties)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesRESTResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesRESTResponse.java index ede2c89a9b..55205d30f5 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesRESTResponse.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesRESTResponse.java @@ -18,12 +18,11 @@ */ package org.apache.polaris.spark.rest; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Set; import org.apache.iceberg.catalog.TableIdentifier; import org.apache.iceberg.rest.RESTResponse; -import org.apache.polaris.service.types.ListGenericTablesResponse; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; /** * RESTResponse definition for ListGenericTable which extends the iceberg RESTResponse. 
This is diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java new file mode 100644 index 0000000000..e94eee6c41 --- /dev/null +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.polaris.spark.rest; + +import jakarta.validation.Valid; +import java.util.LinkedHashSet; +import java.util.Objects; +import java.util.Set; +import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; + +@jakarta.annotation.Generated( + value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", + date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", + comments = "Generator version: 7.12.0") +public class ListGenericTablesResponse { + + private final String nextPageToken; + @Valid private final Set<@Valid TableIdentifier> identifiers; + + /** + * An opaque token that allows clients to make use of pagination for list APIs (e.g. ListTables). + * Clients may initiate the first paginated request by sending an empty query parameter + * `pageToken` to the server. Servers that support pagination should identify the + * `pageToken` parameter and return a `next-page-token` in the response if + * there are more results available. After the initial request, the value of + * `next-page-token` from each response must be used as the `pageToken` + * parameter value for the next request. The server must return `null` value for the + * `next-page-token` in the last response. Servers that support pagination must return + * all results in a single response with the value of `next-page-token` set to + * `null` if the query parameter `pageToken` is not set in the request. + * Servers that do not support pagination should ignore the `pageToken` parameter and + * return all results in a single response. The `next-page-token` must be omitted from + * the response. Clients must interpret either `null` or missing response value of + * `next-page-token` as the end of the listing results. 
+ */ + @JsonProperty(value = "next-page-token") + public String getNextPageToken() { + return nextPageToken; + } + + /** */ + @JsonProperty(value = "identifiers") + public Set<@Valid TableIdentifier> getIdentifiers() { + return identifiers; + } + + @JsonCreator + public ListGenericTablesResponse( + @JsonProperty(value = "next-page-token") String nextPageToken, + @JsonProperty(value = "identifiers") Set<@Valid TableIdentifier> identifiers) { + this.nextPageToken = nextPageToken; + this.identifiers = Objects.requireNonNullElse(identifiers, new LinkedHashSet<>()); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private String nextPageToken; + private Set<@Valid TableIdentifier> identifiers; + + private Builder() {} + + public Builder setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public Builder setIdentifiers(Set<@Valid TableIdentifier> identifiers) { + this.identifiers = identifiers; + return this; + } + + public ListGenericTablesResponse build() { + ListGenericTablesResponse inst = new ListGenericTablesResponse(nextPageToken, identifiers); + return inst; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListGenericTablesResponse listGenericTablesResponse = (ListGenericTablesResponse) o; + return Objects.equals(this.nextPageToken, listGenericTablesResponse.nextPageToken) + && Objects.equals(this.identifiers, listGenericTablesResponse.identifiers); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, identifiers); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ListGenericTablesResponse {\n"); + + sb.append(" nextPageToken: ").append(toIndentedString(nextPageToken)).append("\n"); + sb.append(" identifiers: 
").append(toIndentedString(identifiers)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableRESTResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableRESTResponse.java index 68c738dae4..ae9999dd58 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableRESTResponse.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableRESTResponse.java @@ -18,11 +18,9 @@ */ package org.apache.polaris.spark.rest; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.iceberg.rest.RESTResponse; -import org.apache.polaris.service.types.GenericTable; -import org.apache.polaris.service.types.LoadGenericTableResponse; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; /** * RESTResponse definition for LoadGenericTable which extends the iceberg RESTResponse. This is diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java new file mode 100644 index 0000000000..e77899ec9b --- /dev/null +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.polaris.spark.rest; + +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; +import java.util.Objects; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; + +@jakarta.annotation.Generated( + value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", + date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", + comments = "Generator version: 7.12.0") +public class LoadGenericTableResponse { + + @NotNull @Valid private final GenericTable table; + + /** */ + @JsonProperty(value = "table", required = true) + public GenericTable getTable() { + return table; + } + + @JsonCreator + public LoadGenericTableResponse( + @JsonProperty(value = "table", required = true) GenericTable table) { + this.table = table; + } + + public static Builder builder() { + return new Builder(); + } + + public static Builder builder(GenericTable table) { + return new Builder(table); + } + + public static final class Builder { + private GenericTable table; + + private Builder() {} + + private Builder(GenericTable table) { + this.table = table; + } + + public Builder setTable(GenericTable table) { + this.table = table; + return 
this; + } + + public LoadGenericTableResponse build() { + LoadGenericTableResponse inst = new LoadGenericTableResponse(table); + return inst; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + LoadGenericTableResponse loadGenericTableResponse = (LoadGenericTableResponse) o; + return Objects.equals(this.table, loadGenericTableResponse.table); + } + + @Override + public int hashCode() { + return Objects.hash(table); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class LoadGenericTableResponse {\n"); + + sb.append(" table: ").append(toIndentedString(table)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java index 8dac78b23c..e7cd76bcaf 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java @@ -27,7 +27,7 @@ import org.apache.iceberg.rest.RESTSessionCatalog; import org.apache.iceberg.rest.auth.OAuth2Util; import org.apache.iceberg.spark.SparkCatalog; -import org.apache.polaris.service.types.GenericTable; +import org.apache.polaris.spark.rest.GenericTable; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.connector.catalog.Table; import org.apache.spark.sql.connector.catalog.TableCatalog; diff --git 
a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/PolarisInMemoryCatalog.java b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/PolarisInMemoryCatalog.java index 5c3d597100..c846659df0 100644 --- a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/PolarisInMemoryCatalog.java +++ b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/PolarisInMemoryCatalog.java @@ -30,7 +30,7 @@ import org.apache.iceberg.exceptions.NoSuchNamespaceException; import org.apache.iceberg.exceptions.NoSuchTableException; import org.apache.iceberg.inmemory.InMemoryCatalog; -import org.apache.polaris.service.types.GenericTable; +import org.apache.polaris.spark.rest.GenericTable; /** InMemory implementation for the Polaris Catalog. This class is mainly used by testing. */ public class PolarisInMemoryCatalog extends InMemoryCatalog implements PolarisCatalog { diff --git a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java index 3ec9ddbdf3..d4d4da6abe 100644 --- a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java +++ b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java @@ -20,14 +20,6 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.PropertyAccessor; -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonFactoryBuilder; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategies; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import java.util.Map; @@ -36,8 
+28,14 @@ import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.TableIdentifier; import org.apache.iceberg.rest.RESTSerializers; -import org.apache.polaris.service.types.CreateGenericTableRequest; -import org.apache.polaris.service.types.GenericTable; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonAutoDetect; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.PropertyAccessor; +import org.apache.iceberg.shaded.com.fasterxml.jackson.core.JsonFactory; +import org.apache.iceberg.shaded.com.fasterxml.jackson.core.JsonFactoryBuilder; +import org.apache.iceberg.shaded.com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.iceberg.shaded.com.fasterxml.jackson.databind.DeserializationFeature; +import org.apache.iceberg.shaded.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.iceberg.shaded.com.fasterxml.jackson.databind.PropertyNamingStrategies; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -137,6 +135,17 @@ public void testListGenericTablesRESTResponse() throws JsonProcessingException { } } + @Test + public void testLoadGenericTableRestResponse() throws JsonProcessingException { + LoadGenericTableRESTResponse request = + new LoadGenericTableRESTResponse( + GenericTable.builder().setName("test-table").setFormat("delta").build()); + String json = mapper.writeValueAsString(request); + LoadGenericTableRESTResponse deserializedResponse = + mapper.readValue(json, LoadGenericTableRESTResponse.class); + assertThat(deserializedResponse.getTable().getName()).isEqualTo("test-table"); + } + private static Stream genericTableTestCases() { var doc = "table for testing"; var properties = Maps.newHashMap(); From 1c0255f130ec3109121b3e471283bacdc27f1057 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Wed, 18 Jun 2025 11:02:15 -0700 Subject: [PATCH 02/18] add comment --- .../quarkus/it/PolarisManagementClient.java | 
18 ++++-------------- .../spark/quarkus/it/SparkIntegrationBase.java | 3 --- plugins/spark/v3.5/spark/build.gradle.kts | 4 +--- .../spark/rest/CreateGenericTableRequest.java | 1 + .../polaris/spark/rest/GenericTable.java | 1 + .../spark/rest/ListGenericTablesResponse.java | 1 + .../spark/rest/LoadGenericTableResponse.java | 1 + 7 files changed, 9 insertions(+), 20 deletions(-) diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java index 6214d384f9..e4fd1237c7 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java @@ -32,14 +32,14 @@ import org.apache.iceberg.rest.auth.AuthSession; import org.apache.iceberg.rest.auth.OAuth2Util; import org.apache.iceberg.rest.responses.OAuthTokenResponse; -import org.apache.polaris.service.it.env.CatalogApi; import org.apache.polaris.service.it.env.ClientCredentials; import org.apache.polaris.service.it.env.ManagementApi; import org.apache.polaris.service.it.env.PolarisApiEndpoints; -import org.apache.polaris.service.it.ext.PolarisServerManager; /** - * RestClient used + * That class provides rest client that is can be used to talk to Polaris Management service and + * auth token endpoint. This class is currently used by Spark Client tests for commands that can not + * be issued through spark command, such as createCatalog etc. 
*/ public final class PolarisManagementClient implements AutoCloseable { private final PolarisApiEndpoints endpoints; @@ -68,12 +68,7 @@ public static PolarisManagementClient managementClient(PolarisApiEndpoints endpo return new PolarisManagementClient(endpoints); } - /** - * This method should be used by test code to make top-level entity names. The purpose of this - * method is two-fold: - *
  • Identify top-level entities for latger clean-up by {@link #cleanUp(ClientCredentials)}. - *
  • Allow {@link PolarisServerManager}s to customize top-level entities per environment. - */ + /** This method should be used by test code to make top-level entity names. */ public String newEntityName(String hint) { return polarisServerManager().transformEntityName(hint + "_" + clientId); } @@ -86,11 +81,6 @@ public ManagementApi managementApi(ClientCredentials credentials) { return managementApi(obtainToken(credentials)); } - public CatalogApi catalogApi(ClientCredentials credentials) { - return new CatalogApi( - client, endpoints, obtainToken(credentials), endpoints.catalogApiEndpoint()); - } - /** Requests an access token from the Polaris server for the given {@link ClientCredentials}. */ public String obtainToken(ClientCredentials credentials) { OAuthTokenResponse response = diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java index 54f18c4b61..8d16c36add 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java @@ -38,7 +38,6 @@ import org.apache.polaris.core.admin.model.CatalogProperties; import org.apache.polaris.core.admin.model.PolarisCatalog; import org.apache.polaris.core.admin.model.StorageConfigInfo; -import org.apache.polaris.service.it.env.CatalogApi; import org.apache.polaris.service.it.env.ClientCredentials; import org.apache.polaris.service.it.env.IntegrationTestsHelper; import org.apache.polaris.service.it.env.ManagementApi; @@ -64,7 +63,6 @@ public abstract class SparkIntegrationBase { protected PolarisApiEndpoints endpoints; protected PolarisManagementClient client; protected ManagementApi managementApi; - protected CatalogApi catalogApi; protected String catalogName; protected String 
sparkToken; @@ -87,7 +85,6 @@ public void before( client = PolarisManagementClient.managementClient(endpoints); sparkToken = client.obtainToken(credentials); managementApi = client.managementApi(credentials); - catalogApi = client.catalogApi(credentials); warehouseDir = IntegrationTestsHelper.getTemporaryDirectory(tempDir).resolve("spark-warehouse"); diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index b581ee6996..1642671745 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -100,9 +100,7 @@ tasks.register("createPolarisSparkJar") { // Optimization: Minimize the JAR (remove unused classes from dependencies) // The iceberg-spark-runtime plugin is always packaged along with our polaris-spark plugin, // therefore excluded from the optimization. - minimize { - exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) - } + minimize { exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) } } tasks.withType(Jar::class).named("sourcesJar") { dependsOn("createPolarisSparkJar") } diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java index e7b1d5a629..46aec88d14 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java @@ -25,6 +25,7 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; +// TODO: auto generate the class based on spec @jakarta.annotation.Generated( value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", diff --git 
a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java index 37f90da97f..f3c9fc523b 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java @@ -25,6 +25,7 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; +// TODO: auto generate the class based on spec @jakarta.annotation.Generated( value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java index e94eee6c41..611259c30b 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java @@ -26,6 +26,7 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; +// TODO: auto generate the class based on spec @jakarta.annotation.Generated( value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java index e77899ec9b..572125b25f 100644 --- 
a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java @@ -24,6 +24,7 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; +// TODO: auto generate the class based on spec @jakarta.annotation.Generated( value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", From 32804fc1bbfc8c0e0376c386b0b13284f1846dea Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Wed, 18 Jun 2025 14:44:10 -0700 Subject: [PATCH 03/18] update change --- codestyle/checkstyle.xml | 8 ++++ codestyle/checkstyle_no_illegalimport.xml | 43 +++++++++++++++++++ .../spark/v3.5/integration/build.gradle.kts | 5 +-- plugins/spark/v3.5/spark/build.gradle.kts | 5 +++ 4 files changed, 57 insertions(+), 4 deletions(-) create mode 100644 codestyle/checkstyle_no_illegalimport.xml diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml index 5f102b9d74..d3986dc3e7 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -39,5 +39,13 @@ + + + + + + + + diff --git a/codestyle/checkstyle_no_illegalimport.xml b/codestyle/checkstyle_no_illegalimport.xml new file mode 100644 index 0000000000..5f102b9d74 --- /dev/null +++ b/codestyle/checkstyle_no_illegalimport.xml @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/plugins/spark/v3.5/integration/build.gradle.kts b/plugins/spark/v3.5/integration/build.gradle.kts index 78a4f15c2c..a27361b418 100644 --- a/plugins/spark/v3.5/integration/build.gradle.kts +++ b/plugins/spark/v3.5/integration/build.gradle.kts @@ -50,10 +50,7 @@ dependencies { ) testImplementation(project(":polaris-spark-${sparkMajorVersion}_${scalaVersion}")) - testImplementation(project(":polaris-api-management-model")) { - // exclude 
the iceberg - exclude("org.apache.iceberg", "iceberg-core") - } + testImplementation(project(":polaris-api-management-model")) testImplementation("org.apache.spark:spark-sql_${scalaVersion}:${spark35Version}") { // exclude log4j dependencies. Explicit dependencies for the log4j libraries are diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 1642671745..9bdbdf3752 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -34,6 +34,11 @@ val scalaLibraryVersion = pluginlibs.versions.scala213.get() } +// the spark client relies on the shaded libraries from iceberg-spark-runtime, and therefore +// uses imports like org.apache.iceberg.shaded.*. Use checkstyle_no_illegalimport.xml to allow +// the import from shaded libraries for spark client. +checkstyle { configFile = rootProject.file("codestyle/checkstyle_no_illegalimport.xml") } + dependencies { // TODO: extract a polaris-rest module as a thin layer for // client to depends on. From 691f1492d85e3aac470a69736bd3a6f6d18b5af5 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Wed, 18 Jun 2025 15:11:21 -0700 Subject: [PATCH 04/18] add comment --- .../java/org/apache/polaris/spark/PolarisSparkCatalog.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java index 8ce2bb0989..fe0c6e180f 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java @@ -22,6 +22,10 @@ import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.exceptions.AlreadyExistsException; import org.apache.iceberg.spark.Spark3Util; +// Use the spec class defined at client side under the rest package. 
+// The spec classes used at client side and server side are different in +// terms of import, where the client side uses the shaded jackson library +// from iceberg-spark-runtime. import org.apache.polaris.spark.rest.GenericTable; import org.apache.polaris.spark.utils.PolarisCatalogUtils; import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException; From 5494d1e09c0c49fa58a36a59cd4db6eef81604bf Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Fri, 20 Jun 2025 13:50:33 -0700 Subject: [PATCH 05/18] add change --- .../src/main/kotlin/polaris-java.gradle.kts | 5 +++ codestyle/checkstyle.xml | 4 ++ codestyle/checkstyle_no_illegalimport.xml | 43 ------------------- codestyle/checkstyle_suppressions.xml | 32 ++++++++++++++ plugins/spark/v3.5/spark/build.gradle.kts | 2 +- 5 files changed, 42 insertions(+), 44 deletions(-) delete mode 100644 codestyle/checkstyle_no_illegalimport.xml create mode 100644 codestyle/checkstyle_suppressions.xml diff --git a/build-logic/src/main/kotlin/polaris-java.gradle.kts b/build-logic/src/main/kotlin/polaris-java.gradle.kts index 4370b5518c..0d8d7c9610 100644 --- a/build-logic/src/main/kotlin/polaris-java.gradle.kts +++ b/build-logic/src/main/kotlin/polaris-java.gradle.kts @@ -47,6 +47,11 @@ checkstyle { .requiredVersion toolVersion = checkstyleVersion configFile = rootProject.file("codestyle/checkstyle.xml") + configProperties = + mapOf( + "checkstyle.suppression.file" to + rootProject.file("codestyle/checkstyle_suppressions.xml").absolutePath + ) isIgnoreFailures = false maxErrors = 0 maxWarnings = 0 diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml index d3986dc3e7..ac00bfcfb4 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -26,6 +26,10 @@ + + + + diff --git a/codestyle/checkstyle_no_illegalimport.xml b/codestyle/checkstyle_no_illegalimport.xml deleted file mode 100644 index 5f102b9d74..0000000000 --- a/codestyle/checkstyle_no_illegalimport.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - 
- - - - - - - - - - - - - diff --git a/codestyle/checkstyle_suppressions.xml b/codestyle/checkstyle_suppressions.xml new file mode 100644 index 0000000000..1f6f0aad91 --- /dev/null +++ b/codestyle/checkstyle_suppressions.xml @@ -0,0 +1,32 @@ + + + + + + + + diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 9bdbdf3752..a64ee1ee4d 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -37,7 +37,7 @@ val scalaLibraryVersion = // the spark client relies on the shaded libraries from iceberg-spark-runtime, and therefore // uses imports like org.apache.iceberg.shaded.*. Use checkstyle_no_illegalimport.xml to allow // the import from shaded libraries for spark client. -checkstyle { configFile = rootProject.file("codestyle/checkstyle_no_illegalimport.xml") } +// checkstyle { configFile = rootProject.file("codestyle/checkstyle_no_illegalimport.xml") } dependencies { // TODO: extract a polaris-rest module as a thin layer for From 469dcce6509a772e43d44ac76641bde9388b2191 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Fri, 20 Jun 2025 14:37:47 -0700 Subject: [PATCH 06/18] add tests --- .../polaris/spark/quarkus/it/PolarisManagementClient.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java index e4fd1237c7..37cdcc4d34 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java @@ -37,9 +37,8 @@ import org.apache.polaris.service.it.env.PolarisApiEndpoints; /** - * That class provides rest client that is can be used to talk to Polaris Management 
service and - * auth token endpoint. This class is currently used by Spark Client tests for commands that can not - * be issued through spark command, such as createCatalog etc. + * This class provides a REST client for the Polaris Management service endpoints and its auth-token endpoint, + * which is used in Spark client tests to run commands that Spark SQL can’t issue directly (e.g., createCatalog). */ public final class PolarisManagementClient implements AutoCloseable { private final PolarisApiEndpoints endpoints; From 89cb53d5a7e08cd215ba8e6cc8decc5cc3bc2a2d Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Fri, 20 Jun 2025 15:05:27 -0700 Subject: [PATCH 07/18] add comment --- .../polaris/spark/quarkus/it/PolarisManagementClient.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java index 37cdcc4d34..cc0f177f7e 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java @@ -37,8 +37,9 @@ import org.apache.polaris.service.it.env.PolarisApiEndpoints; /** - * This class provides a REST client for the Polaris Management service endpoints and its auth-token endpoint, - * which is used in Spark client tests to run commands that Spark SQL can’t issue directly (e.g., createCatalog). + * This class provides a REST client for the Polaris Management service endpoints and its auth-token + * endpoint, which is used in Spark client tests to run commands that Spark SQL can’t issue directly + * (e.g., createCatalog). 
*/ public final class PolarisManagementClient implements AutoCloseable { private final PolarisApiEndpoints endpoints; From 85a87891f0cc3760bcd9a067fe877f42881d7dc1 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Fri, 20 Jun 2025 15:48:48 -0700 Subject: [PATCH 08/18] clean up style check --- codestyle/checkstyle.xml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml index ac00bfcfb4..58c5b81ee8 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -26,6 +26,7 @@ + @@ -36,13 +37,6 @@ - - - - - - From 783ad75a4cc049e4a987d36888d5861ae980773e Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Fri, 20 Jun 2025 16:18:55 -0700 Subject: [PATCH 09/18] update build --- plugins/spark/v3.5/spark/build.gradle.kts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index a64ee1ee4d..3bfc7ec5d9 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -108,4 +108,7 @@ tasks.register("createPolarisSparkJar") { minimize { exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) } } -tasks.withType(Jar::class).named("sourcesJar") { dependsOn("createPolarisSparkJar") } +// ensure the shadowJar job is run for both `assemble` and `build` task +tasks.named("assemble") { dependsOn("createPolarisSparkJar") } + +tasks.named("build") { dependsOn("createPolarisSparkJar") } From e5123998d69a0914d9d4156d9fd9123156a449ba Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Fri, 20 Jun 2025 17:41:44 -0700 Subject: [PATCH 10/18] Revert "Reuse shadowJar for spark client bundle jar maven publish (#1857)" This reverts commit 1f7f127536a088911bf940addd1d05c07ff99a68. 
--- plugins/spark/v3.5/spark/build.gradle.kts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 3bfc7ec5d9..f57d3782fb 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -108,7 +108,7 @@ tasks.register("createPolarisSparkJar") { minimize { exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) } } -// ensure the shadowJar job is run for both `assemble` and `build` task +// ensure the ShadowJar job is run for both `assemble` and `build` task tasks.named("assemble") { dependsOn("createPolarisSparkJar") } tasks.named("build") { dependsOn("createPolarisSparkJar") } From 40f4d36c8e9cfa54074b9754b7024e4828b7b7a6 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Wed, 18 Jun 2025 13:54:54 -0700 Subject: [PATCH 11/18] Reuse shadowJar for spark client bundle jar maven publish (#1857) * fix spark client * fix test failure and address feedback * fix error * update regression test * update classifier name * address comment * add change * update doc * update build and readme * add back jar * update dependency * add change * update * update tests * remove merge service file * update readme * update readme --- .../main/kotlin/publishing/PublishingHelperPlugin.kt | 5 ----- plugins/spark/README.md | 8 +++++--- plugins/spark/v3.5/spark/build.gradle.kts | 12 +++++------- .../in-dev/unreleased/polaris-spark-client.md | 11 +++++++++++ 4 files changed, 21 insertions(+), 15 deletions(-) diff --git a/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt b/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt index d4d412a30f..04b04225e7 100644 --- a/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt +++ b/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt @@ -133,11 +133,6 @@ constructor(private val softwareComponentFactory: SoftwareComponentFactory) : Pl
suppressPomMetadataWarningsFor("testFixturesApiElements") suppressPomMetadataWarningsFor("testFixturesRuntimeElements") - - if (project.tasks.findByName("createPolarisSparkJar") != null) { - // if the project contains spark client jar, also publish the jar to maven - artifact(project.tasks.named("createPolarisSparkJar").get()) - } } if ( diff --git a/plugins/spark/README.md b/plugins/spark/README.md index c7d6bc876b..3f4acc31c4 100644 --- a/plugins/spark/README.md +++ b/plugins/spark/README.md @@ -29,15 +29,17 @@ Right now, the plugin only provides support for Spark 3.5, Scala version 2.12 an and depends on iceberg-spark-runtime 1.9.0. # Build Plugin Jar -A task createPolarisSparkJar is added to build a jar for the Polaris Spark plugin, the jar is named as: +A shadowJar task is added to build a jar for the Polaris Spark plugin, the jar is named as: `polaris-spark-_--bundle.jar`. For example: `polaris-spark-3.5_2.12-0.11.0-beta-incubating-SNAPSHOT-bundle.jar`. -- `./gradlew :polaris-spark-3.5_2.12:createPolarisSparkJar` -- build jar for Spark 3.5 with Scala version 2.12. -- `./gradlew :polaris-spark-3.5_2.13:createPolarisSparkJar` -- build jar for Spark 3.5 with Scala version 2.13. +- `./gradlew :polaris-spark-3.5_2.12:shadowJar` -- build jar for Spark 3.5 with Scala version 2.12. +- `./gradlew :polaris-spark-3.5_2.13:shadowJar` -- build jar for Spark 3.5 with Scala version 2.13. The result jar is located at plugins/spark/v3.5/build//libs after the build. +The shadowJar task is also executed automatically when you run `gradlew assemble` or `gradlew build`. + # Start Spark with Local Polaris Service using built Jar Once the jar is built, we can manually test it with Spark and a local Polaris service. 
diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index f57d3782fb..f351218ea4 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -19,7 +19,10 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar -plugins { id("polaris-client") } +plugins { + id("polaris-client") + id("com.gradleup.shadow") +} // get version information val sparkMajorVersion = "3.5" @@ -34,11 +37,6 @@ val scalaLibraryVersion = pluginlibs.versions.scala213.get() } -// the spark client relies on the shaded libraries from iceberg-spark-runtime, and therefore -// uses imports like org.apache.iceberg.shaded.*. Use checkstyle_no_illegalimport.xml to allow -// the import from shaded libraries for spark client. -// checkstyle { configFile = rootProject.file("codestyle/checkstyle_no_illegalimport.xml") } - dependencies { // TODO: extract a polaris-rest module as a thin layer for // client to depends on. @@ -88,7 +86,7 @@ dependencies { } } -tasks.register("createPolarisSparkJar") { +tasks.named("shadowJar") { archiveClassifier = "bundle" isZip64 = true diff --git a/site/content/in-dev/unreleased/polaris-spark-client.md b/site/content/in-dev/unreleased/polaris-spark-client.md index 4ceb536a9c..a34bceeced 100644 --- a/site/content/in-dev/unreleased/polaris-spark-client.md +++ b/site/content/in-dev/unreleased/polaris-spark-client.md @@ -128,3 +128,14 @@ The Polaris Spark client has the following functionality limitations: 3) Rename a Delta table is not supported. 4) ALTER TABLE ... SET LOCATION is not supported for DELTA table. 5) For other non-Iceberg tables like csv, it is not supported. 
+ +## Iceberg Spark Client compatibility with Polaris Spark Client +The Polaris Spark client today depends on a specific Iceberg client version, and the version dependency is described +in the following table: + +| Spark Client Version | Iceberg Spark Client Version | +|----------------------|------------------------------| +| 1.0.0 | 1.9.0 | + +The Iceberg dependency is automatically downloaded when the Polaris package is downloaded, so there is no need to +add the Iceberg Spark client in the `packages` configuration. From d44f87f4d2c07bda46d642161e7269c4af3197ac Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 11:06:03 -0700 Subject: [PATCH 12/18] update checkstyle --- build-logic/src/main/kotlin/polaris-java.gradle.kts | 5 ----- codestyle/checkstyle.xml | 11 ++++++----- plugins/spark/v3.5/spark/build.gradle.kts | 5 +++++ .../spark/v3.5/spark}/checkstyle_suppressions.xml | 0 4 files changed, 11 insertions(+), 10 deletions(-) rename {codestyle => plugins/spark/v3.5/spark}/checkstyle_suppressions.xml (100%) diff --git a/build-logic/src/main/kotlin/polaris-java.gradle.kts b/build-logic/src/main/kotlin/polaris-java.gradle.kts index 0d8d7c9610..4370b5518c 100644 --- a/build-logic/src/main/kotlin/polaris-java.gradle.kts +++ b/build-logic/src/main/kotlin/polaris-java.gradle.kts @@ -47,11 +47,6 @@ checkstyle { .requiredVersion toolVersion = checkstyleVersion configFile = rootProject.file("codestyle/checkstyle.xml") - configProperties = - mapOf( - "checkstyle.suppression.file" to - rootProject.file("codestyle/checkstyle_suppressions.xml").absolutePath - ) isIgnoreFailures = false maxErrors = 0 maxWarnings = 0 diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml index 58c5b81ee8..19db5831b9 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -26,17 +26,18 @@ - - - - - + + + + + + diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index f351218ea4..0e2f6da3e5 100644 ---
a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -24,6 +24,11 @@ plugins { id("com.gradleup.shadow") } +checkstyle { + configProperties = + mapOf("checkstyle.suppression.file" to project.file("checkstyle_suppressions.xml").absolutePath) +} + // get version information val sparkMajorVersion = "3.5" val scalaVersion = getAndUseScalaVersionForProject() diff --git a/codestyle/checkstyle_suppressions.xml b/plugins/spark/v3.5/spark/checkstyle_suppressions.xml similarity index 100% rename from codestyle/checkstyle_suppressions.xml rename to plugins/spark/v3.5/spark/checkstyle_suppressions.xml From 328889afcff54ad275594b1f12145f7fa2eabe91 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 11:30:07 -0700 Subject: [PATCH 13/18] rebase with main --- plugins/spark/v3.5/spark/build.gradle.kts | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 0e2f6da3e5..53517e212c 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -19,10 +19,7 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar -plugins { - id("polaris-client") - id("com.gradleup.shadow") -} +plugins { id("polaris-client") } checkstyle { configProperties = @@ -91,7 +88,7 @@ dependencies { } } -tasks.named("shadowJar") { +tasks.register("createPolarisSparkJar") { archiveClassifier = "bundle" isZip64 = true From 82f31e783fc04811849ad7bcf619406c220724e9 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 11:37:26 -0700 Subject: [PATCH 14/18] Revert "Reuse shadowJar for spark client bundle jar maven publish (#1857)" This reverts commit 40f4d36c8e9cfa54074b9754b7024e4828b7b7a6. 
--- .../main/kotlin/publishing/PublishingHelperPlugin.kt | 5 +++++ plugins/spark/README.md | 8 +++----- plugins/spark/v3.5/spark/build.gradle.kts | 5 +++++ .../content/in-dev/unreleased/polaris-spark-client.md | 11 ----------- 4 files changed, 13 insertions(+), 16 deletions(-) diff --git a/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt b/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt index 04b04225e7..d4d412a30f 100644 --- a/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt +++ b/build-logic/src/main/kotlin/publishing/PublishingHelperPlugin.kt @@ -133,6 +133,11 @@ constructor(private val softwareComponentFactory: SoftwareComponentFactory) : Pl suppressPomMetadataWarningsFor("testFixturesApiElements") suppressPomMetadataWarningsFor("testFixturesRuntimeElements") + + if (project.tasks.findByName("createPolarisSparkJar") != null) { + // if the project contains spark client jar, also publish the jar to maven + artifact(project.tasks.named("createPolarisSparkJar").get()) + } } if ( diff --git a/plugins/spark/README.md b/plugins/spark/README.md index 3f4acc31c4..c7d6bc876b 100644 --- a/plugins/spark/README.md +++ b/plugins/spark/README.md @@ -29,17 +29,15 @@ Right now, the plugin only provides support for Spark 3.5, Scala version 2.12 an and depends on iceberg-spark-runtime 1.9.0. # Build Plugin Jar -A shadowJar task is added to build a jar for the Polaris Spark plugin, the jar is named as: +A task createPolarisSparkJar is added to build a jar for the Polaris Spark plugin, the jar is named as: `polaris-spark-_--bundle.jar`. For example: `polaris-spark-3.5_2.12-0.11.0-beta-incubating-SNAPSHOT-bundle.jar`. -- `./gradlew :polaris-spark-3.5_2.12:shadowJar` -- build jar for Spark 3.5 with Scala version 2.12. -- `./gradlew :polaris-spark-3.5_2.13:shadowJar` -- build jar for Spark 3.5 with Scala version 2.13. +- `./gradlew :polaris-spark-3.5_2.12:createPolarisSparkJar` -- build jar for Spark 3.5 with Scala version 2.12. 
+- `./gradlew :polaris-spark-3.5_2.13:createPolarisSparkJar` -- build jar for Spark 3.5 with Scala version 2.13. The result jar is located at plugins/spark/v3.5/build//libs after the build. -The shadowJar task is also executed automatically when you run `gradlew assemble` or `gradlew build`. - # Start Spark with Local Polaris Service using built Jar Once the jar is built, we can manually test it with Spark and a local Polaris service. diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 53517e212c..7a0d968fef 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -39,6 +39,11 @@ val scalaLibraryVersion = pluginlibs.versions.scala213.get() } +// the spark client relies on the shaded libraries from iceberg-spark-runtime, and therefore +// uses imports like org.apache.iceberg.shaded.*. Use checkstyle_no_illegalimport.xml to allow +// the import from shaded libraries for spark client. +// checkstyle { configFile = rootProject.file("codestyle/checkstyle_no_illegalimport.xml") } + dependencies { // TODO: extract a polaris-rest module as a thin layer for // client to depends on. diff --git a/site/content/in-dev/unreleased/polaris-spark-client.md b/site/content/in-dev/unreleased/polaris-spark-client.md index a34bceeced..4ceb536a9c 100644 --- a/site/content/in-dev/unreleased/polaris-spark-client.md +++ b/site/content/in-dev/unreleased/polaris-spark-client.md @@ -128,14 +128,3 @@ The Polaris Spark client has the following functionality limitations: 3) Rename a Delta table is not supported. 4) ALTER TABLE ... SET LOCATION is not supported for DELTA table. 5) For other non-Iceberg tables like csv, it is not supported. 
- -## Iceberg Spark Client compatibility with Polaris Spark Client -The Polaris Spark client today depends on a specific Iceberg client version, and the version dependency is described -in the following table: - -| Spark Client Version | Iceberg Spark Client Version | -|----------------------|------------------------------| -| 1.0.0 | 1.9.0 | - -The Iceberg dependency is automatically downloaded when the Polaris package is downloaded, so there is no need to -add the Iceberg Spark client in the `packages` configuration. From 69a7c69c6287326cde08151ad9ef6c7a0654a930 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 12:11:24 -0700 Subject: [PATCH 15/18] update checkstyle --- plugins/spark/v3.5/spark/build.gradle.kts | 5 ----- 1 file changed, 5 deletions(-) diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 7a0d968fef..53517e212c 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -39,11 +39,6 @@ val scalaLibraryVersion = pluginlibs.versions.scala213.get() } -// the spark client relies on the shaded libraries from iceberg-spark-runtime, and therefore -// uses imports like org.apache.iceberg.shaded.*. Use checkstyle_no_illegalimport.xml to allow -// the import from shaded libraries for spark client. -// checkstyle { configFile = rootProject.file("codestyle/checkstyle_no_illegalimport.xml") } - dependencies { // TODO: extract a polaris-rest module as a thin layer for // client to depends on. 
From c1651dd61787ce8d5b433e1a662c5c79bf7d4be3 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 12:14:10 -0700 Subject: [PATCH 16/18] revert change --- codestyle/checkstyle.xml | 3 ++- plugins/spark/v3.5/spark/build.gradle.kts | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml index 19db5831b9..d3986dc3e7 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -34,7 +34,8 @@ - + diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 53517e212c..0db3da244e 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -23,7 +23,7 @@ plugins { id("polaris-client") } checkstyle { configProperties = - mapOf("checkstyle.suppression.file" to project.file("checkstyle_suppressions.xml").absolutePath) + mapOf("org.checkstyle.google.suppressionfilter.config" to project.file("checkstyle_suppressions.xml").absolutePath) } // get version information From d8e2b4c112624ba2f91039a6158c8bc206f149bd Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 12:17:40 -0700 Subject: [PATCH 17/18] address comments --- plugins/spark/v3.5/spark/build.gradle.kts | 5 ++++- .../apache/polaris/spark/rest/CreateGenericTableRequest.java | 4 ---- .../java/org/apache/polaris/spark/rest/GenericTable.java | 4 ---- .../apache/polaris/spark/rest/ListGenericTablesResponse.java | 4 ---- .../apache/polaris/spark/rest/LoadGenericTableResponse.java | 4 ---- 5 files changed, 4 insertions(+), 17 deletions(-) diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index 0db3da244e..d13255bf6f 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -23,7 +23,10 @@ plugins { id("polaris-client") } checkstyle { configProperties = - mapOf("org.checkstyle.google.suppressionfilter.config" to 
project.file("checkstyle_suppressions.xml").absolutePath) + mapOf( + "org.checkstyle.google.suppressionfilter.config" to + project.file("checkstyle_suppressions.xml").absolutePath + ) } // get version information diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java index 46aec88d14..101695e55a 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java @@ -26,10 +26,6 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; // TODO: auto generate the class based on spec -@jakarta.annotation.Generated( - value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", - date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", - comments = "Generator version: 7.12.0") public class CreateGenericTableRequest { @NotNull private final String name; diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java index f3c9fc523b..08f7511320 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/GenericTable.java @@ -26,10 +26,6 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; // TODO: auto generate the class based on spec -@jakarta.annotation.Generated( - value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", - date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", - comments = "Generator version: 7.12.0") public class GenericTable { @NotNull private final String name; diff --git 
a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java index 611259c30b..5ba2eb1937 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/ListGenericTablesResponse.java @@ -27,10 +27,6 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; // TODO: auto generate the class based on spec -@jakarta.annotation.Generated( - value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", - date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", - comments = "Generator version: 7.12.0") public class ListGenericTablesResponse { private final String nextPageToken; diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java index 572125b25f..3cdb51e8de 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/LoadGenericTableResponse.java @@ -25,10 +25,6 @@ import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; // TODO: auto generate the class based on spec -@jakarta.annotation.Generated( - value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", - date = "2025-06-16T22:51:23.661280-07:00[America/Los_Angeles]", - comments = "Generator version: 7.12.0") public class LoadGenericTableResponse { @NotNull @Valid private final GenericTable table; From cafb71003789f0cb0e4f9ad47cd12fb6073dd175 Mon Sep 17 00:00:00 2001 From: Yun Zou Date: Mon, 23 Jun 2025 13:06:22 -0700 Subject: [PATCH 18/18] trigger tests