From 7b3f788b43d45f8a211de39dc2fe8d1f739593a1 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Tue, 15 Dec 2020 11:30:51 +0300
Subject: [PATCH 1/2] Move the test to the common trait

---
 .../AlterTableDropPartitionSuiteBase.scala    | 17 +++++++++++++++++
 .../v1/AlterTableDropPartitionSuite.scala     | 20 +-------------------
 .../v2/AlterTableDropPartitionSuite.scala     | 17 -----------------
 .../AlterTableDropPartitionSuite.scala        | 17 -----------------
 4 files changed, 18 insertions(+), 53 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
index ed479e2824fb..338f13ace891 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
@@ -21,6 +21,7 @@ import org.scalactic.source.Position
 import org.scalatest.Tag
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
+import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
 import org.apache.spark.sql.execution.datasources.PartitioningUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestUtils
@@ -146,4 +147,20 @@ trait AlterTableDropPartitionSuiteBase extends QueryTest with SQLTestUtils {
       assert(errMsg.contains(notFullPartitionSpecErr))
     }
   }
+
+  test("partition not exists") {
+    withNsTable("ns", "tbl") { t =>
+      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
+      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
+
+      val errMsg = intercept[NoSuchPartitionsException] {
+        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
+      }.getMessage
+      assert(errMsg.contains("partitions not found in table"))
+
+      checkPartitions(t, Map("id" -> "1"))
+      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
+      checkPartitions(t)
+    }
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableDropPartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableDropPartitionSuite.scala
index 5ad182bc689b..e655debc2fdd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableDropPartitionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableDropPartitionSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.execution.command.v1
 
-import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
@@ -32,21 +31,4 @@ trait AlterTableDropPartitionSuiteBase extends command.AlterTableDropPartitionSu
 
 class AlterTableDropPartitionSuite
   extends AlterTableDropPartitionSuiteBase
-  with SharedSparkSession {
-
-  test("partition not exists") {
-    withNsTable("ns", "tbl") { t =>
-      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
-      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
-
-      val errMsg = intercept[NoSuchPartitionsException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
-      }.getMessage
-      assert(errMsg.contains("partitions not found in table"))
-
-      checkPartitions(t, Map("id" -> "1"))
-      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
-      checkPartitions(t)
-    }
-  }
-}
+  with SharedSparkSession
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
index 608e7d7c98f6..9dc1cad5a002 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
 import org.apache.spark.sql.connector.{InMemoryPartitionTableCatalog, InMemoryTableCatalog}
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
@@ -38,22 +37,6 @@ class AlterTableDropPartitionSuite
     .set(s"spark.sql.catalog.$catalog", classOf[InMemoryPartitionTableCatalog].getName)
     .set(s"spark.sql.catalog.non_part_$catalog", classOf[InMemoryTableCatalog].getName)
 
-  test("partition not exists") {
-    withNsTable("ns", "tbl") { t =>
-      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
-      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
-
-      val errMsg = intercept[NoSuchPartitionsException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
-      }.getMessage
-      assert(errMsg.contains("partitions not found in table"))
-
-      checkPartitions(t, Map("id" -> "1"))
-      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
-      checkPartitions(t)
-    }
-  }
-
   test("SPARK-33650: drop partition into a table which doesn't support partition management") {
     withNsTable("ns", "tbl", s"non_part_$catalog") { t =>
       sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala
index fe26466cdad6..e857e11c3589 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.hive.execution.command
 
-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.execution.command.v1
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 
@@ -29,20 +28,4 @@ class AlterTableDropPartitionSuite
   override def defaultUsing: String = "USING HIVE"
 
   override protected val notFullPartitionSpecErr = "No partition is dropped"
-
-  test("partition not exists") {
-    withNsTable("ns", "tbl") { t =>
-      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
-      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
-
-      val errMsg = intercept[AnalysisException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
-      }.getMessage
-      assert(errMsg.contains("No partition is dropped"))
-
-      checkPartitions(t, Map("id" -> "1"))
-      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
-      checkPartitions(t)
-    }
-  }
 }

From 8df8fa7bb107983be13eb73a0179d8debb8097ff Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Tue, 15 Dec 2020 11:43:46 +0300
Subject: [PATCH 2/2] Throw NoSuchPartitionsException from HiveClientImpl

---
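HiveClientImpl.dropPartitions now reports a partition spec with no
matches as NoSuchPartitionsException(db, table, Seq(s)) instead of a
hand-written AnalysisException message. That aligns the Hive path with
the v1 and v2 catalogs, and it is what lets the "partition not exists"
test, moved into the common trait by PATCH 1/2, run unchanged against
Hive. A minimal sketch of the new failure mode, assuming a Hive-backed
session catalog and a partitioned table `tbl` whose only partition is
id=1 (these names are illustrative, not taken from this patch):

    // Sketch only: ScalaTest-style check of the exception type and of
    // the message fragment that the shared test asserts on.
    import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException

    val e = intercept[NoSuchPartitionsException] {
      // id=2 matches nothing, so the whole command fails.
      spark.sql("ALTER TABLE tbl DROP PARTITION (id=1), PARTITION (id=2)")
    }
    assert(e.getMessage.contains("partitions not found in table"))
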
 .../org/apache/spark/sql/hive/client/HiveClientImpl.scala  | 6 ++----
 .../execution/command/AlterTableDropPartitionSuite.scala   | 2 --
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 0b19e5e6e8c8..6a964a0ce361 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -49,7 +49,7 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, PartitionsAlreadyExistException}
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, NoSuchPartitionsException, PartitionsAlreadyExistException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Expression
@@ -630,9 +630,7 @@ private[hive] class HiveClientImpl(
         // (b='1', c='1') and (b='1', c='2'), a partial spec of (b='1') will match both.
         val parts = client.getPartitions(hiveTable, s.asJava).asScala
         if (parts.isEmpty && !ignoreIfNotExists) {
-          throw new AnalysisException(
-            s"No partition is dropped. One partition spec '$s' does not exist in table '$table' " +
-            s"database '$db'")
+          throw new NoSuchPartitionsException(db, table, Seq(s))
         }
         parts.map(_.getValues)
       }.distinct
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala
index e857e11c3589..9c7d76a0caa0 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala
@@ -26,6 +26,4 @@ class AlterTableDropPartitionSuite
 
   override def version: String = "Hive V1"
   override def defaultUsing: String = "USING HIVE"
-
-  override protected val notFullPartitionSpecErr = "No partition is dropped"
 }
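
Taken together, the two patches move the "partition not exists" test
into the shared AlterTableDropPartitionSuiteBase trait and make the
Hive catalog raise the same typed exception as the v1 and v2 catalogs,
so dropping a partition list that contains a missing spec fails
uniformly. A minimal end-to-end sketch of the behavior the shared test
pins down (the USING clause and table name here are illustrative; each
concrete suite supplies its own via defaultUsing and withNsTable):

    // Sketch only: mirrors the shared test rather than adding coverage.
    spark.sql("CREATE TABLE ns.tbl (id bigint, data string) USING parquet PARTITIONED BY (id)")
    spark.sql("ALTER TABLE ns.tbl ADD PARTITION (id=1) LOCATION 'loc'")

    // Without IF EXISTS, one missing spec fails the whole command with
    // NoSuchPartitionsException ("... partitions not found in table ...")
    // and the existing partition id=1 stays in place.
    spark.sql("ALTER TABLE ns.tbl DROP PARTITION (id=1), PARTITION (id=2)")

    // With IF EXISTS, existing partitions are dropped and missing ones
    // are skipped silently.
    spark.sql("ALTER TABLE ns.tbl DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")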