
Commit cc23581

MaxGekk authored and cloud-fan committed
[SPARK-33858][SQL][TESTS] Unify v1 and v2 ALTER TABLE .. RENAME PARTITION tests
### What changes were proposed in this pull request?

1. Move the `ALTER TABLE .. RENAME PARTITION` parsing tests to `AlterTableRenamePartitionParserSuite`.
2. Move the v1 tests for `ALTER TABLE .. RENAME PARTITION` from `DDLSuite` to `v1.AlterTableRenamePartitionSuite`, and the v2 tests from `AlterTablePartitionV2SQLSuite` to `v2.AlterTableRenamePartitionSuite`, so that the tests run for the V1, Hive V1 and V2 datasources.

### Why are the changes needed?

- The unification allows running common `ALTER TABLE .. RENAME PARTITION` tests for DSv1, Hive DSv1 and DSv2.
- It makes it easier to detect missing features and differences between the DSv1 and DSv2 implementations.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

By running the new test suites:

```
$ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *AlterTableRenamePartitionParserSuite"
$ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *AlterTableRenamePartitionSuite"
```

Closes #30863 from MaxGekk/unify-rename-partition-tests.

Authored-by: Max Gekk <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent f421c17 commit cc23581

File tree: 11 files changed, +325 -124 lines

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 1 addition & 27 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.parser
 import java.util.Locale
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
 import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource}
 import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -2073,32 +2073,6 @@ class DDLParserSuite extends AnalysisTest {
       """.stripMargin)
   }
 
-  test("alter table: rename partition") {
-    val sql1 =
-      """
-        |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us')
-        |RENAME TO PARTITION (dt='2008-09-09', country='uk')
-      """.stripMargin
-    val parsed1 = parsePlan(sql1)
-    val expected1 = AlterTableRenamePartition(
-      UnresolvedTable(Seq("table_name"), "ALTER TABLE ... RENAME TO PARTITION"),
-      UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us")),
-      Map("dt" -> "2008-09-09", "country" -> "uk"))
-    comparePlans(parsed1, expected1)
-
-    val sql2 =
-      """
-        |ALTER TABLE a.b.c PARTITION (ds='2017-06-10')
-        |RENAME TO PARTITION (ds='2018-06-10')
-      """.stripMargin
-    val parsed2 = parsePlan(sql2)
-    val expected2 = AlterTableRenamePartition(
-      UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... RENAME TO PARTITION"),
-      UnresolvedPartitionSpec(Map("ds" -> "2017-06-10")),
-      Map("ds" -> "2018-06-10"))
-    comparePlans(parsed2, expected2)
-  }
-
   test("show current namespace") {
     comparePlans(
       parsePlan("SHOW CURRENT NAMESPACE"),

sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala

Lines changed: 0 additions & 19 deletions
@@ -31,23 +31,4 @@ class AlterTablePartitionV2SQLSuite extends DatasourceV2SQLBase {
         "ALTER TABLE ... RECOVER PARTITIONS is not supported for v2 tables."))
     }
   }
-
-  test("ALTER TABLE RENAME PARTITION") {
-    val nonPartTbl = "testcat.ns1.ns2.tbl"
-    val partTbl = "testpart.ns1.ns2.tbl"
-    withTable(nonPartTbl, partTbl) {
-      spark.sql(s"CREATE TABLE $nonPartTbl (id bigint, data string) USING foo PARTITIONED BY (id)")
-      val e1 = intercept[AnalysisException] {
-        sql(s"ALTER TABLE $nonPartTbl PARTITION (id=1) RENAME TO PARTITION (id=2)")
-      }
-      assert(e1.message.contains(s"Table $nonPartTbl can not alter partitions"))
-
-      spark.sql(s"CREATE TABLE $partTbl (id bigint, data string) USING foo PARTITIONED BY (id)")
-      val e2 = intercept[AnalysisException] {
-        sql(s"ALTER TABLE $partTbl PARTITION (id=1) RENAME TO PARTITION (id=2)")
-      }
-      assert(e2.message.contains(
-        "ALTER TABLE ... RENAME TO PARTITION is not supported for v2 tables."))
-    }
-  }
 }
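
The check removed above is the behavior that moves into the new `v2.AlterTableRenamePartitionSuite` (one of the 11 changed files, not reproduced in this excerpt). Below is a hedged sketch of how that suite plausibly restates it on top of the shared base trait; the package, the `CommandSuiteBase` mixin, and the test name are assumptions, while `withNamespaceAndTable` and `defaultUsing` are the helpers visible elsewhere in this diff.

```scala
package org.apache.spark.sql.execution.command.v2

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.command

// Sketch only: binds the shared RENAME PARTITION checks to v2 catalogs and
// keeps the "not supported for v2 tables" assertion that used to live in
// AlterTablePartitionV2SQLSuite.
class AlterTableRenamePartitionSuite
  extends command.AlterTableRenamePartitionSuiteBase
  with CommandSuiteBase {

  test("RENAME PARTITION is not supported for v2 tables") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
      val e = intercept[AnalysisException] {
        sql(s"ALTER TABLE $t PARTITION (id = 1) RENAME TO PARTITION (id = 2)")
      }
      assert(e.message.contains(
        "ALTER TABLE ... RENAME TO PARTITION is not supported for v2 tables."))
    }
  }
}
```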

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionParserSuite.scala

Lines changed: 51 additions & 0 deletions
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
+import org.apache.spark.sql.catalyst.plans.logical.AlterTableRenamePartition
+import org.apache.spark.sql.test.SharedSparkSession
+
+class AlterTableRenamePartitionParserSuite extends AnalysisTest with SharedSparkSession {
+  test("rename a partition with single part") {
+    val sql = """
+      |ALTER TABLE a.b.c PARTITION (ds='2017-06-10')
+      |RENAME TO PARTITION (ds='2018-06-10')
+      """.stripMargin
+    val parsed = parsePlan(sql)
+    val expected = AlterTableRenamePartition(
+      UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... RENAME TO PARTITION"),
+      UnresolvedPartitionSpec(Map("ds" -> "2017-06-10")),
+      Map("ds" -> "2018-06-10"))
+    comparePlans(parsed, expected)
+  }
+
+  test("rename a partition with multi parts") {
+    val sql = """
+      |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us')
+      |RENAME TO PARTITION (dt='2008-09-09', country='uk')
+      """.stripMargin
+    val parsed = parsePlan(sql)
+    val expected = AlterTableRenamePartition(
+      UnresolvedTable(Seq("table_name"), "ALTER TABLE ... RENAME TO PARTITION"),
+      UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us")),
+      Map("dt" -> "2008-09-09", "country" -> "uk"))
+    comparePlans(parsed, expected)
+  }
+}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.QueryTest
+
+trait AlterTableRenamePartitionSuiteBase extends QueryTest with DDLCommandTestUtils {
+  override val command = "ALTER TABLE .. RENAME PARTITION"
+}
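
The base trait above carries only the command name; the actual checks and the catalog bindings come from the concrete suites layered on top of it. Those suites ship in this commit but are outside this excerpt, so the declarations below are only a sketch of the expected layering: the package, the file split, and the `CommandSuiteBase` mixin are assumptions.

```scala
package org.apache.spark.sql.execution.command.v1

import org.apache.spark.sql.execution.command

// v1-specific checks, intended to be shared by the in-memory and the Hive
// catalog suites (sketch; not the literal file from this commit).
trait AlterTableRenamePartitionSuiteBase
  extends command.AlterTableRenamePartitionSuiteBase {
  // Tests that exercise renaming through the v1 session catalog go here.
}

// Binds the shared v1 checks to the in-memory (non-Hive) session catalog.
class AlterTableRenamePartitionSuite
  extends AlterTableRenamePartitionSuiteBase
  with CommandSuiteBase
```

The Hive module and the v2 module would each add one more thin class of this shape, which is what lets `testOnly *AlterTableRenamePartitionSuite` run the same checks against all three catalogs.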

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandTestUtils.scala

Lines changed: 18 additions & 0 deletions
@@ -57,4 +57,22 @@ trait DDLCommandTestUtils extends SQLTestUtils {
       .map(PartitioningUtils.parsePathFragment)
     assert(partitions === expected.toSet)
   }
+
+  protected def createWideTable(table: String): Unit = {
+    sql(s"""
+      |CREATE TABLE $table (
+      |  price int, qty int,
+      |  year int, month int, hour int, minute int, sec int, extra int)
+      |$defaultUsing
+      |PARTITIONED BY (year, month, hour, minute, sec, extra)
+      |""".stripMargin)
+    sql(s"""
+      |INSERT INTO $table
+      |PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
+      |""".stripMargin)
+    sql(s"""
+      |ALTER TABLE $table
+      |ADD PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1)
+      |""".stripMargin)
+  }
 }
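
Hoisting `createWideTable` from `ShowPartitionsSuiteBase` into `DDLCommandTestUtils` makes the wide six-column partition layout available to every unified command suite. A hedged usage sketch, written as a test inside a suite that mixes in `DDLCommandTestUtils` (the test name, the renamed spec, and the expected `SHOW PARTITIONS` output are illustrative, not code from this commit):

```scala
  test("rename a partition with a wide spec") {
    withNamespaceAndTable("ns", "wide_tbl") { t =>
      createWideTable(t)
      // Rename only the `sec` value of the fully specified partition.
      sql(s"""
        |ALTER TABLE $t
        |PARTITION (year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1)
        |RENAME TO PARTITION (year = 2016, month = 3, hour = 10, minute = 10, sec = 123, extra = 1)
        |""".stripMargin)
      // The renamed partition should show up under its new path fragment.
      val partitions = sql(s"SHOW PARTITIONS $t").collect().map(_.getString(0))
      assert(partitions.contains("year=2016/month=3/hour=10/minute=10/sec=123/extra=1"))
    }
  }
```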

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 1 addition & 56 deletions
@@ -29,7 +29,7 @@ import org.apache.spark.internal.config
 import org.apache.spark.internal.config.RDD_PARALLEL_LISTING_THRESHOLD
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, QualifiedTableName, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchDatabaseException, NoSuchFunctionException, NoSuchPartitionException, PartitionAlreadyExistsException, TempTableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchDatabaseException, NoSuchFunctionException, NoSuchPartitionException, TempTableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.connector.catalog.SupportsNamespaces.PROP_OWNER
@@ -334,10 +334,6 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     testChangeColumn(isDatasourceTable = true)
   }
 
-  test("alter table: rename partition (datasource table)") {
-    testRenamePartitions(isDatasourceTable = true)
-  }
-
   test("the qualified path of a database is stored in the catalog") {
     val catalog = spark.sessionState.catalog
 
@@ -1592,57 +1588,6 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     }
   }
 
-  protected def testRenamePartitions(isDatasourceTable: Boolean): Unit = {
-    if (!isUsingHiveMetastore) {
-      assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
-    }
-    val catalog = spark.sessionState.catalog
-    val tableIdent = TableIdentifier("tab1", Some("dbx"))
-    val part1 = Map("a" -> "1", "b" -> "q")
-    val part2 = Map("a" -> "2", "b" -> "c")
-    val part3 = Map("a" -> "3", "b" -> "p")
-    createDatabase(catalog, "dbx")
-    createTable(catalog, tableIdent, isDatasourceTable)
-    createTablePartition(catalog, part1, tableIdent)
-    createTablePartition(catalog, part2, tableIdent)
-    createTablePartition(catalog, part3, tableIdent)
-    assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == Set(part1, part2, part3))
-
-    // basic rename partition
-    sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')")
-    sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='20', b='c')")
-    assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
-      Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
-
-    // rename without explicitly specifying database
-    catalog.setCurrentDatabase("dbx")
-    sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')")
-    assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
-      Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
-
-    // table to alter does not exist
-    val e = intercept[AnalysisException] {
-      sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')")
-    }
-    assert(e.getMessage.contains("Table not found: does_not_exist"))
-
-    // partition to rename does not exist
-    intercept[NoSuchPartitionException] {
-      sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')")
-    }
-
-    // partition spec in RENAME PARTITION should be case insensitive by default
-    sql("ALTER TABLE tab1 PARTITION (A='10', B='p') RENAME TO PARTITION (A='1', B='p')")
-    assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
-      Set(Map("a" -> "1", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
-
-    // target partition already exists
-    val errMsg = intercept[PartitionAlreadyExistsException] {
-      sql("ALTER TABLE tab1 PARTITION (a='1', b='p') RENAME TO PARTITION (a='20', b='c')")
-    }.getMessage
-    assert(errMsg.contains("Partition already exists"))
-  }
-
   protected def testChangeColumn(isDatasourceTable: Boolean): Unit = {
     if (!isUsingHiveMetastore) {
       assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
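
The coverage removed in `testRenamePartitions` (basic rename, missing table or partition, case-insensitive specs, rename onto an existing partition) is what the new `v1.AlterTableRenamePartitionSuite` re-expresses on top of the shared base trait, so it also runs against the Hive catalog. A hedged sketch of one such re-expressed check, written against the SQL surface rather than the `SessionCatalog` API; the test name and the table layout are illustrative, while `withNamespaceAndTable` and `defaultUsing` come from `DDLCommandTestUtils` as shown above.

```scala
  test("partition spec in RENAME PARTITION is case insensitive by default") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (id int, part string) $defaultUsing PARTITIONED BY (part)")
      sql(s"ALTER TABLE $t ADD PARTITION (part = 'aaa')")
      // Upper-case column names in the spec should still resolve to `part`.
      sql(s"ALTER TABLE $t PARTITION (PART = 'aaa') RENAME TO PARTITION (PART = 'bbb')")
      val partitions = sql(s"SHOW PARTITIONS $t").collect().map(_.getString(0)).toSet
      assert(partitions == Set("part=bbb"))
    }
  }
```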

sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala

Lines changed: 0 additions & 18 deletions
@@ -42,24 +42,6 @@ trait ShowPartitionsSuiteBase extends QueryTest with DDLCommandTestUtils {
     sql(s"ALTER TABLE $table ADD PARTITION(year = 2016, month = 3)")
   }
 
-  protected def createWideTable(table: String): Unit = {
-    sql(s"""
-      |CREATE TABLE $table (
-      |  price int, qty int,
-      |  year int, month int, hour int, minute int, sec int, extra int)
-      |$defaultUsing
-      |PARTITIONED BY (year, month, hour, minute, sec, extra)
-      |""".stripMargin)
-    sql(s"""
-      |INSERT INTO $table
-      |PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
-      |""".stripMargin)
-    sql(s"""
-      |ALTER TABLE $table
-      |ADD PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1)
-      |""".stripMargin)
-  }
-
   test("show partitions of non-partitioned table") {
     withNamespaceAndTable("ns", "not_partitioned_table") { t =>
       sql(s"CREATE TABLE $t (col1 int) $defaultUsing")
