Skip to content

Commit 339b0f2

Browse files
LantaoJin and dongjoon-hyun
authored and committed
[SPARK-29045][SQL][TESTS] Drop table to avoid test failure in SQLMetricsSuite
### What changes were proposed in this pull request? In method `SQLMetricsTestUtils.testMetricsDynamicPartition()`, there is a CREATE TABLE sentence without `withTable` block. It causes test failure if use same table name in other unit tests. ### Why are the changes needed? To avoid "table already exists" in tests. ### Does this PR introduce any user-facing change? No ### How was this patch tested? Exist UT Closes #25752 from LantaoJin/SPARK-29045. Authored-by: LantaoJin <[email protected]> Signed-off-by: Yuming Wang <[email protected]>
1 parent 637a6c2 commit 339b0f2

File tree

1 file changed

+24
-22
lines changed

1 file changed

+24
-22
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsTestUtils.scala

Lines changed: 24 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -100,29 +100,31 @@ trait SQLMetricsTestUtils extends SQLTestUtils {
100100
provider: String,
101101
dataFormat: String,
102102
tableName: String): Unit = {
103-
withTempPath { dir =>
104-
spark.sql(
105-
s"""
106-
|CREATE TABLE $tableName(a int, b int)
107-
|USING $provider
108-
|PARTITIONED BY(a)
109-
|LOCATION '${dir.toURI}'
110-
""".stripMargin)
111-
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
112-
assert(table.location == makeQualifiedPath(dir.getAbsolutePath))
113-
114-
val df = spark.range(start = 0, end = 40, step = 1, numPartitions = 1)
115-
.selectExpr("id a", "id b")
116-
117-
// 40 files, 80 rows, 40 dynamic partitions.
118-
verifyWriteDataMetrics(Seq(40, 40, 80)) {
119-
df.union(df).repartition(2, $"a")
120-
.write
121-
.format(dataFormat)
122-
.mode("overwrite")
123-
.insertInto(tableName)
103+
withTable(tableName) {
104+
withTempPath { dir =>
105+
spark.sql(
106+
s"""
107+
|CREATE TABLE $tableName(a int, b int)
108+
|USING $provider
109+
|PARTITIONED BY(a)
110+
|LOCATION '${dir.toURI}'
111+
""".stripMargin)
112+
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
113+
assert(table.location == makeQualifiedPath(dir.getAbsolutePath))
114+
115+
val df = spark.range(start = 0, end = 40, step = 1, numPartitions = 1)
116+
.selectExpr("id a", "id b")
117+
118+
// 40 files, 80 rows, 40 dynamic partitions.
119+
verifyWriteDataMetrics(Seq(40, 40, 80)) {
120+
df.union(df).repartition(2, $"a")
121+
.write
122+
.format(dataFormat)
123+
.mode("overwrite")
124+
.insertInto(tableName)
125+
}
126+
assert(TestUtils.recursiveList(dir).count(_.getName.startsWith("part-")) == 40)
124127
}
125-
assert(TestUtils.recursiveList(dir).count(_.getName.startsWith("part-")) == 40)
126128
}
127129
}
128130

0 commit comments

Comments
 (0)