Skip to content

Commit fd6758c

Browse files
committed
Use BeforeAndAfterAll.
1 parent 7880891 commit fd6758c

File tree

2 files changed: +52 additions, -10 deletions

sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala

Lines changed: 44 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,24 +19,29 @@ package org.apache.spark.sql.sources
1919

2020
import java.io.File
2121

22+
import org.scalatest.BeforeAndAfterAll
23+
2224
import org.apache.spark.sql.catalyst.util
2325
import org.apache.spark.sql.types.{StringType, StructType, StructField}
2426
import org.apache.spark.util.Utils
2527

26-
27-
class CreateTableAsSelectSuite extends DataSourceTest {
28+
class CreateTableAsSelectSuite extends DataSourceTest with BeforeAndAfterAll {
2829

2930
import caseInsensisitiveContext._
3031

31-
var path: File = util.getTempFilePath("jsonCTAS").getCanonicalFile
32+
var path: File = null
3233

33-
before {
34+
override def beforeAll(): Unit = {
35+
path = util.getTempFilePath("jsonCTAS").getCanonicalFile
3436
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
3537
jsonRDD(rdd).registerTempTable("jt")
3638
}
3739

38-
after {
40+
override def afterAll(): Unit = {
3941
dropTempTable("jt")
42+
}
43+
44+
after {
4045
if (path.exists()) Utils.deleteRecursively(path)
4146
}
4247

@@ -107,6 +112,40 @@ class CreateTableAsSelectSuite extends DataSourceTest {
107112
dropTempTable("jsonTable")
108113
}
109114

115+
test("create a table, drop it and create another one with the same name") {
116+
sql(
117+
s"""
118+
|CREATE TEMPORARY TABLE jsonTable
119+
|USING org.apache.spark.sql.json.DefaultSource
120+
|OPTIONS (
121+
| path '${path.toString}'
122+
|) AS
123+
|SELECT a, b FROM jt
124+
""".stripMargin)
125+
126+
checkAnswer(
127+
sql("SELECT a, b FROM jsonTable"),
128+
sql("SELECT a, b FROM jt").collect())
129+
130+
dropTempTable("jsonTable")
131+
132+
sql(
133+
s"""
134+
|CREATE TEMPORARY TABLE jsonTable
135+
|USING org.apache.spark.sql.json.DefaultSource
136+
|OPTIONS (
137+
| path '${path.toString}'
138+
|) AS
139+
|SELECT a * 4 FROM jt
140+
""".stripMargin)
141+
142+
checkAnswer(
143+
sql("SELECT * FROM jsonTable"),
144+
sql("SELECT a * 4 FROM jt").collect())
145+
146+
dropTempTable("jsonTable")
147+
}
148+
110149
test("a CTAS statement with column definitions is not allowed") {
111150
intercept[DDLException]{
112151
sql(

sql/core/src/test/scala/org/apache/spark/sql/sources/InsertIntoSuite.scala

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,17 +19,20 @@ package org.apache.spark.sql.sources
1919

2020
import java.io.File
2121

22+
import org.scalatest.BeforeAndAfterAll
23+
2224
import org.apache.spark.sql.Row
2325
import org.apache.spark.sql.catalyst.util
2426
import org.apache.spark.util.Utils
2527

26-
27-
class InsertIntoSuite extends DataSourceTest {
28+
class InsertIntoSuite extends DataSourceTest with BeforeAndAfterAll {
2829

2930
import caseInsensisitiveContext._
3031

31-
var path: File = util.getTempFilePath("jsonInsertInto").getCanonicalFile
32-
before {
32+
var path: File = null
33+
34+
override def beforeAll: Unit = {
35+
path = util.getTempFilePath("jsonCTAS").getCanonicalFile
3336
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
3437
jsonRDD(rdd).registerTempTable("jt")
3538
sql(
@@ -42,7 +45,7 @@ class InsertIntoSuite extends DataSourceTest {
4245
""".stripMargin)
4346
}
4447

45-
after {
48+
override def afterAll: Unit = {
4649
dropTempTable("jsonTable")
4750
dropTempTable("jt")
4851
if (path.exists()) Utils.deleteRecursively(path)

0 commit comments

Comments (0)