@@ -19,24 +19,29 @@ package org.apache.spark.sql.sources
 
 import java.io.File
 
+import org.scalatest.BeforeAndAfterAll
+
 import org.apache.spark.sql.catalyst.util
 import org.apache.spark.sql.types.{StringType, StructType, StructField}
 import org.apache.spark.util.Utils
 
-
-class CreateTableAsSelectSuite extends DataSourceTest {
+class CreateTableAsSelectSuite extends DataSourceTest with BeforeAndAfterAll {
 
   import caseInsensisitiveContext._
 
-  var path: File = util.getTempFilePath("jsonCTAS").getCanonicalFile
+  var path: File = null
 
-  before {
+  override def beforeAll(): Unit = {
+    path = util.getTempFilePath("jsonCTAS").getCanonicalFile
     val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
     jsonRDD(rdd).registerTempTable("jt")
   }
 
-  after {
+  override def afterAll(): Unit = {
     dropTempTable("jt")
+  }
+
+  after {
     if (path.exists()) Utils.deleteRecursively(path)
   }
 
@@ -107,6 +112,40 @@ class CreateTableAsSelectSuite extends DataSourceTest {
     dropTempTable("jsonTable")
   }
 
+  test("create a table, drop it and create another one with the same name") {
+    sql(
+      s"""
+        |CREATE TEMPORARY TABLE jsonTable
+        |USING org.apache.spark.sql.json.DefaultSource
+        |OPTIONS (
+        |  path '${path.toString}'
+        |) AS
+        |SELECT a, b FROM jt
+      """.stripMargin)
+
+    checkAnswer(
+      sql("SELECT a, b FROM jsonTable"),
+      sql("SELECT a, b FROM jt").collect())
+
+    dropTempTable("jsonTable")
+
+    sql(
+      s"""
+        |CREATE TEMPORARY TABLE jsonTable
+        |USING org.apache.spark.sql.json.DefaultSource
+        |OPTIONS (
+        |  path '${path.toString}'
+        |) AS
+        |SELECT a * 4 FROM jt
+      """.stripMargin)
+
+    checkAnswer(
+      sql("SELECT * FROM jsonTable"),
+      sql("SELECT a * 4 FROM jt").collect())
+
+    dropTempTable("jsonTable")
+  }
+
   test("a CTAS statement with column definitions is not allowed") {
     intercept[DDLException]{
       sql(