@@ -19,12 +19,15 @@ package org.apache.spark.sql.hive.execution
 
 import org.apache.hadoop.fs.Path
 
-import org.apache.spark.sql.{AnalysisException, QueryTest}
+import org.apache.spark.sql.{AnalysisException, QueryTest, SaveMode}
+import org.apache.spark.sql.catalyst.catalog.CatalogTableType
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.hive.test.TestHiveSingleton
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestUtils
 
 class HiveDDLSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
+  import hiveContext.implicits._
 
   // check if the directory for recording the data of the table exists.
   private def tableDirectoryExists(tableIdentifier: TableIdentifier): Boolean = {
@@ -51,24 +54,62 @@ class HiveDDLSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     }
   }
 
-  test("drop external table") {
+  test("drop managed tables") {
     withTempDir { tmpDir =>
       val tabName = "tab1"
       withTable(tabName) {
+        assert(tmpDir.listFiles.isEmpty)
         sql(
           s"""
              |create external table $tabName(c1 int COMMENT 'abc', c2 string)
              |stored as parquet
              |location '$tmpDir'
              |as select 1, '3'
           """.stripMargin)
+
+        val hiveTable =
+          hiveContext.sessionState.catalog
+            .getTableMetadata(TableIdentifier(tabName, Some("default")))
+        // It is a managed table, although it uses external in SQL
+        assert(hiveTable.tableType == CatalogTableType.MANAGED_TABLE)
+
         assert(tmpDir.listFiles.nonEmpty)
         sql(s"DROP TABLE $tabName")
+        // The data are deleted since the table type is not EXTERNAL
         assert(tmpDir.listFiles == null)
       }
     }
   }
 
+  test("drop external data source table") {
+    withTempDir { tmpDir =>
+      val tabName = "tab1"
+      withTable(tabName) {
+        assert(tmpDir.listFiles.isEmpty)
+
+        withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> "true") {
+          Seq(1 -> "a").toDF("i", "j")
+            .write
+            .mode(SaveMode.Overwrite)
+            .format("parquet")
+            .option("path", tmpDir.toString)
+            .saveAsTable(tabName)
+        }
+
+        val hiveTable =
+          hiveContext.sessionState.catalog
+            .getTableMetadata(TableIdentifier(tabName, Some("default")))
+        // This data source table is an external table
+        assert(hiveTable.tableType == CatalogTableType.EXTERNAL_TABLE)
+
+        assert(tmpDir.listFiles.nonEmpty)
+        sql(s"DROP TABLE $tabName")
+        // The data are not deleted since the table type is EXTERNAL
+        assert(tmpDir.listFiles.nonEmpty)
+      }
+    }
+  }
+
   test("drop views") {
     withTable("tab1") {
       val tabName = "tab1"