@@ -33,6 +33,7 @@ import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
 import org.apache.spark.sql.test.SQLTestUtils
+import org.apache.spark.sql.types.StructType
 
 class HiveDDLSuite
   extends QueryTest with SQLTestUtils with TestHiveSingleton with BeforeAndAfterEach {
@@ -1289,4 +1290,74 @@ class HiveDDLSuite
       }
     }
   }
+
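+  // Create an external Hive serde table through the Catalog API and verify its serde metadata.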
+  test("create hive serde table with Catalog") {
+    withTable("t") {
+      withTempDir { dir =>
+        val df = spark.catalog.createExternalTable(
+          "t",
+          "hive",
+          new StructType().add("i", "int"),
+          Map("path" -> dir.getCanonicalPath, "fileFormat" -> "parquet"))
+        assert(df.collect().isEmpty)
+
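+        // fileFormat=parquet should map to Hive's Parquet input/output formats and SerDe.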
+        val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
+        assert(DDLUtils.isHiveTable(table))
+        assert(table.storage.inputFormat ==
+          Some("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"))
+        assert(table.storage.outputFormat ==
+          Some("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat"))
+        assert(table.storage.serde ==
+          Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"))
+
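+        // The new table should accept SQL inserts and reads.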
+        sql("INSERT INTO t SELECT 1")
+        checkAnswer(spark.table("t"), Row(1))
+      }
+    }
+  }
+
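+  // Create a Hive serde table via DataFrameWriter.saveAsTable and check metadata and error cases.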
+  test("create hive serde table with DataFrameWriter.saveAsTable") {
+    withTable("t", "t2") {
+      Seq(1 -> "a").toDF("i", "j")
+        .write.format("hive").option("fileFormat", "avro").saveAsTable("t")
+      checkAnswer(spark.table("t"), Row(1, "a"))
+
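+      // fileFormat=avro should map to Hive's Avro container formats and SerDe.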
+      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
+      assert(DDLUtils.isHiveTable(table))
+      assert(table.storage.inputFormat ==
+        Some("org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat"))
+      assert(table.storage.outputFormat ==
+        Some("org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat"))
+      assert(table.storage.serde ==
+        Some("org.apache.hadoop.hive.serde2.avro.AvroSerDe"))
+
+      sql("INSERT INTO t SELECT 2, 'b'")
+      checkAnswer(spark.table("t"), Row(1, "a") :: Row(2, "b") :: Nil)
+
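+      // CTAS into a partitioned Hive serde table is not allowed.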
+      val e = intercept[AnalysisException] {
+        Seq(1 -> "a").toDF("i", "j").write.format("hive").partitionBy("i").saveAsTable("t2")
+      }
+      assert(e.message.contains("A Create Table As Select (CTAS) statement is not allowed " +
+        "to create a partitioned table using Hive"))
+
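+      // Bucketed Hive serde tables are not supported yet.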
+      val e2 = intercept[AnalysisException] {
+        Seq(1 -> "a").toDF("i", "j").write.format("hive").bucketBy(4, "i").saveAsTable("t2")
+      }
+      assert(e2.message.contains("Creating bucketed Hive serde table is not supported yet"))
+
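+      // CTAS for Hive serde tables rejects overwrite (and append) save modes.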
+      val e3 = intercept[AnalysisException] {
+        spark.table("t").write.format("hive").mode("overwrite").saveAsTable("t")
+      }
+      assert(e3.message.contains(
+        "CTAS for hive serde tables does not support append or overwrite semantics"))
+    }
+  }
 }