From bffc412b4ce50ffc63da0f6b05d82f7dd52a97fd Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Fri, 19 Aug 2016 22:40:52 -0700 Subject: [PATCH] fix. --- .../spark/sql/execution/SparkSqlParser.scala | 7 +++++-- .../spark/sql/hive/execution/HiveDDLSuite.scala | 14 ++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala index 71c3bd31e02e..ddc12de8d734 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala @@ -1044,10 +1044,13 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { if (conf.convertCTAS && !hasStorageProperties) { // At here, both rowStorage.serdeProperties and fileStorage.serdeProperties // are empty Maps. + // For data source tables, table properties are only used to store schema and + // system-generated metadata. All user-specified properties/options will be stored + // in serde properties. 
val optionsWithPath = if (location.isDefined) { - Map("path" -> location.get) + properties ++ Map("path" -> location.get) } else { - Map.empty[String, String] + properties } val newTableDesc = tableDesc.copy( diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index 970b6885f625..8d270cada5c6 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -653,6 +653,20 @@ class HiveDDLSuite } } + test("CTAS - converted to Data Source Table but lost table properties") { + withSQLConf(SQLConf.CONVERT_CTAS.key -> "true") { + withTable("t") { + sql("CREATE TABLE t TBLPROPERTIES('prop1' = 'c', 'prop2' = 'd') AS SELECT 1 as a, 1 as b") + val tableDesc = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t")) + assert(tableDesc.properties.get("prop1").isEmpty) + assert(tableDesc.properties.get("prop2").isEmpty) + assert(tableDesc.storage.properties.get("prop1") == Option("c")) + assert(tableDesc.storage.properties.get("prop2") == Option("d")) + checkAnswer(spark.table("t"), Row(1, 1) :: Nil) + } + } + } + test("desc table for data source table - partitioned bucketed table") { withTable("t1") { spark