From 57893bdf55146c4ecd0a6d72c69ec3d3e85b5207 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Mon, 11 Jul 2016 15:30:11 -0700
Subject: [PATCH 1/3] fix

---
 .../spark/sql/execution/command/tables.scala  | 26 +++++--------------
 .../sql/hive/execution/HiveDDLSuite.scala     | 16 +++++++-----
 2 files changed, 16 insertions(+), 26 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 5c815df0deb9e..6e1755674a350 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -408,34 +408,24 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
     val result = new ArrayBuffer[Row]
     val catalog = sparkSession.sessionState.catalog
 
-    if (catalog.isTemporaryTable(table)) {
-      describeSchema(catalog.lookupRelation(table).schema, result)
-    } else {
-      val metadata = catalog.getTableMetadata(table)
+    describeSchema(catalog.lookupRelation(table).schema, result)
 
+    if (!catalog.isTemporaryTable(table)) {
+      val metadata = catalog.getTableMetadata(table)
       if (isExtended) {
         describeExtended(metadata, result)
       } else if (isFormatted) {
         describeFormatted(metadata, result)
       } else {
-        describe(metadata, result)
+        describePartitionInfo(metadata, result)
       }
     }
 
     result
   }
 
-  // Shows data columns and partitioned columns (if any)
-  private def describe(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
+  private def describePartitionInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
     if (DDLUtils.isDatasourceTable(table)) {
-      val schema = DDLUtils.getSchemaFromTableProperties(table)
-
-      if (schema.isEmpty) {
-        append(buffer, "# Schema of this table is inferred at runtime", "", "")
-      } else {
-        schema.foreach(describeSchema(_, buffer))
-      }
-
       val partCols = DDLUtils.getPartitionColumnsFromTableProperties(table)
       if (partCols.nonEmpty) {
         append(buffer, "# Partition Information", "", "")
@@ -443,8 +433,6 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
         partCols.foreach(col => append(buffer, col, "", ""))
       }
     } else {
-      describeSchema(table.schema, buffer)
-
       if (table.partitionColumns.nonEmpty) {
         append(buffer, "# Partition Information", "", "")
         append(buffer, s"# ${output.head.name}", output(1).name, output(2).name)
@@ -454,14 +442,14 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
   }
 
   private def describeExtended(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
-    describe(table, buffer)
+    describePartitionInfo(table, buffer)
 
     append(buffer, "", "", "")
     append(buffer, "# Detailed Table Information", table.toString, "")
   }
 
   private def describeFormatted(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
-    describe(table, buffer)
+    describePartitionInfo(table, buffer)
 
     append(buffer, "", "", "")
     append(buffer, "# Detailed Table Information", "", "")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 93e50f4ee907b..90c48617804ce 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -610,15 +610,17 @@ class HiveDDLSuite
   }
 
   test("desc table for data source table - no user-defined schema") {
-    withTable("t1") {
-      withTempPath { dir =>
-        val path = dir.getCanonicalPath
-        spark.range(1).write.parquet(path)
-        sql(s"CREATE TABLE t1 USING parquet OPTIONS (PATH '$path')")
+    Seq("parquet", "json", "orc").foreach { fileFormat =>
+      withTable("t1") {
+        withTempPath { dir =>
+          val path = dir.getCanonicalPath
+          spark.range(1).write.format(fileFormat).save(path)
+          sql(s"CREATE TABLE t1 USING $fileFormat OPTIONS (PATH '$path')")
 
-        val desc = sql("DESC FORMATTED t1").collect().toSeq
+          val desc = sql("DESC FORMATTED t1").collect().toSeq
 
-        assert(desc.contains(Row("# Schema of this table is inferred at runtime", "", "")))
+          assert(desc.contains(Row("id", "bigint", "")))
+        }
       }
     }
   }

From 6f2deb3405b119aff1c88cab19d3953a7ede0408 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Mon, 11 Jul 2016 15:55:18 -0700
Subject: [PATCH 2/3] another fix way

---
 .../spark/sql/execution/command/tables.scala | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 6e1755674a350..4345f0d696ae7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -408,10 +408,19 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
     val result = new ArrayBuffer[Row]
     val catalog = sparkSession.sessionState.catalog
 
-    describeSchema(catalog.lookupRelation(table).schema, result)
-
-    if (!catalog.isTemporaryTable(table)) {
+    if (catalog.isTemporaryTable(table)) {
+      describeSchema(catalog.lookupRelation(table).schema, result)
+    } else {
       val metadata = catalog.getTableMetadata(table)
+
+      val schema = if (DDLUtils.isDatasourceTable(metadata)) {
+        DDLUtils.getSchemaFromTableProperties(metadata)
+          .getOrElse(catalog.lookupRelation(table).schema)
+      } else {
+        catalog.lookupRelation(table).schema
+      }
+      describeSchema(schema, result)
+
       if (isExtended) {

From d92ebcdfd7e525499e0c8b491eeab416ad12ecfd Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Mon, 11 Jul 2016 21:00:20 -0700
Subject: [PATCH 3/3] another fix way

---
 .../apache/spark/sql/execution/command/tables.scala | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 4345f0d696ae7..6651c33a3a9d0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -413,13 +413,14 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
     } else {
       val metadata = catalog.getTableMetadata(table)
 
-      val schema = if (DDLUtils.isDatasourceTable(metadata)) {
-        DDLUtils.getSchemaFromTableProperties(metadata)
-          .getOrElse(catalog.lookupRelation(table).schema)
+      if (DDLUtils.isDatasourceTable(metadata)) {
+        DDLUtils.getSchemaFromTableProperties(metadata) match {
+          case Some(userSpecifiedSchema) => describeSchema(userSpecifiedSchema, result)
+          case None => describeSchema(catalog.lookupRelation(table).schema, result)
+        }
      } else {
-        catalog.lookupRelation(table).schema
+        describeSchema(metadata.schema, result)
      }
-      describeSchema(schema, result)
 
       if (isExtended) {
         describeExtended(metadata, result)
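
Reviewer note: the sketch below is illustrative only, plain Scala rather than Spark source. `DescribeSchemaSketch`, `Schema`, `schemaFromProperties`, and the "schema" property key are hypothetical stand-ins for Spark's StructType, DDLUtils.getSchemaFromTableProperties, and the real table-property encoding. It condenses the resolution order [PATCH 3/3] settles on: for a data source table, a user-specified schema stored in table properties wins; otherwise the schema resolved from the relation (the runtime-inferred one) is described; non-datasource tables read the catalog metadata directly.

// Standalone sketch (not Spark code) of the schema-resolution order.
object DescribeSchemaSketch {

  type Schema = Seq[(String, String)] // (column name, data type)

  // Stand-in for DDLUtils.getSchemaFromTableProperties: None means no
  // user-specified schema was stored, i.e. it was inferred at runtime.
  def schemaFromProperties(props: Map[String, String]): Option[Schema] =
    props.get("schema").map { encoded =>
      encoded.split(",").toSeq.map { col =>
        val Array(name, dataType) = col.split(":")
        (name, dataType)
      }
    }

  def resolveSchema(
      isDatasourceTable: Boolean,
      props: Map[String, String],
      relationSchema: => Schema, // by-name: resolved only on the fallback path
      metadataSchema: Schema): Schema = {
    if (isDatasourceTable) {
      // Patch 3's match: a stored user-specified schema wins; otherwise
      // fall back to the schema resolved from the relation (the inferred one).
      schemaFromProperties(props) match {
        case Some(userSpecifiedSchema) => userSpecifiedSchema
        case None => relationSchema
      }
    } else {
      // Non-datasource (e.g. Hive serde) tables read the catalog metadata
      // directly, so no relation lookup is needed on this branch.
      metadataSchema
    }
  }

  def main(args: Array[String]): Unit = {
    val inferred: Schema = Seq(("id", "bigint"))
    // No user-defined schema stored -> the inferred schema is described,
    // matching the updated test expectation Row("id", "bigint", "").
    assert(resolveSchema(isDatasourceTable = true, Map.empty, inferred, Nil) == inferred)

    // A stored user-specified schema takes precedence over inference.
    val props = Map("schema" -> "id:bigint,name:string")
    assert(resolveSchema(isDatasourceTable = true, props, inferred, Nil) ==
      Seq(("id", "bigint"), ("name", "string")))
    println("sketch OK")
  }
}

Compared with [PATCH 2/3]'s getOrElse, the explicit match in [PATCH 3/3] keeps the two describeSchema call sites visible, and the non-datasource branch switches from catalog.lookupRelation(table).schema to metadata.schema, so describing a non-datasource table no longer resolves the relation at all.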