diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md index 5551d562b8775..d22e5f0270b96 100644 --- a/docs/sql-migration-guide.md +++ b/docs/sql-migration-guide.md @@ -45,7 +45,7 @@ license: | - In Spark 3.2, the auto-generated `Cast` (such as those added by type coercion rules) will be stripped when generating column alias names. E.g., `sql("SELECT floor(1)").columns` will be `FLOOR(1)` instead of `FLOOR(CAST(1 AS DOUBLE))`. - - In Spark 3.2, the output schema of `SHOW TABLES` becomes `namespace: string, tableName: string, isTemporary: boolean`. In Spark 3.1 or earlier, the `namespace` field was named `database` for the builtin catalog, and there is no `isTemporary` field for v2 catalogs. To restore the old schema with the builtin catalog, you can set `spark.sql.legacy.keepCommandOutputSchema` to `true`. + - In Spark 3.2, the output schema of `SHOW TABLES` becomes `namespace: string, tableName: string, isTemporary: boolean, tableType: string`. In Spark 3.1 or earlier, the `namespace` field was named `database` for the builtin catalog, there is no `isTemporary` field for v2 catalogs, and there is no `tableType` field for either v1 or v2 catalogs. To restore the old schema with the builtin catalog, you can set `spark.sql.legacy.keepCommandOutputSchema` to `true`. - - In Spark 3.2, the output schema of `SHOW TABLE EXTENDED` becomes `namespace: string, tableName: string, isTemporary: boolean, information: string`. In Spark 3.1 or earlier, the `namespace` field was named `database` for the builtin catalog, and no change for the v2 catalogs. To restore the old schema with the builtin catalog, you can set `spark.sql.legacy.keepCommandOutputSchema` to `true`. + - In Spark 3.2, the output schema of `SHOW TABLE EXTENDED` becomes `namespace: string, tableName: string, isTemporary: boolean, information: string, tableType: string`. In Spark 3.1 or earlier, the `namespace` field was named `database` for the builtin catalog, there was no change for the v2 catalogs, and there is no `tableType` field for either v1 or v2 catalogs. To restore the old schema with the builtin catalog, you can set `spark.sql.legacy.keepCommandOutputSchema` to `true`. diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py index 07b0d823c1ce2..ef05d5a404dee 100644 --- a/python/pyspark/sql/context.py +++ b/python/pyspark/sql/context.py @@ -478,7 +478,7 @@ def tables(self, dbName=None): >>> sqlContext.registerDataFrameAsTable(df, "table1") >>> df2 = sqlContext.tables() >>> df2.filter("tableName = 'table1'").first() - Row(namespace='', tableName='table1', isTemporary=True) + Row(namespace='', tableName='table1', isTemporary=True, tableType='VIEW') """ if dbName is None: return DataFrame(self._ssql_ctx.tables(), self) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala index 5ccebc7edb876..8e3a46084a5ec 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala @@ -742,6 +742,13 @@ object CatalogTableType { val VIEW = new CatalogTableType("VIEW") val tableTypes = Seq(EXTERNAL, MANAGED, VIEW) + + def classicTableTypeString(tableType: CatalogTableType): String = tableType match { + case EXTERNAL | MANAGED => "TABLE" + case VIEW => "VIEW" + case t => + throw new IllegalArgumentException(s"Unknown table type is found: $t") + } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala index 938c23a51128e..6fae3b236f5c9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala @@ -515,7 +515,13 @@ object ShowTables { def getOutputAttrs: Seq[Attribute] = Seq(
AttributeReference("namespace", StringType, nullable = false)(), AttributeReference("tableName", StringType, nullable = false)(), - AttributeReference("isTemporary", BooleanType, nullable = false)()) + AttributeReference("isTemporary", BooleanType, nullable = false)(), + AttributeReference("tableType", StringType, nullable = false)()) + + def getLegacyOutputAttrs: Seq[Attribute] = { + val output = getOutputAttrs + output.head.withName("database") +: output.slice(1, output.length - 1) + } } /** @@ -534,7 +540,13 @@ object ShowTableExtended { AttributeReference("namespace", StringType, nullable = false)(), AttributeReference("tableName", StringType, nullable = false)(), AttributeReference("isTemporary", BooleanType, nullable = false)(), - AttributeReference("information", StringType, nullable = false)()) + AttributeReference("information", StringType, nullable = false)(), + AttributeReference("tableType", StringType, nullable = false)()) + + def getLegacyOutputAttrs: Seq[Attribute] = { + val output = getOutputAttrs + output.head.withName("database") +: output.slice(1, output.length - 1) + } } /** diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 130af3ae964c7..c5363b249803c 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -3095,7 +3095,7 @@ object SQLConf { buildConf("spark.sql.legacy.keepCommandOutputSchema") .internal() .doc("When true, Spark will keep the output schema of commands such as SHOW DATABASES " + - "unchanged, for v1 catalog and/or table.") + "as same as Spark 3.0 and earlier, for v1 catalog and/or table.") .version("3.0.2") .booleanConf .createWithDefault(false) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala index f9b9e5acb7fe8..6e05e91ce89f1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala @@ -353,8 +353,8 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager) case ShowTables(DatabaseInSessionCatalog(db), pattern, output) => val newOutput = if (conf.getConf(SQLConf.LEGACY_KEEP_COMMAND_OUTPUT_SCHEMA)) { - assert(output.length == 3) - output.head.withName("database") +: output.tail + assert(output.length == 4) + ShowTables.getLegacyOutputAttrs } else { output } @@ -366,8 +366,8 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager) partitionSpec @ (None | Some(UnresolvedPartitionSpec(_, _))), output) => val newOutput = if (conf.getConf(SQLConf.LEGACY_KEEP_COMMAND_OUTPUT_SCHEMA)) { - assert(output.length == 4) - output.head.withName("database") +: output.tail + assert(output.length == 5) + ShowTableExtended.getLegacyOutputAttrs } else { output } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 488c628fb8633..3f72b5b70b4bf 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -843,11 +843,22 @@ case class ShowTablesCommand( val database = tableIdent.database.getOrElse("") val tableName = 
tableIdent.table val isTemp = catalog.isTempView(tableIdent) + val catalogTable = catalog.getTempViewOrPermanentTableMetadata(tableIdent) + val tableType = classicTableTypeString(catalogTable.tableType) if (isExtended) { - val information = catalog.getTempViewOrPermanentTableMetadata(tableIdent).simpleString - Row(database, tableName, isTemp, s"$information\n") + val information = catalogTable.simpleString + if (output.size == 5) { + Row(database, tableName, isTemp, s"$information\n", tableType) + } else { + Row(database, tableName, isTemp, s"$information\n") + } } else { - Row(database, tableName, isTemp) + if (output.size == 4) { + Row(database, tableName, isTemp, tableType) + } else { + Row(database, tableName, isTemp) + } + } } } else { @@ -870,7 +881,12 @@ case class ShowTablesCommand( val tableName = tableIdent.table val isTemp = catalog.isTempView(tableIdent) val information = partition.simpleString - Seq(Row(database, tableName, isTemp, s"$information\n")) + if (output.size == 5) { + val tableType = classicTableTypeString(table.tableType) + Seq(Row(database, tableName, isTemp, s"$information\n", tableType)) + } else { + Seq(Row(database, tableName, isTemp, s"$information\n")) + } } } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablesExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablesExec.scala index b624e621f1f3d..459d0a97aaebb 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablesExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablesExec.scala @@ -40,7 +40,7 @@ case class ShowTablesExec( val tables = catalog.listTables(namespace.toArray) tables.map { table => if (pattern.map(StringUtils.filterPattern(Seq(table.name()), _).nonEmpty).getOrElse(true)) { - rows += toCatalystRow(table.namespace().quoted, table.name(), false) + rows += toCatalystRow(table.namespace().quoted, table.name(), false, "TABLE") } } diff --git a/sql/core/src/test/resources/log4j.properties b/sql/core/src/test/resources/log4j.properties index 2e5cac12952db..7296e77e2d77e 100644 --- a/sql/core/src/test/resources/log4j.properties +++ b/sql/core/src/test/resources/log4j.properties @@ -22,7 +22,7 @@ log4j.rootLogger=INFO, CA, FA log4j.appender.CA=org.apache.log4j.ConsoleAppender log4j.appender.CA.layout=org.apache.log4j.PatternLayout log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n -log4j.appender.CA.Threshold = WARN +log4j.appender.CA.Threshold = FATAL log4j.appender.CA.follow = true diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables-legacy.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables-legacy.sql new file mode 100644 index 0000000000000..ce5c4fd171f98 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables-legacy.sql @@ -0,0 +1,2 @@ +--SET spark.sql.legacy.keepCommandOutputSchema=true +--IMPORT show-tables.sql diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables-legacy.sql.out new file mode 100644 index 0000000000000..05cfb40b9295b --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/show-tables-legacy.sql.out @@ -0,0 +1,287 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 27 + + +-- !query +CREATE DATABASE showdb +-- !query schema +struct<> +-- !query output + + + +-- !query +USE showdb +-- !query schema +struct<> +-- !query output + + + 
+-- !query +CREATE TABLE show_t1(a String, b Int, c String, d String) USING parquet PARTITIONED BY (c, d) +-- !query schema +struct<> +-- !query output + + + +-- !query +ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1) +-- !query schema +struct<> +-- !query output + + + +-- !query +CREATE TABLE show_t2(b String, d Int) USING parquet +-- !query schema +struct<> +-- !query output + + + +-- !query +CREATE TEMPORARY VIEW show_t3(e int) USING parquet +-- !query schema +struct<> +-- !query output + + + +-- !query +CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1 +-- !query schema +struct<> +-- !query output + + + +-- !query +SHOW TABLES +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean> +-- !query output +show_t1 +show_t2 +show_t3 + + +-- !query +SHOW TABLES IN showdb +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean> +-- !query output +show_t1 +show_t2 +show_t3 + + +-- !query +SHOW TABLES 'show_t*' +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean> +-- !query output +show_t1 +show_t2 +show_t3 + + +-- !query +SHOW TABLES LIKE 'show_t1*|show_t2*' +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean> +-- !query output +show_t1 +show_t2 + + +-- !query +SHOW TABLES IN showdb 'show_t*' +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean> +-- !query output +show_t1 +show_t2 +show_t3 + + +-- !query +SHOW TABLES IN showdb LIKE 'show_t*' +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean> +-- !query output +show_t1 +show_t2 +show_t3 + + +-- !query +SHOW TABLE EXTENDED LIKE 'show_t*' +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean,information:string> +-- !query output + show_t3 true Table: show_t3 +Created Time [not included in comparison] +Last Access [not included in comparison] +Created By [not included in comparison] +Type: VIEW +Table Properties: [view.storingAnalyzedPlan=true] +Schema: root + |-- e: integer (nullable = true) + + +showdb show_t1 false Database: showdb +Table: show_t1 +Created Time [not included in comparison] +Last Access [not included in comparison] +Created By [not included in comparison] +Type: MANAGED +Provider: parquet +Location [not included in comparison]/{warehouse_dir}/showdb.db/show_t1 +Partition Provider: Catalog +Partition Columns: [`c`, `d`] +Schema: root + |-- a: string (nullable = true) + |-- b: integer (nullable = true) + |-- c: string (nullable = true) + |-- d: string (nullable = true) + + +showdb show_t2 false Database: showdb +Table: show_t2 +Created Time [not included in comparison] +Last Access [not included in comparison] +Created By [not included in comparison] +Type: MANAGED +Provider: parquet +Location [not included in comparison]/{warehouse_dir}/showdb.db/show_t2 +Schema: root + |-- b: string (nullable = true) + |-- d: integer (nullable = true) + + +-- !query +SHOW TABLE EXTENDED +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException + +mismatched input '<EOF>' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 19) + +== SQL == +SHOW TABLE EXTENDED +-------------------^^^ + + +-- !query +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us', d=1) +-- !query schema +struct<database:string,tableName:string,isTemporary:boolean,information:string> +-- !query output +showdb show_t1 false Partition Values: [c=Us, d=1] +Location [not included in comparison]/{warehouse_dir}/showdb.db/show_t1/c=Us/d=1 +Created Time [not included in comparison] +Last Access [not included in comparison] + + +-- !query +SHOW TABLE EXTENDED PARTITION(c='Us', d=1) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException + +mismatched input 'PARTITION' expecting {'FROM', 'IN', 'LIKE'}(line 1, pos 20) + +== SQL == +SHOW TABLE EXTENDED PARTITION(c='Us', d=1) +--------------------^^^ + + +-- !query +SHOW TABLE EXTENDED LIKE 'show_t*' PARTITION(c='Us', d=1) +-- !query schema +struct<>
+-- !query output +org.apache.spark.sql.catalyst.analysis.NoSuchTableException +Table or view 'show_t*' not found in database 'showdb' + + +-- !query +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`showdb`.`show_t1`' + + +-- !query +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(a='Us', d=1) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +a is not a valid partition column in table `showdb`.`show_t1`. + + +-- !query +SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Ch', d=1) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException +Partition not found in table 'show_t1' database 'showdb': +c -> Ch +d -> 1 + + +-- !query +DROP TABLE show_t1 +-- !query schema +struct<> +-- !query output + + + +-- !query +DROP TABLE show_t2 +-- !query schema +struct<> +-- !query output + + + +-- !query +DROP VIEW show_t3 +-- !query schema +struct<> +-- !query output + + + +-- !query +DROP VIEW global_temp.show_t4 +-- !query schema +struct<> +-- !query output + + + +-- !query +USE default +-- !query schema +struct<> +-- !query output + + + +-- !query +DROP DATABASE showdb +-- !query schema +struct<> +-- !query output + diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out index 9f877a2ab93ec..ce1c03f433b61 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out @@ -61,7 +61,7 @@ struct<> -- !query SHOW TABLES -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean> +struct<namespace:string,tableName:string,isTemporary:boolean,tableType:string> -- !query output show_t1 show_t2 @@ -71,7 +71,7 @@ show_t3 -- !query SHOW TABLES IN showdb -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean> +struct<namespace:string,tableName:string,isTemporary:boolean,tableType:string> -- !query output show_t1 show_t2 @@ -81,7 +81,7 @@ show_t3 -- !query SHOW TABLES 'show_t*' -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean> +struct<namespace:string,tableName:string,isTemporary:boolean,tableType:string> -- !query output show_t1 show_t2 @@ -91,7 +91,7 @@ show_t3 -- !query SHOW TABLES LIKE 'show_t1*|show_t2*' -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean> +struct<namespace:string,tableName:string,isTemporary:boolean,tableType:string> -- !query output show_t1 show_t2 @@ -100,7 +100,7 @@ show_t2 -- !query SHOW TABLES IN showdb 'show_t*' -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean> +struct<namespace:string,tableName:string,isTemporary:boolean,tableType:string> -- !query output show_t1 show_t2 @@ -110,7 +110,7 @@ show_t3 -- !query SHOW TABLES IN showdb LIKE 'show_t*' -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean> +struct<namespace:string,tableName:string,isTemporary:boolean,tableType:string> -- !query output show_t1 show_t2 @@ -120,7 +120,7 @@ show_t3 -- !query SHOW TABLE EXTENDED LIKE 'show_t*' -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean,information:string> +struct<namespace:string,tableName:string,isTemporary:boolean,information:string,tableType:string> -- !query output show_t3 true Table: show_t3 Created Time [not included in comparison] @@ -131,7 +131,7 @@ Table Properties: [view.storingAnalyzedPlan=true] Schema: root |-- e: integer (nullable = true) - + VIEW showdb show_t1 false Database: showdb Table: show_t1 Created Time [not included in comparison] @@ -148,7 +148,7 @@ Schema: root |-- c: string (nullable = true) |-- d: string (nullable = true) - + TABLE showdb show_t2 false Database: showdb Table: show_t2 Created Time [not included in comparison] @@ -161,6 +161,8 @@ Schema: root |-- b: string (nullable = true) |-- d: integer (nullable = true) + TABLE + -- !query SHOW TABLE EXTENDED @@ -179,12 +181,13 @@ SHOW TABLE EXTENDED -- !query SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us', d=1) -- !query schema -struct<namespace:string,tableName:string,isTemporary:boolean,information:string> +struct<namespace:string,tableName:string,isTemporary:boolean,information:string,tableType:string> -- !query output showdb show_t1 false Partition Values: [c=Us, d=1] Location [not included in comparison]/{warehouse_dir}/showdb.db/show_t1/c=Us/d=1
Created Time [not included in comparison] Last Access [not included in comparison] + TABLE -- !query diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala index d81768c0077eb..41d670c7ea934 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala @@ -88,11 +88,11 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext { df.createOrReplaceTempView("listtablessuitetable") assert( sqlContext.tables().filter("tableName = 'listtablessuitetable'").collect().toSeq == - Row("", "listtablessuitetable", true) :: Nil) + Row("", "listtablessuitetable", true, "VIEW") :: Nil) assert( sqlContext.sql("SHOW tables").filter("tableName = 'listtablessuitetable'").collect().toSeq == - Row("", "listtablessuitetable", true) :: Nil) + Row("", "listtablessuitetable", true, "VIEW") :: Nil) sqlContext.sessionState.catalog.dropTable( TableIdentifier("listtablessuitetable"), ignoreIfNotExists = true, purge = false) @@ -105,11 +105,11 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext { df.createOrReplaceTempView("listtablessuitetable") assert( sqlContext.tables("default").filter("tableName = 'listtablessuitetable'").collect().toSeq == - Row("", "listtablessuitetable", true) :: Nil) + Row("", "listtablessuitetable", true, "VIEW") :: Nil) assert( sqlContext.sql("show TABLES in default").filter("tableName = 'listtablessuitetable'") - .collect().toSeq == Row("", "listtablessuitetable", true) :: Nil) + .collect().toSeq == Row("", "listtablessuitetable", true, "VIEW") :: Nil) sqlContext.sessionState.catalog.dropTable( TableIdentifier("listtablessuitetable"), ignoreIfNotExists = true, purge = false) @@ -124,7 +124,8 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext { val expectedSchema = StructType( StructField("namespace", StringType, false) :: StructField("tableName", StringType, false) :: - StructField("isTemporary", BooleanType, false) :: Nil) + StructField("isTemporary", BooleanType, false) :: + StructField("tableType", StringType, false) :: Nil) Seq(sqlContext.tables(), sqlContext.sql("SHOW TABLes")).foreach { case tableDF => diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index c4abed32bf624..e4f176c5d6bcc 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -1879,7 +1879,8 @@ class DataSourceV2SQLSuite test("rename table by ALTER VIEW") { withTable("testcat.ns1.new") { sql("CREATE TABLE testcat.ns1.ns2.old USING foo AS SELECT id, data FROM source") - checkAnswer(sql("SHOW TABLES FROM testcat.ns1.ns2"), Seq(Row("ns1.ns2", "old", false))) + checkAnswer(sql("SHOW TABLES FROM testcat.ns1.ns2"), + Seq(Row("ns1.ns2", "old", false, "TABLE"))) val e = intercept[AnalysisException] { sql("ALTER VIEW testcat.ns1.ns2.old RENAME TO ns1.new") @@ -2602,7 +2603,7 @@ class DataSourceV2SQLSuite sql(s"ALTER TABLE $tbl RENAME TO new_tbl") checkAnswer( sql(s"SHOW TABLES FROM testcat.ns1.ns2 LIKE 'new_tbl'"), - Row("ns1.ns2", "new_tbl", false)) + Row("ns1.ns2", "new_tbl", false, "TABLE")) checkAnswer(sql(s"SELECT c0 FROM ${catalogAndNamespace}new_tbl"), Row(0)) } } diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala index 28e82aa14e0d0..43bcd7fa2fc08 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala @@ -130,8 +130,8 @@ class GlobalTempViewSuite extends QueryTest with SharedSparkSession { sql("CREATE TEMP VIEW v2 AS SELECT 1, 2") checkAnswer(sql(s"SHOW TABLES IN $globalTempDB"), - Row(globalTempDB, "v1", true) :: - Row("", "v2", true) :: Nil) + Row(globalTempDB, "v1", true, "VIEW") :: + Row("", "v2", true, "VIEW") :: Nil) assert(spark.catalog.listTables(globalTempDB).collect().toSeq.map(_.name) == Seq("v1", "v2")) } finally { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala index 06385017bbd64..9e9237161cb63 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala @@ -44,7 +44,7 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils { test("show an existing table") { withNamespaceAndTable("ns", "table") { t => sql(s"CREATE TABLE $t (name STRING, id INT) $defaultUsing") - runShowTablesSql(s"SHOW TABLES IN $catalog.ns", Seq(Row("ns", "table", false))) + runShowTablesSql(s"SHOW TABLES IN $catalog.ns", Seq(Row("ns", "table", false, "TABLE"))) } } @@ -72,25 +72,25 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils { runShowTablesSql( s"SHOW TABLES FROM $catalog.ns1", Seq( - Row("ns1", "table", false), - Row("ns1", "table_name_1a", false), - Row("ns1", "table_name_2b", false))) + Row("ns1", "table", false, "TABLE"), + Row("ns1", "table_name_1a", false, "TABLE"), + Row("ns1", "table_name_2b", false, "TABLE"))) runShowTablesSql( s"SHOW TABLES FROM $catalog.ns1 LIKE '*name*'", Seq( - Row("ns1", "table_name_1a", false), - Row("ns1", "table_name_2b", false))) + Row("ns1", "table_name_1a", false, "TABLE"), + Row("ns1", "table_name_2b", false, "TABLE"))) runShowTablesSql( s"SHOW TABLES FROM $catalog.ns1 LIKE 'table_name_1*|table_name_2*'", Seq( - Row("ns1", "table_name_1a", false), - Row("ns1", "table_name_2b", false))) + Row("ns1", "table_name_1a", false, "TABLE"), + Row("ns1", "table_name_2b", false, "TABLE"))) runShowTablesSql( s"SHOW TABLES FROM $catalog.ns1 LIKE '*2b'", - Seq(Row("ns1", "table_name_2b", false))) + Seq(Row("ns1", "table_name_2b", false, "TABLE"))) } } } @@ -101,7 +101,7 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils { withTable(tblName) { sql(s"CREATE TABLE $tblName (name STRING, id INT) $defaultUsing") val ns = defaultNamespace.mkString(".") - runShowTablesSql("SHOW TABLES", Seq(Row(ns, "table", false))) + runShowTablesSql("SHOW TABLES", Seq(Row(ns, "table", false, "TABLE"))) } } } @@ -129,7 +129,7 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils { // Update the current namespace to match "ns.tbl". 
sql(s"USE $catalog.ns") - runShowTablesSql("SHOW TABLES", Seq(Row("ns", "table", false))) + runShowTablesSql("SHOW TABLES", Seq(Row("ns", "table", false, "TABLE"))) } } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala index 4efb7c85e5066..1ae46b65198d0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala @@ -47,7 +47,7 @@ trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase { withSourceViews { runShowTablesSql( "SHOW TABLES FROM default", - Seq(Row("", "source", true), Row("", "source2", true))) + Seq(Row("", "source", true, "VIEW"), Row("", "source2", true, "VIEW"))) } } @@ -60,14 +60,16 @@ trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase { test("SHOW TABLE EXTENDED from default") { withSourceViews { - val expected = Seq(Row("", "source", true), Row("", "source2", true)) + val expected = Seq(Row("", "source", true, "VIEW"), Row("", "source2", true, "VIEW")) val df = sql("SHOW TABLE EXTENDED FROM default LIKE '*source*'") val result = df.collect() - val resultWithoutInfo = result.map { case Row(db, table, temp, _) => Row(db, table, temp) } + val resultWithoutInfo = result.map { + case Row(db, table, temp, _, tableType) => Row(db, table, temp, tableType) + } assert(resultWithoutInfo === expected) - result.foreach { case Row(_, _, _, info: String) => assert(info.nonEmpty) } + result.foreach { case Row(_, _, _, info: String, _) => assert(info.nonEmpty) } } } @@ -108,12 +110,12 @@ trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase { sql(s"USE $catalog.ns") withTable("tbl") { sql("CREATE TABLE tbl(col1 int, col2 string) USING parquet") - checkAnswer(sql("show tables"), Row("ns", "tbl", false)) + checkAnswer(sql("show tables"), Row("ns", "tbl", false, "TABLE")) assert(sql("show tables").schema.fieldNames === - Seq("namespace", "tableName", "isTemporary")) - assert(sql("show table extended like 'tbl'").collect()(0).length == 4) + Seq("namespace", "tableName", "isTemporary", "tableType")) + assert(sql("show table extended like 'tbl'").collect()(0).length == 5) assert(sql("show table extended like 'tbl'").schema.fieldNames === - Seq("namespace", "tableName", "isTemporary", "information")) + Seq("namespace", "tableName", "isTemporary", "information", "tableType")) // Keep the legacy output schema withSQLConf(SQLConf.LEGACY_KEEP_COMMAND_OUTPUT_SCHEMA.key -> "true") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala index 702c1744a68ef..d1359ed2147a1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala @@ -34,7 +34,7 @@ class ShowTablesSuite extends command.ShowTablesSuiteBase with CommandSuiteBase spark.sql(s"CREATE TABLE $catalog.n1.n2.db.table_name (id bigint, data string) $defaultUsing") runShowTablesSql( s"SHOW TABLES FROM $catalog.n1.n2.db", - Seq(Row("n1.n2.db", "table_name", false))) + Seq(Row("n1.n2.db", "table_name", false, "TABLE"))) } } @@ -44,7 +44,7 @@ class ShowTablesSuite extends command.ShowTablesSuiteBase with CommandSuiteBase test("using v2 catalog with empty namespace") { 
withTable(s"$catalog.table") { spark.sql(s"CREATE TABLE $catalog.table (id bigint, data string) $defaultUsing") - runShowTablesSql(s"SHOW TABLES FROM $catalog", Seq(Row("", "table", false))) + runShowTablesSql(s"SHOW TABLES FROM $catalog", Seq(Row("", "table", false, "TABLE"))) } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala index b94d868120b28..3aade39e1de70 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala @@ -68,7 +68,7 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession { } test("show tables") { - checkAnswer(sql("SHOW TABLES IN h2.test"), Seq(Row("test", "people", false))) + checkAnswer(sql("SHOW TABLES IN h2.test"), Seq(Row("test", "people", false, "TABLE"))) // Check not existing namespace checkAnswer(sql("SHOW TABLES IN h2.bad_test"), Seq()) } @@ -78,9 +78,9 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession { conn.prepareStatement("""CREATE TABLE "test"."to_drop" (id INTEGER)""").executeUpdate() } checkAnswer(sql("SHOW TABLES IN h2.test"), - Seq(Row("test", "to_drop", false), Row("test", "people", false))) + Seq(Row("test", "to_drop", false, "TABLE"), Row("test", "people", false, "TABLE"))) sql("DROP TABLE h2.test.to_drop") - checkAnswer(sql("SHOW TABLES IN h2.test"), Seq(Row("test", "people", false))) + checkAnswer(sql("SHOW TABLES IN h2.test"), Seq(Row("test", "people", false, "TABLE"))) Seq( "h2.test.not_existing_table" -> "Table or view not found: h2.test.not_existing_table", @@ -101,11 +101,11 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession { } checkAnswer( sql("SHOW TABLES IN h2.test"), - Seq(Row("test", "src_table", false), Row("test", "people", false))) + Seq(Row("test", "src_table", false, "TABLE"), Row("test", "people", false, "TABLE"))) sql("ALTER TABLE h2.test.src_table RENAME TO test.dst_table") checkAnswer( sql("SHOW TABLES IN h2.test"), - Seq(Row("test", "dst_table", false), Row("test", "people", false))) + Seq(Row("test", "dst_table", false, "TABLE"), Row("test", "people", false, "TABLE"))) } // Rename not existing table or namespace val exp1 = intercept[AnalysisException] { @@ -156,7 +156,7 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession { sql("CREATE TABLE h2.test.new_table(i INT, j STRING)") checkAnswer( sql("SHOW TABLES IN h2.test"), - Seq(Row("test", "people", false), Row("test", "new_table", false))) + Seq(Row("test", "people", false, "TABLE"), Row("test", "new_table", false, "TABLE"))) } withTable("h2.test.new_table") { sql("CREATE TABLE h2.test.new_table(i INT, j STRING)") diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala index a3a3f47280952..cf1c6823fe868 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala @@ -145,7 +145,7 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession { test("show tables") { checkAnswer(sql("SHOW TABLES IN h2.test"), - Seq(Row("test", "people", false), Row("test", "empty_table", false))) + Seq(Row("test", "people", false, "TABLE"), Row("test", "empty_table", false, "TABLE"))) } 
test("SQL API: create table as select") { diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkOperation.scala index bbfc1b83379aa..368f3bcb851e7 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkOperation.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkOperation.scala @@ -24,7 +24,6 @@ import org.apache.spark.SparkContext import org.apache.spark.internal.Logging import org.apache.spark.sql.{SparkSession, SQLContext} import org.apache.spark.sql.catalyst.catalog.CatalogTableType -import org.apache.spark.sql.catalyst.catalog.CatalogTableType.{EXTERNAL, MANAGED, VIEW} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.util.Utils @@ -87,11 +86,8 @@ private[hive] trait SparkOperation extends Operation with Logging { } } - def tableTypeString(tableType: CatalogTableType): String = tableType match { - case EXTERNAL | MANAGED => "TABLE" - case VIEW => "VIEW" - case t => - throw new IllegalArgumentException(s"Unknown table type is found: $t") + def tableTypeString(tableType: CatalogTableType): String = { + CatalogTableType.classicTableTypeString(tableType) } protected def onError(): PartialFunction[Throwable, Unit] = { diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala index 49cd0885e722a..691862b1095e5 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala @@ -62,10 +62,10 @@ class ListTablesSuite extends QueryTest // We are using default DB. 
checkAnswer( allTables.filter("tableName = 'listtablessuitetable'"), - Row("", "listtablessuitetable", true)) + Row("", "listtablessuitetable", true, "VIEW")) checkAnswer( allTables.filter("tableName = 'hivelisttablessuitetable'"), - Row("default", "hivelisttablessuitetable", false)) + Row("default", "hivelisttablessuitetable", false, "TABLE")) assert(allTables.filter("tableName = 'hiveindblisttablessuitetable'").count() === 0) } } @@ -75,11 +75,11 @@ class ListTablesSuite extends QueryTest case allTables => checkAnswer( allTables.filter("tableName = 'listtablessuitetable'"), - Row("", "listtablessuitetable", true)) + Row("", "listtablessuitetable", true, "VIEW")) assert(allTables.filter("tableName = 'hivelisttablessuitetable'").count() === 0) checkAnswer( allTables.filter("tableName = 'hiveindblisttablessuitetable'"), - Row("listtablessuitedb", "hiveindblisttablessuitetable", false)) + Row("listtablessuitedb", "hiveindblisttablessuitetable", false, "TABLE")) } } } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala index ba441922d3600..2d649651f90c9 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala @@ -1046,7 +1046,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv checkAnswer( spark.sql("show TABLES in testdb8156").filter("tableName = 'ttt3'"), - Row("testdb8156", "ttt3", false)) + Row("testdb8156", "ttt3", false, "TABLE")) spark.sql("""use default""") spark.sql("""drop database if exists testdb8156 CASCADE""") } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala index b6db9a3e74315..74f6b2a50d84f 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala @@ -26,7 +26,7 @@ class ShowTablesSuite extends v1.ShowTablesSuiteBase with CommandSuiteBase { test("hive client calls") { withNamespaceAndTable("ns", "tbl") { t => sql(s"CREATE TABLE $t (id int) $defaultUsing") - checkHiveClientCalls(expected = 3) { + checkHiveClientCalls(expected = 6) { sql(s"SHOW TABLES IN $catalog.ns") } }
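
For illustration, a minimal spark-shell sketch of the user-visible change, assuming a build with this patch applied; `demo_tbl` and `demo_view` are made-up names, not part of the patch:

```scala
// Sketch only: assumes a Spark build that includes this patch.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").getOrCreate()
spark.sql("CREATE TABLE demo_tbl(id INT) USING parquet")
spark.sql("CREATE TEMPORARY VIEW demo_view AS SELECT 1 AS col1")

// Default output now carries the extra column:
// namespace, tableName, isTemporary, tableType.
spark.sql("SHOW TABLES").printSchema()

// Managed and external tables surface as "TABLE", views as "VIEW".
spark.sql("SHOW TABLES").select("tableName", "tableType").show()

// The legacy flag restores the pre-3.2 schema for the builtin catalog:
// database, tableName, isTemporary (no tableType column).
spark.conf.set("spark.sql.legacy.keepCommandOutputSchema", "true")
spark.sql("SHOW TABLES").printSchema()
```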
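Similarly, a small sketch of the contract of the hoisted helper, `CatalogTableType.classicTableTypeString`, which `SHOW TABLES` and the Thrift server's `tableTypeString` now share (again assuming the patch is applied):

```scala
import org.apache.spark.sql.catalyst.catalog.CatalogTableType

// EXTERNAL and MANAGED both map to "TABLE"; VIEW maps to "VIEW".
// Any other CatalogTableType value throws IllegalArgumentException.
Seq(CatalogTableType.EXTERNAL, CatalogTableType.MANAGED, CatalogTableType.VIEW)
  .foreach { t =>
    println(s"${t.name} -> ${CatalogTableType.classicTableTypeString(t)}")
  }
// EXTERNAL -> TABLE
// MANAGED -> TABLE
// VIEW -> VIEW
```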