From d32a6c4fd03a6aa1f7e27f7dd9f60f7da2b4583a Mon Sep 17 00:00:00 2001 From: Burak Yavuz Date: Fri, 1 May 2020 18:26:34 -0700 Subject: [PATCH 1/2] Fix SHOW TBLPROPERTIES for V2 tables that leverage the session catalog --- .../analysis/ResolveSessionCatalog.scala | 3 ++- .../v2/ShowTablePropertiesExec.scala | 8 ++++-- .../DataSourceV2SQLSessionCatalogSuite.scala | 25 ++++++++++++++++++- 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala index 58a7251f4ebd..bf90875e511f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala @@ -559,7 +559,8 @@ class ResolveSessionCatalog( "SHOW VIEWS, only SessionCatalog supports this command.") } - case ShowTableProperties(r: ResolvedTable, propertyKey) if isSessionCatalog(r.catalog) => + case ShowTableProperties( + r @ ResolvedTable(_, _, _: V1Table), propertyKey) if isSessionCatalog(r.catalog) => ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey) case ShowTableProperties(r: ResolvedView, propertyKey) => diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala index 0bcd7ea54104..fef63cb8253c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.execution.datasources.v2 import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.encoders.RowEncoder -import 
org.apache.spark.sql.catalyst.expressions.{Attribute, GenericRowWithSchema} -import org.apache.spark.sql.connector.catalog.Table +import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet, GenericRowWithSchema} +import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Table, TableCatalog} /** * Physical plan node for showing table properties. @@ -30,11 +30,15 @@ case class ShowTablePropertiesExec( catalogTable: Table, propertyKey: Option[String]) extends V2CommandExec { + override def producedAttributes: AttributeSet = AttributeSet(output) + override protected def run(): Seq[InternalRow] = { import scala.collection.JavaConverters._ val toRow = RowEncoder(schema).resolveAndBind().createSerializer() + // The reserved properties are accessible through DESCRIBE val properties = catalogTable.properties.asScala + .filter { case (k, v) => !CatalogV2Util.TABLE_RESERVED_PROPERTIES.contains(k) } propertyKey match { case Some(p) => val propValue = properties diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala index 249b27c28b07..65ff8d581b3b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.connector -import org.apache.spark.sql.{DataFrame, SaveMode} +import org.apache.spark.sql.{DataFrame, Row, SaveMode} import org.apache.spark.sql.connector.catalog.{Identifier, Table, TableCatalog} class DataSourceV2SQLSessionCatalogSuite @@ -63,4 +63,27 @@ class DataSourceV2SQLSessionCatalogSuite } } } + + test("SPARK-31624: SHOW TBLPROPERTIES working with V2 tables and the session catalog") { + val t1 = "tbl" + withTable(t1) { + sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format TBLPROPERTIES " + 
+ s"(key='v', key2='v2')") + + checkAnswer( + sql(s"SHOW TBLPROPERTIES $t1"), + Seq(Row("key", "v"), Row("key2", "v2")) + ) + + checkAnswer( + sql(s"SHOW TBLPROPERTIES $t1('key')"), + Row("key", "v") + ) + + checkAnswer( + sql(s"SHOW TBLPROPERTIES $t1('keyX')"), + Row("keyX", s"Table default.$t1 does not have property: keyX") + ) + } + } } From 98282869d92e609ba6d74a8fa98dcbd0e51080b7 Mon Sep 17 00:00:00 2001 From: Burak Yavuz Date: Sun, 3 May 2020 14:03:39 -0700 Subject: [PATCH 2/2] address --- .../DataSourceV2SQLSessionCatalogSuite.scala | 15 ++++----------- .../sql/connector/DataSourceV2SQLSuite.scala | 2 -- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala index 65ff8d581b3b..cf00b3b5e441 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala @@ -68,22 +68,15 @@ class DataSourceV2SQLSessionCatalogSuite val t1 = "tbl" withTable(t1) { sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format TBLPROPERTIES " + - s"(key='v', key2='v2')") + "(key='v', key2='v2')") - checkAnswer( - sql(s"SHOW TBLPROPERTIES $t1"), - Seq(Row("key", "v"), Row("key2", "v2")) - ) + checkAnswer(sql(s"SHOW TBLPROPERTIES $t1"), Seq(Row("key", "v"), Row("key2", "v2"))) - checkAnswer( - sql(s"SHOW TBLPROPERTIES $t1('key')"), - Row("key", "v") - ) + checkAnswer(sql(s"SHOW TBLPROPERTIES $t1('key')"), Row("key", "v")) checkAnswer( sql(s"SHOW TBLPROPERTIES $t1('keyX')"), - Row("keyX", s"Table default.$t1 does not have property: keyX") - ) + Row("keyX", s"Table default.$t1 does not have property: keyX")) } } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index 3244684c3396..e947e15a179e 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -2122,8 +2122,6 @@ class DataSourceV2SQLSuite .add("value", StringType, nullable = false) val expected = Seq( - Row(TableCatalog.PROP_OWNER, defaultUser), - Row("provider", provider), Row("status", status), Row("user", user))