Skip to content

Commit 68dca9a

Browse files
viiryacloud-fan
authored and committed
[SPARK-29527][SQL] SHOW CREATE TABLE should look up catalog/table like v2 commands
### What changes were proposed in this pull request? Add ShowCreateTableStatement and make SHOW CREATE TABLE go through the same catalog/table resolution framework of v2 commands. ### Why are the changes needed? It's important to make all the commands have the same table resolution behavior, to avoid confusing end-users. e.g. ``` USE my_catalog DESC t // success and describe the table t from my_catalog SHOW CREATE TABLE t // report table not found as there is no table t in the session catalog ``` ### Does this PR introduce any user-facing change? yes. When running SHOW CREATE TABLE, Spark fails the command if the current catalog is set to a v2 catalog, or the table name specified a v2 catalog. ### How was this patch tested? Unit tests. Closes apache#26184 from viirya/SPARK-29527. Lead-authored-by: Liang-Chi Hsieh <[email protected]> Co-authored-by: Liang-Chi Hsieh <[email protected]> Signed-off-by: Wenchen Fan <[email protected]>
1 parent 0cf4f07 commit 68dca9a

File tree

7 files changed

+32
-10
lines changed

7 files changed

+32
-10
lines changed

sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,7 @@ statement
201201
| SHOW PARTITIONS multipartIdentifier partitionSpec? #showPartitions
202202
| SHOW identifier? FUNCTIONS
203203
(LIKE? (qualifiedName | pattern=STRING))? #showFunctions
204-
| SHOW CREATE TABLE tableIdentifier #showCreateTable
204+
| SHOW CREATE TABLE multipartIdentifier #showCreateTable
205205
| (DESC | DESCRIBE) FUNCTION EXTENDED? describeFuncName #describeFunction
206206
| (DESC | DESCRIBE) database EXTENDED? db=errorCapturingIdentifier #describeDatabase
207207
| (DESC | DESCRIBE) TABLE? option=(EXTENDED | FORMATTED)?

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2797,6 +2797,13 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
27972797
RepairTableStatement(visitMultipartIdentifier(ctx.multipartIdentifier()))
27982798
}
27992799

2800+
/**
2801+
* Creates a [[ShowCreateTableStatement]]
2802+
*/
2803+
override def visitShowCreateTable(ctx: ShowCreateTableContext): LogicalPlan = withOrigin(ctx) {
2804+
ShowCreateTableStatement(visitMultipartIdentifier(ctx.multipartIdentifier()))
2805+
}
2806+
28002807
/**
28012808
* Create a [[CacheTableStatement]].
28022809
*

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -330,6 +330,11 @@ case class AnalyzeColumnStatement(
330330
*/
331331
case class RepairTableStatement(tableName: Seq[String]) extends ParsedStatement
332332

333+
/**
334+
* A SHOW CREATE TABLE statement, as parsed from SQL.
335+
*/
336+
case class ShowCreateTableStatement(tableName: Seq[String]) extends ParsedStatement
337+
333338
/**
334339
* A CACHE TABLE statement, as parsed from SQL
335340
*/

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1045,6 +1045,12 @@ class DDLParserSuite extends AnalysisTest {
10451045
RepairTableStatement(Seq("a", "b", "c")))
10461046
}
10471047

1048+
test("SHOW CREATE table") {
1049+
comparePlans(
1050+
parsePlan("SHOW CREATE TABLE a.b.c"),
1051+
ShowCreateTableStatement(Seq("a", "b", "c")))
1052+
}
1053+
10481054
test("CACHE TABLE") {
10491055
comparePlans(
10501056
parsePlan("CACHE TABLE a.b.c"),

sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
2424
import org.apache.spark.sql.catalyst.rules.Rule
2525
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, LookupCatalog, TableChange, V1Table}
2626
import org.apache.spark.sql.connector.expressions.Transform
27-
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand, UncacheTableCommand}
27+
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowCreateTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand, UncacheTableCommand}
2828
import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource, RefreshTable}
2929
import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
3030
import org.apache.spark.sql.internal.SQLConf
@@ -299,6 +299,10 @@ class ResolveSessionCatalog(
299299
v1TableName.asTableIdentifier,
300300
"MSCK REPAIR TABLE")
301301

302+
case ShowCreateTableStatement(tableName) =>
303+
val v1TableName = parseV1Table(tableName, "SHOW CREATE TABLE")
304+
ShowCreateTableCommand(v1TableName.asTableIdentifier)
305+
302306
case CacheTableStatement(tableName, plan, isLazy, options) =>
303307
val v1TableName = parseV1Table(tableName, "CACHE TABLE")
304308
CacheTableCommand(v1TableName.asTableIdentifier, plan, isLazy, options)

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -135,14 +135,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
135135
ShowColumnsCommand(Option(ctx.db).map(_.getText), visitTableIdentifier(ctx.tableIdentifier))
136136
}
137137

138-
/**
139-
* Creates a [[ShowCreateTableCommand]]
140-
*/
141-
override def visitShowCreateTable(ctx: ShowCreateTableContext): LogicalPlan = withOrigin(ctx) {
142-
val table = visitTableIdentifier(ctx.tableIdentifier())
143-
ShowCreateTableCommand(table)
144-
}
145-
146138
/**
147139
* Create a [[RefreshResource]] logical plan.
148140
*/

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1268,6 +1268,14 @@ class DataSourceV2SQLSuite
12681268
}
12691269
}
12701270

1271+
test("SHOW CREATE TABLE") {
1272+
val t = "testcat.ns1.ns2.tbl"
1273+
withTable(t) {
1274+
spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
1275+
testV1Command("SHOW CREATE TABLE", t)
1276+
}
1277+
}
1278+
12711279
test("CACHE TABLE") {
12721280
val t = "testcat.ns1.ns2.tbl"
12731281
withTable(t) {

0 commit comments

Comments (0)