From 0db083cd9e7a56c868b942e0a55ac5d35de148e2 Mon Sep 17 00:00:00 2001 From: Dilip Biswal Date: Wed, 28 Sep 2016 21:37:40 -0700 Subject: [PATCH 1/4] SHOW COLUMN's database conflict check should respect case sensitivity configuration. --- .../spark/sql/execution/SparkSqlParser.scala | 12 +-- .../spark/sql/execution/command/tables.scala | 18 +++- .../sql-tests/inputs/show_columns.sql | 27 +++++ .../sql-tests/results/show_columns.sql.out | 100 ++++++++++++++++++ .../apache/spark/sql/SQLQueryTestSuite.scala | 2 + .../execution/command/DDLCommandSuite.scala | 18 ++-- .../sql/execution/command/DDLSuite.scala | 15 +++ .../sql/hive/execution/HiveCommandSuite.scala | 23 +--- .../hive/execution/HiveComparisonTest.scala | 2 +- 9 files changed, 173 insertions(+), 44 deletions(-) create mode 100644 sql/core/src/test/resources/sql-tests/inputs/show_columns.sql create mode 100644 sql/core/src/test/resources/sql-tests/results/show_columns.sql.out diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala index be2eddbb0e42..36400a26bb24 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala @@ -168,17 +168,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { * }}} */ override def visitShowColumns(ctx: ShowColumnsContext): LogicalPlan = withOrigin(ctx) { - val table = visitTableIdentifier(ctx.tableIdentifier) - - val lookupTable = Option(ctx.db) match { - case None => table - case Some(db) if table.database.exists(_ != db) => - operationNotAllowed( - s"SHOW COLUMNS with conflicting databases: '$db' != '${table.database.get}'", - ctx) - case Some(db) => TableIdentifier(table.identifier, Some(db.getText)) - } - ShowColumnsCommand(lookupTable) + ShowColumnsCommand(Option(ctx.db).map(_.getText), visitTableIdentifier(ctx.tableIdentifier)) } /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 424ef58d76c5..95a5828f98a5 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -651,14 +651,28 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio * SHOW COLUMNS (FROM | IN) table_identifier [(FROM | IN) database]; * }}} */ -case class ShowColumnsCommand(tableName: TableIdentifier) extends RunnableCommand { +case class ShowColumnsCommand( + databaseName: Option[String], + tableName: TableIdentifier) extends RunnableCommand { override val output: Seq[Attribute] = { AttributeReference("col_name", StringType, nullable = false)() :: Nil } + private def nameEqual(name1: String, name2: String, caseSensitive: Boolean): Boolean = { + if (caseSensitive) name1 == name2 else name1.equalsIgnoreCase(name2) + } + override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog - val table = catalog.getTempViewOrPermanentTableMetadata(tableName) + val caseSensitive = sparkSession.sessionState.conf.caseSensitiveAnalysis + val lookupTable = databaseName match { + case None => tableName + case Some(db) if tableName.database.exists(!nameEqual(_, db, caseSensitive)) => + throw new AnalysisException( + s"SHOW COLUMNS with conflicting databases: '$db' != '${tableName.database.get}'") + case 
Some(db) => TableIdentifier(tableName.identifier, Some(db)) + } + val table = catalog.getTempViewOrPermanentTableMetadata(lookupTable) table.schema.map { c => Row(c.name) } diff --git a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql new file mode 100644 index 000000000000..dc99248bfe1a --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql @@ -0,0 +1,27 @@ +CREATE TABLE showcolumn1 (col1 int, `col 2` int); +CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int); + +-- only table name +SHOW COLUMNS IN showcolumn1; + +-- qualified table name +SHOW COLUMNS IN default.showcolumn1; + +-- table name and database name +SHOW COLUMNS IN showcolumn1 FROM default; + +-- partitioned table +SHOW COLUMNS IN showcolumn2 IN default; + +-- Non-existent table. Raise an error in this case +SHOW COLUMNS IN badtable FROM default; + +-- database in table identifier and database name in different case +SHOW COLUMNS IN default.showcolumn1 from DEFAULT; + +-- different database name in table identifier and database name. +-- Raise an error in this case. +SHOW COLUMNS IN default.showcolumn1 FROM baddb; + +DROP TABLE showcolumn1; +DROP TABLE showColumn2; diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out new file mode 100644 index 000000000000..2fd552400e0a --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -0,0 +1,100 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 11 + + +-- !query 0 +CREATE TABLE showcolumn1 (col1 int, `col 2` int) +-- !query 0 schema +struct<> +-- !query 0 output + + + +-- !query 1 +CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int) +-- !query 1 schema +struct<> +-- !query 1 output + + + +-- !query 2 +SHOW COLUMNS IN showcolumn1 +-- !query 2 schema +struct +-- !query 2 output +col1 +col 2 + + +-- !query 3 +SHOW COLUMNS IN default.showcolumn1 +-- !query 3 schema +struct +-- !query 3 output +col1 +col 2 + + +-- !query 4 +SHOW COLUMNS IN showcolumn1 FROM default +-- !query 4 schema +struct +-- !query 4 output +col1 +col 2 + + +-- !query 5 +SHOW COLUMNS IN showcolumn2 IN default +-- !query 5 schema +struct +-- !query 5 output +price +qty +year +month + + +-- !query 6 +SHOW COLUMNS IN badtable FROM default +-- !query 6 schema +struct<> +-- !query 6 output +org.apache.spark.sql.catalyst.analysis.NoSuchTableException +Table or view 'badtable' not found in database 'default'; + + +-- !query 7 +SHOW COLUMNS IN default.showcolumn1 from DEFAULT +-- !query 7 schema +struct +-- !query 7 output +col1 +col 2 + + +-- !query 8 +SHOW COLUMNS IN default.showcolumn1 FROM baddb +-- !query 8 schema +struct<> +-- !query 8 output +org.apache.spark.sql.AnalysisException +SHOW COLUMNS with conflicting databases: 'baddb' != 'default'; + + + +-- !query 9 +DROP TABLE showcolumn1 +-- !query 9 schema +struct<> +-- !query 9 output + + + +-- !query 10 +DROP TABLE showColumn2 +-- !query 10 schema +struct<> +-- !query 10 output + diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index 55d5a56f1040..e9809048b815 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -26,6 +26,7 @@ import 
org.apache.spark.sql.catalyst.planning.PhysicalOperation import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.apache.spark.sql.catalyst.util.{fileToString, stringToFile} +import org.apache.spark.sql.execution.command.ShowColumnsCommand import org.apache.spark.sql.test.SharedSQLContext import org.apache.spark.sql.types.StructType @@ -207,6 +208,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext { // Returns true if the plan is supposed to be sorted. def isSorted(plan: LogicalPlan): Boolean = plan match { case _: Join | _: Aggregate | _: Generate | _: Sample | _: Distinct => false + case _: ShowColumnsCommand => true case PhysicalOperation(_, _, Sort(_, true, _)) => true case _ => plan.children.iterator.exists(isSorted) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala index 547fb6381375..4b306035b3a4 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala @@ -822,22 +822,24 @@ class DDLCommandSuite extends PlanTest { val sql1 = "SHOW COLUMNS FROM t1" val sql2 = "SHOW COLUMNS IN db1.t1" val sql3 = "SHOW COLUMNS FROM t1 IN db1" - val sql4 = "SHOW COLUMNS FROM db1.t1 IN db1" - val sql5 = "SHOW COLUMNS FROM db1.t1 IN db2" + val sql4 = "SHOW COLUMNS FROM db1.t1 IN db2" val parsed1 = parser.parsePlan(sql1) - val expected1 = ShowColumnsCommand(TableIdentifier("t1", None)) + val expected1 = ShowColumnsCommand(None, TableIdentifier("t1", None)) val parsed2 = parser.parsePlan(sql2) - val expected2 = ShowColumnsCommand(TableIdentifier("t1", Some("db1"))) + val expected2 = ShowColumnsCommand(None, TableIdentifier("t1", Some("db1"))) val parsed3 = parser.parsePlan(sql3) - val parsed4 = parser.parsePlan(sql3) + val expected3 = ShowColumnsCommand(Some("db1"), TableIdentifier("t1", None)) + val parsed4 = parser.parsePlan(sql4) + val expected4 = ShowColumnsCommand(Some("db2"), TableIdentifier("t1", Some("db1"))) + comparePlans(parsed1, expected1) comparePlans(parsed2, expected2) - comparePlans(parsed3, expected2) - comparePlans(parsed4, expected2) - assertUnsupported(sql5) + comparePlans(parsed3, expected3) + comparePlans(parsed4, expected4) } + test("show partitions") { val sql1 = "SHOW PARTITIONS t1" val sql2 = "SHOW PARTITIONS db1.t1" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index 097dc2441351..552d097f3ae3 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1713,4 +1713,19 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { assert(sql("show user functions").count() === 1L) } } + + test("show columns - negative test") { + // When case sensitivity is true, the user supplied database name in table identifier + // should match the supplied database name in case sensitive way. 
+ withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { + val tabName = "showcolumn" + withTable(tabName) { + sql(s"CREATE TABLE $tabName(col1 int, col2 string) USING parquet ") + val message = intercept[AnalysisException] { + sql(s"SHOW COLUMNS IN default.showcolumn FROM DEFAULT") + }.getMessage + assert(message.contains("SHOW COLUMNS with conflicting databases")) + } + } + } } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala index 2c772ce2155e..ad1e9b17a9f7 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala @@ -22,6 +22,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.NoSuchTableException import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType} import org.apache.spark.sql.hive.test.TestHiveSingleton +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SQLTestUtils import org.apache.spark.sql.types.StructType @@ -336,28 +337,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto } } - test("show columns") { - checkAnswer( - sql("SHOW COLUMNS IN parquet_tab3"), - Row("col1") :: Row("col 2") :: Nil) - - checkAnswer( - sql("SHOW COLUMNS IN default.parquet_tab3"), - Row("col1") :: Row("col 2") :: Nil) - - checkAnswer( - sql("SHOW COLUMNS IN parquet_tab3 FROM default"), - Row("col1") :: Row("col 2") :: Nil) - - checkAnswer( - sql("SHOW COLUMNS IN parquet_tab4 IN default"), - Row("price") :: Row("qty") :: Row("year") :: Row("month") :: Nil) - - val message = intercept[NoSuchTableException] { - sql("SHOW COLUMNS IN badtable FROM default") - }.getMessage - assert(message.contains("'badtable' not found in database")) - } test("show partitions - show everything") { checkAnswer( diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala index 80e75aa898c3..13ceed7c79e3 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala @@ -167,7 +167,7 @@ abstract class HiveComparisonTest // and does not return it as a query answer. case _: SetCommand => Seq("0") case _: ExplainCommand => answer - case _: DescribeTableCommand | ShowColumnsCommand(_) => + case _: DescribeTableCommand | ShowColumnsCommand(_, _) => // Filter out non-deterministic lines and lines which do not have actual results but // can introduce problems because of the way Hive formats these lines. // Then, remove empty lines. Do not sort the results. From cb0691c01bdf11212d001dcf3e6675c8b36c49ff Mon Sep 17 00:00:00 2001 From: Dilip Biswal Date: Fri, 14 Oct 2016 14:11:23 -0700 Subject: [PATCH 2/4] Review comments. 
--- .../sql-tests/inputs/show_columns.sql | 20 ++-- .../sql-tests/results/show_columns.sql.out | 97 ++++++++++++------- .../apache/spark/sql/SQLQueryTestSuite.scala | 1 - .../sql/execution/command/DDLSuite.scala | 16 +-- 4 files changed, 87 insertions(+), 47 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql index dc99248bfe1a..fee73da5e069 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql @@ -1,3 +1,7 @@ +CREATE DATABASE showdb; + +USE showdb; + CREATE TABLE showcolumn1 (col1 int, `col 2` int); CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int); @@ -5,23 +9,27 @@ CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month in SHOW COLUMNS IN showcolumn1; -- qualified table name -SHOW COLUMNS IN default.showcolumn1; +SHOW COLUMNS IN showdb.showcolumn1; -- table name and database name -SHOW COLUMNS IN showcolumn1 FROM default; +SHOW COLUMNS IN showcolumn1 FROM showdb; -- partitioned table -SHOW COLUMNS IN showcolumn2 IN default; +SHOW COLUMNS IN showcolumn2 IN showdb; -- Non-existent table. Raise an error in this case -SHOW COLUMNS IN badtable FROM default; +SHOW COLUMNS IN badtable FROM showdb; -- database in table identifier and database name in different case -SHOW COLUMNS IN default.showcolumn1 from DEFAULT; +SHOW COLUMNS IN showdb.showcolumn1 from SHOWDB; -- different database name in table identifier and database name. -- Raise an error in this case. -SHOW COLUMNS IN default.showcolumn1 FROM baddb; +SHOW COLUMNS IN showdb.showcolumn1 FROM baddb; DROP TABLE showcolumn1; DROP TABLE showColumn2; + +use default; + +DROP DATABASE showdb; diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out index 2fd552400e0a..2020ecabf134 100644 --- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -1,9 +1,9 @@ -- Automatically generated by SQLQueryTestSuite --- Number of queries: 11 +-- Number of queries: 15 -- !query 0 -CREATE TABLE showcolumn1 (col1 int, `col 2` int) +CREATE DATABASE showdb -- !query 0 schema struct<> -- !query 0 output @@ -11,7 +11,7 @@ struct<> -- !query 1 -CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int) +USE showdb -- !query 1 schema struct<> -- !query 1 output @@ -19,82 +19,113 @@ struct<> -- !query 2 -SHOW COLUMNS IN showcolumn1 +CREATE TABLE showcolumn1 (col1 int, `col 2` int) -- !query 2 schema -struct +struct<> -- !query 2 output -col1 -col 2 + -- !query 3 -SHOW COLUMNS IN default.showcolumn1 +CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int) -- !query 3 schema -struct +struct<> -- !query 3 output -col1 -col 2 + -- !query 4 -SHOW COLUMNS IN showcolumn1 FROM default +SHOW COLUMNS IN showcolumn1 -- !query 4 schema struct -- !query 4 output -col1 col 2 +col1 -- !query 5 -SHOW COLUMNS IN showcolumn2 IN default +SHOW COLUMNS IN showdb.showcolumn1 -- !query 5 schema struct -- !query 5 output -price -qty -year -month +col 2 +col1 -- !query 6 -SHOW COLUMNS IN badtable FROM default +SHOW COLUMNS IN showcolumn1 FROM showdb -- !query 6 schema -struct<> +struct -- !query 6 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException -Table or view 'badtable' not found in database 'default'; 
+col 2 +col1 -- !query 7 -SHOW COLUMNS IN default.showcolumn1 from DEFAULT +SHOW COLUMNS IN showcolumn2 IN showdb -- !query 7 schema struct -- !query 7 output -col1 -col 2 +month +price +qty +year -- !query 8 -SHOW COLUMNS IN default.showcolumn1 FROM baddb +SHOW COLUMNS IN badtable FROM showdb -- !query 8 schema struct<> -- !query 8 output -org.apache.spark.sql.AnalysisException -SHOW COLUMNS with conflicting databases: 'baddb' != 'default'; - +org.apache.spark.sql.catalyst.analysis.NoSuchTableException +Table or view 'badtable' not found in database 'showdb'; -- !query 9 -DROP TABLE showcolumn1 +SHOW COLUMNS IN showdb.showcolumn1 from SHOWDB -- !query 9 schema -struct<> +struct -- !query 9 output - +col 2 +col1 -- !query 10 -DROP TABLE showColumn2 +SHOW COLUMNS IN showdb.showcolumn1 FROM baddb -- !query 10 schema struct<> -- !query 10 output +org.apache.spark.sql.AnalysisException +SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb'; + + +-- !query 11 +DROP TABLE showcolumn1 +-- !query 11 schema +struct<> +-- !query 11 output + + + +-- !query 12 +DROP TABLE showColumn2 +-- !query 12 schema +struct<> +-- !query 12 output + + + +-- !query 13 +use default +-- !query 13 schema +struct<> +-- !query 13 output + + + +-- !query 14 +DROP DATABASE showdb +-- !query 14 schema +struct<> +-- !query 14 output diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index e9809048b815..14d6d1154844 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -208,7 +208,6 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext { // Returns true if the plan is supposed to be sorted. def isSorted(plan: LogicalPlan): Boolean = plan match { case _: Join | _: Aggregate | _: Generate | _: Sample | _: Distinct => false - case _: ShowColumnsCommand => true case PhysicalOperation(_, _, Sort(_, true, _)) => true case _ => plan.children.iterator.exists(isSorted) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index 552d097f3ae3..d728ba73ae17 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1718,13 +1718,15 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { // When case sensitivity is true, the user supplied database name in table identifier // should match the supplied database name in case sensitive way. 
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { - val tabName = "showcolumn" - withTable(tabName) { - sql(s"CREATE TABLE $tabName(col1 int, col2 string) USING parquet ") - val message = intercept[AnalysisException] { - sql(s"SHOW COLUMNS IN default.showcolumn FROM DEFAULT") - }.getMessage - assert(message.contains("SHOW COLUMNS with conflicting databases")) + withTempDatabase { db => + val tabName = s"$db.showcolumn" + withTable(tabName) { + sql(s"CREATE TABLE $tabName(col1 int, col2 string) USING parquet ") + val message = intercept[AnalysisException] { + sql(s"SHOW COLUMNS IN $db.showcolumn FROM ${db.toUpperCase}") + }.getMessage + assert(message.contains("SHOW COLUMNS with conflicting databases")) + } } } } From 586a6b42df2b71d532fd049318e0b955ff3766a1 Mon Sep 17 00:00:00 2001 From: Dilip Biswal Date: Tue, 18 Oct 2016 23:59:26 -0700 Subject: [PATCH 3/4] review comments --- .../spark/sql/execution/command/tables.scala | 8 +- .../sql-tests/inputs/show_columns.sql | 23 +++ .../sql-tests/results/show_columns.sql.out | 150 ++++++++++++++---- 3 files changed, 143 insertions(+), 38 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 95a5828f98a5..b1f58790bf9d 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -658,16 +658,12 @@ case class ShowColumnsCommand( AttributeReference("col_name", StringType, nullable = false)() :: Nil } - private def nameEqual(name1: String, name2: String, caseSensitive: Boolean): Boolean = { - if (caseSensitive) name1 == name2 else name1.equalsIgnoreCase(name2) - } - override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog - val caseSensitive = sparkSession.sessionState.conf.caseSensitiveAnalysis + val resolver = sparkSession.sessionState.conf.resolver val lookupTable = databaseName match { case None => tableName - case Some(db) if tableName.database.exists(!nameEqual(_, db, caseSensitive)) => + case Some(db) if tableName.database.exists(!resolver(_, db)) => throw new AnalysisException( s"SHOW COLUMNS with conflicting databases: '$db' != '${tableName.database.get}'") case Some(db) => TableIdentifier(tableName.identifier, Some(db)) diff --git a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql index fee73da5e069..389408225508 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/show_columns.sql @@ -4,6 +4,9 @@ USE showdb; CREATE TABLE showcolumn1 (col1 int, `col 2` int); CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int); +CREATE TEMPORARY VIEW showColumn3 (col3 int, `col 4` int) USING parquet; +CREATE GLOBAL TEMP VIEW showColumn4 AS SELECT 1 as col1, 'abc' as `col 5`; + -- only table name SHOW COLUMNS IN showcolumn1; @@ -27,8 +30,28 @@ SHOW COLUMNS IN showdb.showcolumn1 from SHOWDB; -- Raise an error in this case. 
SHOW COLUMNS IN showdb.showcolumn1 FROM baddb; +-- show column on temporary view +SHOW COLUMNS IN showcolumn3; + +-- error temp view can't be qualified with a database +SHOW COLUMNS IN showdb.showcolumn3; + +-- error temp view can't be qualified with a database +SHOW COLUMNS IN showcolumn3 FROM showdb; + +-- error global temp view needs to be qualified +SHOW COLUMNS IN showcolumn4; + +-- global temp view qualified with database +SHOW COLUMNS IN global_temp.showcolumn4; + +-- global temp view qualified with database +SHOW COLUMNS IN showcolumn4 FROM global_temp; + DROP TABLE showcolumn1; DROP TABLE showColumn2; +DROP VIEW showcolumn3; +DROP VIEW global_temp.showcolumn4; use default; diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out index 2020ecabf134..832e6e25bb2b 100644 --- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -1,5 +1,5 @@ -- Automatically generated by SQLQueryTestSuite --- Number of queries: 15 +-- Number of queries: 25 -- !query 0 @@ -35,25 +35,23 @@ struct<> -- !query 4 -SHOW COLUMNS IN showcolumn1 +CREATE TEMPORARY VIEW showColumn3 (col3 int, `col 4` int) USING parquet -- !query 4 schema -struct +struct<> -- !query 4 output -col 2 -col1 + -- !query 5 -SHOW COLUMNS IN showdb.showcolumn1 +CREATE GLOBAL TEMP VIEW showColumn4 AS SELECT 1 as col1, 'abc' as `col 5` -- !query 5 schema -struct +struct<> -- !query 5 output -col 2 -col1 + -- !query 6 -SHOW COLUMNS IN showcolumn1 FROM showdb +SHOW COLUMNS IN showcolumn1 -- !query 6 schema struct -- !query 6 output @@ -62,70 +60,158 @@ col1 -- !query 7 -SHOW COLUMNS IN showcolumn2 IN showdb +SHOW COLUMNS IN showdb.showcolumn1 -- !query 7 schema struct -- !query 7 output +col 2 +col1 + + +-- !query 8 +SHOW COLUMNS IN showcolumn1 FROM showdb +-- !query 8 schema +struct +-- !query 8 output +col 2 +col1 + + +-- !query 9 +SHOW COLUMNS IN showcolumn2 IN showdb +-- !query 9 schema +struct +-- !query 9 output month price qty year --- !query 8 +-- !query 10 SHOW COLUMNS IN badtable FROM showdb --- !query 8 schema +-- !query 10 schema struct<> --- !query 8 output +-- !query 10 output org.apache.spark.sql.catalyst.analysis.NoSuchTableException Table or view 'badtable' not found in database 'showdb'; --- !query 9 +-- !query 11 SHOW COLUMNS IN showdb.showcolumn1 from SHOWDB --- !query 9 schema +-- !query 11 schema struct --- !query 9 output +-- !query 11 output col 2 col1 --- !query 10 +-- !query 12 SHOW COLUMNS IN showdb.showcolumn1 FROM baddb --- !query 10 schema +-- !query 12 schema struct<> --- !query 10 output +-- !query 12 output org.apache.spark.sql.AnalysisException SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb'; --- !query 11 +-- !query 13 +SHOW COLUMNS IN showcolumn3 +-- !query 13 schema +struct +-- !query 13 output +col 4 +col3 + + +-- !query 14 +SHOW COLUMNS IN showdb.showcolumn3 +-- !query 14 schema +struct<> +-- !query 14 output +org.apache.spark.sql.catalyst.analysis.NoSuchTableException +Table or view 'showcolumn3' not found in database 'showdb'; + + +-- !query 15 +SHOW COLUMNS IN showcolumn3 FROM showdb +-- !query 15 schema +struct<> +-- !query 15 output +org.apache.spark.sql.catalyst.analysis.NoSuchTableException +Table or view 'showcolumn3' not found in database 'showdb'; + + +-- !query 16 +SHOW COLUMNS IN showcolumn4 +-- !query 16 schema +struct<> +-- !query 16 output 
+org.apache.spark.sql.catalyst.analysis.NoSuchTableException +Table or view 'showcolumn4' not found in database 'showdb'; + + +-- !query 17 +SHOW COLUMNS IN global_temp.showcolumn4 +-- !query 17 schema +struct +-- !query 17 output +col 5 +col1 + + +-- !query 18 +SHOW COLUMNS IN showcolumn4 FROM global_temp +-- !query 18 schema +struct +-- !query 18 output +col 5 +col1 + + +-- !query 19 DROP TABLE showcolumn1 --- !query 11 schema +-- !query 19 schema struct<> --- !query 11 output +-- !query 19 output --- !query 12 +-- !query 20 DROP TABLE showColumn2 --- !query 12 schema +-- !query 20 schema struct<> --- !query 12 output +-- !query 20 output --- !query 13 +-- !query 21 +DROP VIEW showcolumn3 +-- !query 21 schema +struct<> +-- !query 21 output + + + +-- !query 22 +DROP VIEW global_temp.showcolumn4 +-- !query 22 schema +struct<> +-- !query 22 output + + + +-- !query 23 use default --- !query 13 schema +-- !query 23 schema struct<> --- !query 13 output +-- !query 23 output --- !query 14 +-- !query 24 DROP DATABASE showdb --- !query 14 schema +-- !query 24 schema struct<> --- !query 14 output +-- !query 24 output From 15c568f2cddf28b9542c7d96339b356ed5c578f7 Mon Sep 17 00:00:00 2001 From: Dilip Biswal Date: Wed, 19 Oct 2016 22:03:05 -0700 Subject: [PATCH 4/4] fix indent --- .../scala/org/apache/spark/sql/execution/command/DDLSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index d728ba73ae17..49a2436e9742 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1723,7 +1723,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { withTable(tabName) { sql(s"CREATE TABLE $tabName(col1 int, col2 string) USING parquet ") val message = intercept[AnalysisException] { - sql(s"SHOW COLUMNS IN $db.showcolumn FROM ${db.toUpperCase}") + sql(s"SHOW COLUMNS IN $db.showcolumn FROM ${db.toUpperCase}") }.getMessage assert(message.contains("SHOW COLUMNS with conflicting databases")) }
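
For readers following the patch series, the sketch below distills the behavior the final revision arrives at in ShowColumnsCommand.run: the database-conflict check moves out of the parser and, at execution time, compares the optional FROM/IN database against the database embedded in the table identifier using a case-sensitivity-aware resolver. This is a minimal, self-contained Scala illustration rather than the real Spark code: the simplified TableIdentifier case class, the resolver helper, and IllegalArgumentException (standing in for Spark's AnalysisException) are illustrative assumptions, not the actual Spark classes.

    object ShowColumnsConflictSketch {

      // Stand-in for Spark's TableIdentifier: a table name plus an optional database.
      final case class TableIdentifier(identifier: String, database: Option[String])

      // Mirrors the idea behind SQLConf.resolver: plain equality when case-sensitive
      // analysis is enabled, case-insensitive comparison otherwise.
      def resolver(caseSensitive: Boolean): (String, String) => Boolean =
        if (caseSensitive) {
          (a: String, b: String) => a == b
        } else {
          (a: String, b: String) => a.equalsIgnoreCase(b)
        }

      // Returns the identifier to look up, or fails when the optional FROM/IN database
      // conflicts with the database already embedded in the table identifier.
      // IllegalArgumentException stands in for Spark's AnalysisException.
      def resolveLookupTable(
          databaseName: Option[String],
          tableName: TableIdentifier,
          caseSensitive: Boolean): TableIdentifier = {
        val resolve = resolver(caseSensitive)
        databaseName match {
          case None => tableName
          case Some(db) if tableName.database.exists(d => !resolve(d, db)) =>
            throw new IllegalArgumentException(
              s"SHOW COLUMNS with conflicting databases: '$db' != '${tableName.database.get}'")
          case Some(db) => TableIdentifier(tableName.identifier, Some(db))
        }
      }

      def main(args: Array[String]): Unit = {
        val table = TableIdentifier("showcolumn1", Some("showdb"))
        // Case-insensitive (the default): 'SHOWDB' matches 'showdb', so the lookup succeeds.
        println(resolveLookupTable(Some("SHOWDB"), table, caseSensitive = false))
        // Case-sensitive: the same pair now conflicts and the check throws.
        try resolveLookupTable(Some("SHOWDB"), table, caseSensitive = true)
        catch { case e: IllegalArgumentException => println(e.getMessage) }
      }
    }

Running main shows the two behaviors the patch's tests exercise: under the default case-insensitive setting the identifier resolves even when the database names differ only in case (as in the show_columns.sql query "SHOW COLUMNS IN showdb.showcolumn1 from SHOWDB"), while with case sensitivity enabled the same pair produces the "SHOW COLUMNS with conflicting databases" error asserted in the DDLSuite negative test.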