From 8e92e6c26f61ac1999d76993756b8c696e487165 Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Sat, 6 Jan 2018 03:16:20 +0530 Subject: [PATCH 1/7] [SPARK-22954][SQL] Fixed the problem where temporary views were not looked up by session state's catalog. --- .../spark/sql/execution/command/AnalyzeTableCommand.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala index 58b53e8b1c55..8cbad694f09f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala @@ -33,7 +33,7 @@ case class AnalyzeTableCommand( val sessionState = sparkSession.sessionState val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase) val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db)) - val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB) + val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB) if (tableMeta.tableType == CatalogTableType.VIEW) { throw new AnalysisException("ANALYZE TABLE is not supported on views.") } From 4de6ac840a6b8cbd7c5795b023783b626b21a9f4 Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Sun, 7 Jan 2018 00:31:37 +0530 Subject: [PATCH 2/7] [SPARK-22954][SQL] Fixed database name lookup so that None is returned instead of Some('default') --- .../spark/sql/execution/command/AnalyzeTableCommand.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala index 8cbad694f09f..3006387a4f2b 100644 --- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala @@ -31,8 +31,8 @@ case class AnalyzeTableCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val sessionState = sparkSession.sessionState - val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase) - val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db)) + val db = tableIdent.database + val tableIdentWithDB = TableIdentifier(tableIdent.table, db) val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB) if (tableMeta.tableType == CatalogTableType.VIEW) { throw new AnalysisException("ANALYZE TABLE is not supported on views.") From 98b8711cdd26c25fd774f77f46fc9e395ee2cffe Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Sun, 7 Jan 2018 15:39:33 +0530 Subject: [PATCH 3/7] [SPARK-22954][SQL] Fixed the test to test for AnalysisException instead of NoSuchTableException --- .../apache/spark/sql/execution/SQLViewSuite.scala | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index 08a4a21b20f6..39c418e24a11 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -20,6 +20,7 @@ package org.apache.spark.sql.execution import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.NoSuchTableException +import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.test.{SharedSQLContext, SQLTestUtils} class SimpleSQLViewSuite extends SQLViewSuite with SharedSQLContext @@ -154,11 +155,17 @@ abstract class SQLViewSuite extends QueryTest with 
SQLTestUtils { assertNoSuchTable(s"TRUNCATE TABLE $viewName") assertNoSuchTable(s"SHOW CREATE TABLE $viewName") assertNoSuchTable(s"SHOW PARTITIONS $viewName") - assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS") - assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id") + assertAnalysisException(s"ANALYZE TABLE $viewName COMPUTE STATISTICS") + assertAnalysisException(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id") } } - + + private def assertAnalysisException(query: String): Unit = { + intercept[AnalysisException] { + sql(query) + } + } + private def assertNoSuchTable(query: String): Unit = { intercept[NoSuchTableException] { sql(query) From 0e873e5fcfe0858d869d9cb9cf63597c6746b734 Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Sun, 21 Jan 2018 10:44:53 +0530 Subject: [PATCH 4/7] [SPARK-22954][SQL] Fix the problem for other variants of analyze commands - AnalyzeColumn and AnalyzePartition. Fix tests to check that the error messages are proper. 
--- .../command/AnalyzeColumnCommand.scala | 6 +++--- .../command/AnalyzePartitionCommand.scala | 9 +++++---- .../spark/sql/execution/SQLViewSuite.scala | 20 +++++++++++++------ 3 files changed, 22 insertions(+), 13 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala index 1122522ccb4c..69ba96f4f0e6 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala @@ -39,9 +39,9 @@ case class AnalyzeColumnCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val sessionState = sparkSession.sessionState - val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase) - val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db)) - val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB) + val db = tableIdent.database + val tableIdentWithDB = TableIdentifier(tableIdent.table, db) + val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB) if (tableMeta.tableType == CatalogTableType.VIEW) { throw new AnalysisException("ANALYZE TABLE is not supported on views.") } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala index 5b54b2270b5e..3f09b0ad389e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala @@ -75,9 +75,9 @@ case class AnalyzePartitionCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val sessionState = sparkSession.sessionState - val db = 
tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase) - val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db)) - val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB) + val db = tableIdent.database + val tableIdentWithDB = TableIdentifier(tableIdent.table, db) + val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB) if (tableMeta.tableType == CatalogTableType.VIEW) { throw new AnalysisException("ANALYZE TABLE is not supported on views.") } @@ -88,7 +88,8 @@ case class AnalyzePartitionCommand( if (partitions.isEmpty) { if (partitionValueSpec.isDefined) { - throw new NoSuchPartitionException(db, tableIdent.table, partitionValueSpec.get) + throw new NoSuchPartitionException(db.getOrElse(sessionState.catalog.getCurrentDatabase), + tableIdent.table, partitionValueSpec.get) } else { // the user requested to analyze all partitions for a table which has no partitions // return normally, since there is nothing to do diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index 39c418e24a11..543a9f1a65e8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -155,14 +155,22 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { assertNoSuchTable(s"TRUNCATE TABLE $viewName") assertNoSuchTable(s"SHOW CREATE TABLE $viewName") assertNoSuchTable(s"SHOW PARTITIONS $viewName") - assertAnalysisException(s"ANALYZE TABLE $viewName COMPUTE STATISTICS") - assertAnalysisException(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id") } } - - private def assertAnalysisException(query: String): Unit = { - intercept[AnalysisException] { - sql(query) + + test("SPARK-22954 - Issue AnalysisException when analysis is run on view") { + val viewName = "testView" + val 
analyzeNotSupportedOnViewsMsg = "ANALYZE TABLE is not supported on views." + withTempView(viewName) { + spark.range(10).createTempView(viewName) + + assert(intercept[AnalysisException] { + sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS") + }.getMessage.contains(analyzeNotSupportedOnViewsMsg)) + + assert(intercept[AnalysisException] { + sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id") + }.getMessage.contains(analyzeNotSupportedOnViewsMsg)) } } From 77e4d6db1d647db7a7b2c13c922bab0bdd3e53fc Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Sun, 21 Jan 2018 11:51:20 +0530 Subject: [PATCH 5/7] [SPARK-22954][SQL] Reorganize imports to satisfy Scalastyle. --- .../scala/org/apache/spark/sql/execution/SQLViewSuite.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index 543a9f1a65e8..e8c165561d73 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -17,10 +17,9 @@ package org.apache.spark.sql.execution -import org.apache.spark.sql._ +import org.apache.spark.sql.{AnalysisException, _} import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.NoSuchTableException -import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.test.{SharedSQLContext, SQLTestUtils} class SimpleSQLViewSuite extends SQLViewSuite with SharedSQLContext From 4d4e3fe9cced139efc8f162a7dddda15527b641c Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Sun, 21 Jan 2018 12:06:33 +0530 Subject: [PATCH 6/7] [SPARK-22954][SQL] Removed some extraneous whitespace. 
--- .../scala/org/apache/spark/sql/execution/SQLViewSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index e8c165561d73..b488e8d7a129 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -172,7 +172,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { }.getMessage.contains(analyzeNotSupportedOnViewsMsg)) } } - + private def assertNoSuchTable(query: String): Unit = { intercept[NoSuchTableException] { sql(query) From 4c8645623f3b89c9f7b1bc7809c6b9f5a95d2389 Mon Sep 17 00:00:00 2001 From: Suchith J N Date: Tue, 30 Jan 2018 09:26:43 +0530 Subject: [PATCH 7/7] [SPARK-22954][SQL] Add test case to make sure that calling analyze partition on a view throws AnalysisException stating that it isn't supported on views. --- .../scala/org/apache/spark/sql/execution/SQLViewSuite.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index b488e8d7a129..62344b631ee9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -157,7 +157,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } } - test("SPARK-22954 - Issue AnalysisException when analysis is run on view") { + test("SPARK-22954 - Issue AnalysisException when ANALYZE is run on view") { val viewName = "testView" val analyzeNotSupportedOnViewsMsg = "ANALYZE TABLE is not supported on views." 
withTempView(viewName) { @@ -170,6 +170,10 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { assert(intercept[AnalysisException] { sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id") }.getMessage.contains(analyzeNotSupportedOnViewsMsg)) + + assert(intercept[AnalysisException] { + sql(s"ANALYZE TABLE $viewName PARTITION (a) COMPUTE STATISTICS") + }.getMessage.contains(analyzeNotSupportedOnViewsMsg)) } }