diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala
index 1122522ccb4cb..69ba96f4f0e61 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala
@@ -39,9 +39,9 @@ case class AnalyzeColumnCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val sessionState = sparkSession.sessionState
-    val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase)
-    val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db))
-    val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB)
+    val db = tableIdent.database
+    val tableIdentWithDB = TableIdentifier(tableIdent.table, db)
+    val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB)
     if (tableMeta.tableType == CatalogTableType.VIEW) {
       throw new AnalysisException("ANALYZE TABLE is not supported on views.")
     }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala
index 5b54b2270b5ec..3f09b0ad389e5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala
@@ -75,9 +75,9 @@ case class AnalyzePartitionCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val sessionState = sparkSession.sessionState
-    val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase)
-    val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db))
-    val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB)
+    val db = tableIdent.database
+    val tableIdentWithDB = TableIdentifier(tableIdent.table, db)
+    val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB)
     if (tableMeta.tableType == CatalogTableType.VIEW) {
       throw new AnalysisException("ANALYZE TABLE is not supported on views.")
     }
@@ -88,7 +88,8 @@ case class AnalyzePartitionCommand(
 
     if (partitions.isEmpty) {
       if (partitionValueSpec.isDefined) {
-        throw new NoSuchPartitionException(db, tableIdent.table, partitionValueSpec.get)
+        throw new NoSuchPartitionException(db.getOrElse(sessionState.catalog.getCurrentDatabase),
+          tableIdent.table, partitionValueSpec.get)
       } else {
         // the user requested to analyze all partitions for a table which has no partitions
         // return normally, since there is nothing to do
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala
index 58b53e8b1c551..3006387a4f2b3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeTableCommand.scala
@@ -31,9 +31,9 @@ case class AnalyzeTableCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val sessionState = sparkSession.sessionState
-    val db = tableIdent.database.getOrElse(sessionState.catalog.getCurrentDatabase)
-    val tableIdentWithDB = TableIdentifier(tableIdent.table, Some(db))
-    val tableMeta = sessionState.catalog.getTableMetadata(tableIdentWithDB)
+    val db = tableIdent.database
+    val tableIdentWithDB = TableIdentifier(tableIdent.table, db)
+    val tableMeta = sessionState.catalog.getTempViewOrPermanentTableMetadata(tableIdentWithDB)
     if (tableMeta.tableType == CatalogTableType.VIEW) {
       throw new AnalysisException("ANALYZE TABLE is not supported on views.")
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 08a4a21b20f61..62344b631ee9f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution
 
-import org.apache.spark.sql._
+import org.apache.spark.sql.{AnalysisException, _}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.test.{SharedSQLContext, SQLTestUtils}
@@ -154,8 +154,26 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
       assertNoSuchTable(s"TRUNCATE TABLE $viewName")
       assertNoSuchTable(s"SHOW CREATE TABLE $viewName")
       assertNoSuchTable(s"SHOW PARTITIONS $viewName")
-      assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
-      assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id")
+    }
+  }
+
+  test("SPARK-22954 - Issue AnalysisException when ANALYZE is run on view") {
+    val viewName = "testView"
+    val analyzeNotSupportedOnViewsMsg = "ANALYZE TABLE is not supported on views."
+    withTempView(viewName) {
+      spark.range(10).createTempView(viewName)
+
+      assert(intercept[AnalysisException] {
+        sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
+      }.getMessage.contains(analyzeNotSupportedOnViewsMsg))
+
+      assert(intercept[AnalysisException] {
+        sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id")
+      }.getMessage.contains(analyzeNotSupportedOnViewsMsg))
+
+      assert(intercept[AnalysisException] {
+        sql(s"ANALYZE TABLE $viewName PARTITION (a) COMPUTE STATISTICS")
+      }.getMessage.contains(analyzeNotSupportedOnViewsMsg))
     }
   }
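
For context, a minimal sketch of the user-visible effect of this patch, in spark-shell style (not part of the patch itself; the exact "before" message wording is approximate):

    // Given a temporary view:
    spark.range(10).createTempView("testView")

    // Before this patch, the ANALYZE commands resolved the identifier with
    // getTableMetadata, which only knows permanent catalog tables, so the
    // statement failed with a misleading lookup error, roughly:
    //   NoSuchTableException: Table or view 'testview' not found in database 'default'
    //
    // After this patch, getTempViewOrPermanentTableMetadata resolves the temp
    // view first, so the same statement fails with the intended error:
    //   AnalysisException: ANALYZE TABLE is not supported on views.
    spark.sql("ANALYZE TABLE testView COMPUTE STATISTICS")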