Skip to content

Commit 0ad35ba

Browse files
imback82 authored and cloud-fan committed
[SPARK-33321][SQL] Migrate ANALYZE TABLE commands to use UnresolvedTableOrView to resolve the identifier
### What changes were proposed in this pull request? This PR proposes to migrate `ANALYZE TABLE` and `ANALYZE TABLE ... FOR COLUMNS` to use `UnresolvedTableOrView` to resolve the table/view identifier. This allows consistent resolution rules (temp view first, etc.) to be applied for both v1/v2 commands. More info about the consistent resolution rule proposal can be found in [JIRA](https://issues.apache.org/jira/browse/SPARK-29900) or [proposal doc](https://docs.google.com/document/d/1hvLjGA8y_W_hhilpngXVub1Ebv8RsMap986nENCFnrg/edit?usp=sharing). Note that `ANALYZE TABLE` is not supported for v2 tables. ### Why are the changes needed? The changes allow consistent resolution behavior when resolving the table/view identifier. For example, the following is the current behavior: ```scala sql("create temporary view t as select 1") sql("create database db") sql("create table db.t using csv as select 1") sql("use db") sql("ANALYZE TABLE t compute statistics") // Succeeds ``` With this change, ANALYZE TABLE above fails with the following: ``` org.apache.spark.sql.AnalysisException: t is a temp view not table or permanent view.; line 1 pos 0 at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveTempViews$$anonfun$apply$7.$anonfun$applyOrElse$40(Analyzer.scala:872) at scala.Option.map(Option.scala:230) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveTempViews$$anonfun$apply$7.applyOrElse(Analyzer.scala:870) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveTempViews$$anonfun$apply$7.applyOrElse(Analyzer.scala:856) ``` , which is expected since temporary view is resolved first and ANALYZE TABLE doesn't support a temporary view. ### Does this PR introduce _any_ user-facing change? After this PR, `ANALYZE TABLE t` is resolved to a temp view `t` instead of table `db.t`. ### How was this patch tested? Updated existing tests. 
Closes apache#30229 from imback82/parse_v1table. Authored-by: Terry Kim <[email protected]> Signed-off-by: Wenchen Fan <[email protected]>
1 parent 1740b29 commit 0ad35ba

File tree

12 files changed

+120
-74
lines changed

12 files changed

+120
-74
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -865,9 +865,14 @@ class Analyzer(
865865
u.failAnalysis(s"${ident.quoted} is a temp view not table.")
866866
}
867867
u
868-
case u @ UnresolvedTableOrView(ident) =>
868+
case u @ UnresolvedTableOrView(ident, allowTempView) =>
869869
lookupTempView(ident)
870-
.map(_ => ResolvedView(ident.asIdentifier, isTemp = true))
870+
.map { _ =>
871+
if (!allowTempView) {
872+
u.failAnalysis(s"${ident.quoted} is a temp view not table or permanent view.")
873+
}
874+
ResolvedView(ident.asIdentifier, isTemp = true)
875+
}
871876
.getOrElse(u)
872877
}
873878

@@ -926,7 +931,7 @@ class Analyzer(
926931
.map(ResolvedTable(catalog.asTableCatalog, ident, _))
927932
.getOrElse(u)
928933

929-
case u @ UnresolvedTableOrView(NonSessionCatalogAndIdentifier(catalog, ident)) =>
934+
case u @ UnresolvedTableOrView(NonSessionCatalogAndIdentifier(catalog, ident), _) =>
930935
CatalogV2Util.loadTable(catalog, ident)
931936
.map(ResolvedTable(catalog.asTableCatalog, ident, _))
932937
.getOrElse(u)
@@ -1026,7 +1031,7 @@ class Analyzer(
10261031
case table => table
10271032
}.getOrElse(u)
10281033

1029-
case u @ UnresolvedTableOrView(identifier) =>
1034+
case u @ UnresolvedTableOrView(identifier, _) =>
10301035
lookupTableOrView(identifier).getOrElse(u)
10311036
}
10321037

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,9 @@ case class UnresolvedTable(multipartIdentifier: Seq[String]) extends LeafNode {
4646
* Holds the name of a table or view that has yet to be looked up in a catalog. It will
4747
* be resolved to [[ResolvedTable]] or [[ResolvedView]] during analysis.
4848
*/
49-
case class UnresolvedTableOrView(multipartIdentifier: Seq[String]) extends LeafNode {
49+
case class UnresolvedTableOrView(
50+
multipartIdentifier: Seq[String],
51+
allowTempView: Boolean = true) extends LeafNode {
5052
override lazy val resolved: Boolean = false
5153
override def output: Seq[Attribute] = Nil
5254
}

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3216,7 +3216,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
32163216
}
32173217

32183218
/**
3219-
* Create an [[AnalyzeTableStatement]], or an [[AnalyzeColumnStatement]].
3219+
* Create an [[AnalyzeTable]], or an [[AnalyzeColumn]].
32203220
* Example SQL for analyzing a table or a set of partitions :
32213221
* {{{
32223222
* ANALYZE TABLE multi_part_name [PARTITION (partcol1[=val1], partcol2[=val2], ...)]
@@ -3249,18 +3249,23 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
32493249
val tableName = visitMultipartIdentifier(ctx.multipartIdentifier())
32503250
if (ctx.ALL() != null) {
32513251
checkPartitionSpec()
3252-
AnalyzeColumnStatement(tableName, None, allColumns = true)
3252+
AnalyzeColumn(UnresolvedTableOrView(tableName), None, allColumns = true)
32533253
} else if (ctx.identifierSeq() == null) {
32543254
val partitionSpec = if (ctx.partitionSpec != null) {
32553255
visitPartitionSpec(ctx.partitionSpec)
32563256
} else {
32573257
Map.empty[String, Option[String]]
32583258
}
3259-
AnalyzeTableStatement(tableName, partitionSpec, noScan = ctx.identifier != null)
3259+
AnalyzeTable(
3260+
UnresolvedTableOrView(tableName, allowTempView = false),
3261+
partitionSpec,
3262+
noScan = ctx.identifier != null)
32603263
} else {
32613264
checkPartitionSpec()
3262-
AnalyzeColumnStatement(
3263-
tableName, Option(visitIdentifierSeq(ctx.identifierSeq())), allColumns = false)
3265+
AnalyzeColumn(
3266+
UnresolvedTableOrView(tableName),
3267+
Option(visitIdentifierSeq(ctx.identifierSeq())),
3268+
allColumns = false)
32643269
}
32653270
}
32663271

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -342,25 +342,6 @@ case class CreateNamespaceStatement(
342342
*/
343343
case class UseStatement(isNamespaceSet: Boolean, nameParts: Seq[String]) extends ParsedStatement
344344

345-
/**
346-
* An ANALYZE TABLE statement, as parsed from SQL.
347-
*/
348-
case class AnalyzeTableStatement(
349-
tableName: Seq[String],
350-
partitionSpec: Map[String, Option[String]],
351-
noScan: Boolean) extends ParsedStatement
352-
353-
/**
354-
* An ANALYZE TABLE FOR COLUMNS statement, as parsed from SQL.
355-
*/
356-
case class AnalyzeColumnStatement(
357-
tableName: Seq[String],
358-
columnNames: Option[Seq[String]],
359-
allColumns: Boolean) extends ParsedStatement {
360-
require(columnNames.isDefined ^ allColumns, "Parameter `columnNames` or `allColumns` are " +
361-
"mutually exclusive. Only one of them should be specified.")
362-
}
363-
364345
/**
365346
* A REPAIR TABLE statement, as parsed from SQL
366347
*/

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -567,3 +567,25 @@ case class ShowFunctions(
567567
pattern: Option[String]) extends Command {
568568
override def children: Seq[LogicalPlan] = child.toSeq
569569
}
570+
571+
/**
572+
* The logical plan of the ANALYZE TABLE command that works for v2 catalogs.
573+
*/
574+
case class AnalyzeTable(
575+
child: LogicalPlan,
576+
partitionSpec: Map[String, Option[String]],
577+
noScan: Boolean) extends Command {
578+
override def children: Seq[LogicalPlan] = child :: Nil
579+
}
580+
581+
/**
582+
* The logical plan of the ANALYZE TABLE FOR COLUMNS command that works for v2 catalogs.
583+
*/
584+
case class AnalyzeColumn(
585+
child: LogicalPlan,
586+
columnNames: Option[Seq[String]],
587+
allColumns: Boolean) extends Command {
588+
require(columnNames.isDefined ^ allColumns, "Parameter `columnNames` or `allColumns` are " +
589+
"mutually exclusive. Only one of them should be specified.")
590+
override def children: Seq[LogicalPlan] = child :: Nil
591+
}

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 39 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1502,42 +1502,59 @@ class DDLParserSuite extends AnalysisTest {
15021502

15031503
test("analyze table statistics") {
15041504
comparePlans(parsePlan("analyze table a.b.c compute statistics"),
1505-
AnalyzeTableStatement(Seq("a", "b", "c"), Map.empty, noScan = false))
1505+
AnalyzeTable(
1506+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1507+
Map.empty, noScan = false))
15061508
comparePlans(parsePlan("analyze table a.b.c compute statistics noscan"),
1507-
AnalyzeTableStatement(Seq("a", "b", "c"), Map.empty, noScan = true))
1509+
AnalyzeTable(
1510+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1511+
Map.empty, noScan = true))
15081512
comparePlans(parsePlan("analyze table a.b.c partition (a) compute statistics nOscAn"),
1509-
AnalyzeTableStatement(Seq("a", "b", "c"), Map("a" -> None), noScan = true))
1513+
AnalyzeTable(
1514+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1515+
Map("a" -> None), noScan = true))
15101516

15111517
// Partitions specified
15121518
comparePlans(
15131519
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr=11) COMPUTE STATISTICS"),
1514-
AnalyzeTableStatement(
1515-
Seq("a", "b", "c"), Map("ds" -> Some("2008-04-09"), "hr" -> Some("11")), noScan = false))
1520+
AnalyzeTable(
1521+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1522+
Map("ds" -> Some("2008-04-09"), "hr" -> Some("11")), noScan = false))
15161523
comparePlans(
15171524
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr=11) COMPUTE STATISTICS noscan"),
1518-
AnalyzeTableStatement(
1519-
Seq("a", "b", "c"), Map("ds" -> Some("2008-04-09"), "hr" -> Some("11")), noScan = true))
1525+
AnalyzeTable(
1526+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1527+
Map("ds" -> Some("2008-04-09"), "hr" -> Some("11")), noScan = true))
15201528
comparePlans(
15211529
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09') COMPUTE STATISTICS noscan"),
1522-
AnalyzeTableStatement(Seq("a", "b", "c"), Map("ds" -> Some("2008-04-09")), noScan = true))
1530+
AnalyzeTable(
1531+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1532+
Map("ds" -> Some("2008-04-09")), noScan = true))
15231533
comparePlans(
15241534
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr) COMPUTE STATISTICS"),
1525-
AnalyzeTableStatement(
1526-
Seq("a", "b", "c"), Map("ds" -> Some("2008-04-09"), "hr" -> None), noScan = false))
1535+
AnalyzeTable(
1536+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1537+
Map("ds" -> Some("2008-04-09"), "hr" -> None), noScan = false))
15271538
comparePlans(
15281539
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds='2008-04-09', hr) COMPUTE STATISTICS noscan"),
1529-
AnalyzeTableStatement(
1530-
Seq("a", "b", "c"), Map("ds" -> Some("2008-04-09"), "hr" -> None), noScan = true))
1540+
AnalyzeTable(
1541+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1542+
Map("ds" -> Some("2008-04-09"), "hr" -> None), noScan = true))
15311543
comparePlans(
15321544
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds, hr=11) COMPUTE STATISTICS noscan"),
1533-
AnalyzeTableStatement(
1534-
Seq("a", "b", "c"), Map("ds" -> None, "hr" -> Some("11")), noScan = true))
1545+
AnalyzeTable(
1546+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1547+
Map("ds" -> None, "hr" -> Some("11")), noScan = true))
15351548
comparePlans(
15361549
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds, hr) COMPUTE STATISTICS"),
1537-
AnalyzeTableStatement(Seq("a", "b", "c"), Map("ds" -> None, "hr" -> None), noScan = false))
1550+
AnalyzeTable(
1551+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1552+
Map("ds" -> None, "hr" -> None), noScan = false))
15381553
comparePlans(
15391554
parsePlan("ANALYZE TABLE a.b.c PARTITION(ds, hr) COMPUTE STATISTICS noscan"),
1540-
AnalyzeTableStatement(Seq("a", "b", "c"), Map("ds" -> None, "hr" -> None), noScan = true))
1555+
AnalyzeTable(
1556+
UnresolvedTableOrView(Seq("a", "b", "c"), allowTempView = false),
1557+
Map("ds" -> None, "hr" -> None), noScan = true))
15411558

15421559
intercept("analyze table a.b.c compute statistics xxxx",
15431560
"Expected `NOSCAN` instead of `xxxx`")
@@ -1550,7 +1567,8 @@ class DDLParserSuite extends AnalysisTest {
15501567

15511568
comparePlans(
15521569
parsePlan("ANALYZE TABLE a.b.c COMPUTE STATISTICS FOR COLUMNS key, value"),
1553-
AnalyzeColumnStatement(Seq("a", "b", "c"), Option(Seq("key", "value")), allColumns = false))
1570+
AnalyzeColumn(
1571+
UnresolvedTableOrView(Seq("a", "b", "c")), Option(Seq("key", "value")), allColumns = false))
15541572

15551573
// Partition specified - should be ignored
15561574
comparePlans(
@@ -1559,7 +1577,8 @@ class DDLParserSuite extends AnalysisTest {
15591577
|ANALYZE TABLE a.b.c PARTITION(ds='2017-06-10')
15601578
|COMPUTE STATISTICS FOR COLUMNS key, value
15611579
""".stripMargin),
1562-
AnalyzeColumnStatement(Seq("a", "b", "c"), Option(Seq("key", "value")), allColumns = false))
1580+
AnalyzeColumn(
1581+
UnresolvedTableOrView(Seq("a", "b", "c")), Option(Seq("key", "value")), allColumns = false))
15631582

15641583
// Partition specified should be ignored in case of COMPUTE STATISTICS FOR ALL COLUMNS
15651584
comparePlans(
@@ -1568,7 +1587,8 @@ class DDLParserSuite extends AnalysisTest {
15681587
|ANALYZE TABLE a.b.c PARTITION(ds='2017-06-10')
15691588
|COMPUTE STATISTICS FOR ALL COLUMNS
15701589
""".stripMargin),
1571-
AnalyzeColumnStatement(Seq("a", "b", "c"), None, allColumns = true))
1590+
AnalyzeColumn(
1591+
UnresolvedTableOrView(Seq("a", "b", "c")), None, allColumns = true))
15721592

15731593
intercept("ANALYZE TABLE a.b.c COMPUTE STATISTICS FOR ALL COLUMNS key, value",
15741594
"mismatched input 'key' expecting {<EOF>, ';'}")

sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala

Lines changed: 17 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,11 @@ import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
2222
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, CatalogUtils}
2323
import org.apache.spark.sql.catalyst.plans.logical._
2424
import org.apache.spark.sql.catalyst.rules.Rule
25-
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, CatalogV2Util, LookupCatalog, SupportsNamespaces, TableCatalog, TableChange, V1Table}
25+
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, CatalogV2Util, Identifier, LookupCatalog, SupportsNamespaces, TableCatalog, TableChange, V1Table}
2626
import org.apache.spark.sql.connector.expressions.Transform
2727
import org.apache.spark.sql.execution.command._
2828
import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource}
2929
import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
30-
import org.apache.spark.sql.internal.SQLConf
3130
import org.apache.spark.sql.types.{HIVE_TYPE_STRING, HiveStringType, MetadataBuilder, StructField, StructType}
3231

3332
/**
@@ -255,19 +254,11 @@ class ResolveSessionCatalog(
255254
case RenameTableStatement(TempViewOrV1Table(oldName), newName, isView) =>
256255
AlterTableRenameCommand(oldName.asTableIdentifier, newName.asTableIdentifier, isView)
257256

258-
case DescribeRelation(r @ ResolvedTable(_, ident, _: V1Table), partitionSpec, isExtended)
259-
if isSessionCatalog(r.catalog) =>
260-
DescribeTableCommand(ident.asTableIdentifier, partitionSpec, isExtended)
261-
262257
// Use v1 command to describe (temp) view, as v2 catalog doesn't support view yet.
263-
case DescribeRelation(ResolvedView(ident, _), partitionSpec, isExtended) =>
258+
case DescribeRelation(ResolvedV1TableOrViewIdentifier(ident), partitionSpec, isExtended) =>
264259
DescribeTableCommand(ident.asTableIdentifier, partitionSpec, isExtended)
265260

266-
case DescribeColumn(r @ ResolvedTable(_, _, _: V1Table), colNameParts, isExtended)
267-
if isSessionCatalog(r.catalog) =>
268-
DescribeColumnCommand(r.identifier.asTableIdentifier, colNameParts, isExtended)
269-
270-
case DescribeColumn(ResolvedView(ident, _), colNameParts, isExtended) =>
261+
case DescribeColumn(ResolvedV1TableOrViewIdentifier(ident), colNameParts, isExtended) =>
271262
DescribeColumnCommand(ident.asTableIdentifier, colNameParts, isExtended)
272263

273264
// For CREATE TABLE [AS SELECT], we should use the v1 command if the catalog is resolved to the
@@ -419,17 +410,16 @@ class ResolveSessionCatalog(
419410
}
420411
ShowTablesCommand(db, Some(pattern), true, partitionsSpec)
421412

422-
case AnalyzeTableStatement(tbl, partitionSpec, noScan) =>
423-
val v1TableName = parseV1Table(tbl, "ANALYZE TABLE")
413+
// ANALYZE TABLE works on permanent views if the views are cached.
414+
case AnalyzeTable(ResolvedV1TableOrViewIdentifier(ident), partitionSpec, noScan) =>
424415
if (partitionSpec.isEmpty) {
425-
AnalyzeTableCommand(v1TableName.asTableIdentifier, noScan)
416+
AnalyzeTableCommand(ident.asTableIdentifier, noScan)
426417
} else {
427-
AnalyzePartitionCommand(v1TableName.asTableIdentifier, partitionSpec, noScan)
418+
AnalyzePartitionCommand(ident.asTableIdentifier, partitionSpec, noScan)
428419
}
429420

430-
case AnalyzeColumnStatement(tbl, columnNames, allColumns) =>
431-
val v1TableName = parseTempViewOrV1Table(tbl, "ANALYZE TABLE")
432-
AnalyzeColumnCommand(v1TableName.asTableIdentifier, columnNames, allColumns)
421+
case AnalyzeColumn(ResolvedV1TableOrViewIdentifier(ident), columnNames, allColumns) =>
422+
AnalyzeColumnCommand(ident.asTableIdentifier, columnNames, allColumns)
433423

434424
case RepairTableStatement(tbl) =>
435425
val v1TableName = parseV1Table(tbl, "MSCK REPAIR TABLE")
@@ -706,6 +696,14 @@ class ResolveSessionCatalog(
706696
}
707697
}
708698

699+
object ResolvedV1TableOrViewIdentifier {
700+
def unapply(resolved: LogicalPlan): Option[Identifier] = resolved match {
701+
case ResolvedTable(catalog, ident, _: V1Table) if isSessionCatalog(catalog) => Some(ident)
702+
case ResolvedView(ident, _) => Some(ident)
703+
case _ => None
704+
}
705+
}
706+
709707
private def assertTopLevelColumn(colName: Seq[String], command: String): Unit = {
710708
if (colName.length > 1) {
711709
throw new AnalysisException(s"$command does not support nested column: ${colName.quoted}")

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -231,7 +231,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
231231
case DropTable(r: ResolvedTable, ifExists, _) =>
232232
DropTableExec(r.catalog, r.identifier, ifExists) :: Nil
233233

234-
case NoopDropTable(multipartIdentifier) =>
234+
case _: NoopDropTable =>
235235
LocalTableScanExec(Nil, Nil) :: Nil
236236

237237
case AlterTable(catalog, ident, _, changes) =>
@@ -280,6 +280,9 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
280280
case r @ ShowTableProperties(rt: ResolvedTable, propertyKey) =>
281281
ShowTablePropertiesExec(r.output, rt.table, propertyKey) :: Nil
282282

283+
case AnalyzeTable(_: ResolvedTable, _, _) | AnalyzeColumn(_: ResolvedTable, _, _) =>
284+
throw new AnalysisException("ANALYZE TABLE is not supported for v2 tables.")
285+
283286
case _ => Nil
284287
}
285288
}

sql/core/src/test/resources/sql-tests/results/describe.sql.out

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -540,7 +540,7 @@ struct<plan:string>
540540
-- !query output
541541
== Parsed Logical Plan ==
542542
'DescribeRelation false
543-
+- 'UnresolvedTableOrView [t]
543+
+- 'UnresolvedTableOrView [t], true
544544

545545
== Analyzed Logical Plan ==
546546
col_name: string, data_type: string, comment: string

sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -540,10 +540,10 @@ class StatisticsCollectionSuite extends StatisticsCollectionTestBase with Shared
540540
test("analyzes column statistics in cached global temporary view") {
541541
withGlobalTempView("gTempView") {
542542
val globalTempDB = spark.sharedState.globalTempViewManager.database
543-
val errMsg1 = intercept[NoSuchTableException] {
543+
val errMsg1 = intercept[AnalysisException] {
544544
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
545545
}.getMessage
546-
assert(errMsg1.contains(s"Table or view 'gTempView' not found in database '$globalTempDB'"))
546+
assert(errMsg1.contains(s"Table or view not found: $globalTempDB.gTempView"))
547547
// Analyzes in a global temporary view
548548
sql("CREATE GLOBAL TEMP VIEW gTempView AS SELECT * FROM range(1, 30)")
549549
val errMsg2 = intercept[AnalysisException] {

0 commit comments

Comments (0)