Skip to content

Commit a8ce27f

Browse files
committed
address comments
1 parent 03ded04 commit a8ce27f

File tree

11 files changed

+75
-82
lines changed

11 files changed

+75
-82
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -439,7 +439,16 @@ class Analyzer(
439439
object ResolveRelations extends Rule[LogicalPlan] {
440440
private def lookupTableFromCatalog(u: UnresolvedRelation): LogicalPlan = {
441441
try {
442-
catalog.lookupRelation(u.tableIdentifier, u.alias)
442+
if (u.tableIdentifier.database.isDefined) {
443+
catalog.lookupRelation(u.tableIdentifier, u.alias)
444+
} else {
445+
val maybeTempView = catalog.lookupTempView(u.tableIdentifier.table, u.alias)
446+
if (maybeTempView.isDefined) {
447+
maybeTempView.get
448+
} else {
449+
catalog.lookupRelation(u.tableIdentifier, u.alias)
450+
}
451+
}
443452
} catch {
444453
case _: NoSuchTableException =>
445454
u.failAnalysis(s"Table or view not found: ${u.tableName}")

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala

Lines changed: 26 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -271,6 +271,22 @@ class SessionCatalog(
271271
externalCatalog.getTableOption(db, table)
272272
}
273273

274+
/**
275+
* Return a [[LogicalPlan]] that represents the given table/view in metastore.
276+
*
277+
* If the relation is a view, the relation will be wrapped in a [[SubqueryAlias]] which will
278+
* track the name of the view.
279+
*/
280+
def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
281+
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
282+
val table = formatTableName(name.table)
283+
val tableMeta = externalCatalog.getTable(db, table)
284+
val view = Option(tableMeta.tableType).collect {
285+
case CatalogTableType.VIEW => TableIdentifier(table, Some(db))
286+
}
287+
SubqueryAlias(alias.getOrElse(table), SimpleCatalogRelation(db, tableMeta), view)
288+
}
289+
274290
/**
275291
* Return whether a table/view with the specified name exists in metastore.
276292
*/
@@ -370,6 +386,16 @@ class SessionCatalog(
370386
tempViews.create(table, viewDefinition, overrideIfExists)
371387
}
372388

389+
/**
390+
* Return a [[LogicalPlan]] that represents the given temporary view.
391+
*/
392+
def lookupTempView(name: String, alias: Option[String] = None): Option[LogicalPlan] = {
393+
val viewName = formatTableName(name)
394+
tempViews.get(viewName).map { viewDef =>
395+
SubqueryAlias(alias.getOrElse(viewName), viewDef, Some(TableIdentifier(viewName)))
396+
}
397+
}
398+
373399
/**
374400
* Rename a temporary view, and returns true if it succeeds, false otherwise.
375401
*/
@@ -401,43 +427,6 @@ class SessionCatalog(
401427
// | Methods that interact with temporary views and metastore tables/views |
402428
// -------------------------------------------------------------------------
403429

404-
/**
405-
* Return a [[LogicalPlan]] that represents the given table/view.
406-
*
407-
* If a database is specified in `name`, this will return the table/view from that database.
408-
* If no database is specified, this will first attempt to return a temporary view with
409-
* the same name, then, if that does not exist, return the table/view from the current database.
410-
*
411-
* If the relation is a view, the relation will be wrapped in a [[SubqueryAlias]] which will
412-
* track the name of the view.
413-
*/
414-
def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
415-
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
416-
val table = formatTableName(name.table)
417-
418-
if (name.database.isDefined) {
419-
lookupMetastoreRelation(db, table, alias)
420-
} else {
421-
val maybeTempView = tempViews.get(table)
422-
if (maybeTempView.isDefined) {
423-
SubqueryAlias(alias.getOrElse(table), maybeTempView.get, Some(name))
424-
} else {
425-
lookupMetastoreRelation(db, table, alias)
426-
}
427-
}
428-
}
429-
430-
protected def lookupMetastoreRelation(
431-
db: String,
432-
table: String,
433-
alias: Option[String]): LogicalPlan = {
434-
val metadata = externalCatalog.getTable(db, table)
435-
val view = Option(metadata.tableType).collect {
436-
case CatalogTableType.VIEW => TableIdentifier(table, Some(db))
437-
}
438-
SubqueryAlias(alias.getOrElse(table), SimpleCatalogRelation(db, metadata), view)
439-
}
440-
441430
/**
442431
* List all tables/views in the specified database, including temporary views.
443432
*/
@@ -475,11 +464,6 @@ class SessionCatalog(
475464
*/
476465
def clearTempViews(): Unit = tempViews.clear()
477466

478-
/**
479-
* Return a temporary view exactly as it was stored.
480-
*/
481-
def getTempView(name: String): Option[LogicalPlan] = tempViews.get(formatTableName(name))
482-
483467
// ----------------------------------------------------------------------------
484468
// Partitions
485469
// ----------------------------------------------------------------------------

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,8 @@ import org.apache.spark.sql.catalyst.util.StringUtils
2929

3030
/**
3131
* A thread-safe manager for a list of temp views, providing atomic operations to manage temp views.
32+
* Note that the temp view name is always case-sensitive here; callers are responsible for
33+
* formatting the view name w.r.t. the case-sensitivity config.
3234
*/
3335
class TempViewManager {
3436

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -201,16 +201,16 @@ class SessionCatalogSuite extends SparkFunSuite {
201201
val tempTable2 = Range(1, 20, 2, 10)
202202
catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
203203
catalog.createTempView("tbl2", tempTable2, overrideIfExists = false)
204-
assert(catalog.getTempView("tbl1") == Option(tempTable1))
205-
assert(catalog.getTempView("tbl2") == Option(tempTable2))
206-
assert(catalog.getTempView("tbl3").isEmpty)
204+
assert(catalog.lookupTempView("tbl1") == Option(tempTable1))
205+
assert(catalog.lookupTempView("tbl2") == Option(tempTable2))
206+
assert(catalog.lookupTempView("tbl3").isEmpty)
207207
// Temporary table already exists
208208
intercept[TempViewAlreadyExistsException] {
209209
catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
210210
}
211211
// Temporary table already exists but we override it
212212
catalog.createTempView("tbl1", tempTable2, overrideIfExists = true)
213-
assert(catalog.getTempView("tbl1") == Option(tempTable2))
213+
assert(catalog.lookupTempView("tbl1") == Option(tempTable2))
214214
}
215215

216216
test("drop table") {
@@ -251,10 +251,10 @@ class SessionCatalogSuite extends SparkFunSuite {
251251
val tempTable = Range(1, 10, 2, 10)
252252
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
253253
sessionCatalog.setCurrentDatabase("db2")
254-
assert(sessionCatalog.getTempView("tbl1") == Some(tempTable))
254+
assert(sessionCatalog.lookupTempView("tbl1") == Some(tempTable))
255255
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
256256
sessionCatalog.dropTempView("tbl1")
257-
assert(sessionCatalog.getTempView("tbl1").isEmpty)
257+
assert(sessionCatalog.lookupTempView("tbl1").isEmpty)
258258
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
259259
}
260260

@@ -292,11 +292,11 @@ class SessionCatalogSuite extends SparkFunSuite {
292292
val tempTable = Range(1, 10, 2, 10)
293293
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
294294
sessionCatalog.setCurrentDatabase("db2")
295-
assert(sessionCatalog.getTempView("tbl1") == Option(tempTable))
295+
assert(sessionCatalog.lookupTempView("tbl1") == Option(tempTable))
296296
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
297297
sessionCatalog.renameTempView("tbl1", "tbl3")
298-
assert(sessionCatalog.getTempView("tbl1").isEmpty)
299-
assert(sessionCatalog.getTempView("tbl3") == Option(tempTable))
298+
assert(sessionCatalog.lookupTempView("tbl1").isEmpty)
299+
assert(sessionCatalog.lookupTempView("tbl3") == Option(tempTable))
300300
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
301301
}
302302

sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -463,9 +463,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
463463
* @since 1.4.0
464464
*/
465465
def table(tableName: String): DataFrame = {
466-
Dataset.ofRows(sparkSession,
467-
sparkSession.sessionState.catalog.lookupRelation(
468-
sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)))
466+
sparkSession.table(tableName)
469467
}
470468

471469
/**

sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -556,7 +556,20 @@ class SparkSession private(
556556
}
557557

558558
private[sql] def table(tableIdent: TableIdentifier): DataFrame = {
559-
Dataset.ofRows(self, sessionState.catalog.lookupRelation(tableIdent))
559+
val plan = {
560+
if (tableIdent.database.isDefined) {
561+
sessionState.catalog.lookupRelation(tableIdent)
562+
} else {
563+
val maybeTempView = sessionState.catalog.lookupTempView(tableIdent.table)
564+
if (maybeTempView.isDefined) {
565+
maybeTempView.get
566+
} else {
567+
sessionState.catalog.lookupRelation(tableIdent)
568+
}
569+
}
570+
}
571+
572+
Dataset.ofRows(self, plan)
560573
}
561574

562575
/* ----------------- *

sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala

Lines changed: 4 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ case class CreateTableLikeCommand(
6666
val sourceTableDesc = if (sourceTable.database.isDefined) {
6767
catalog.getTableMetadata(sourceTable)
6868
} else {
69-
val maybeTempView = catalog.getTempView(sourceTable.table)
69+
val maybeTempView = catalog.lookupTempView(sourceTable.table)
7070
if (maybeTempView.isDefined) {
7171
CatalogTable(
7272
identifier = sourceTable,
@@ -228,12 +228,7 @@ case class LoadDataCommand(
228228

229229
override def run(sparkSession: SparkSession): Seq[Row] = {
230230
val catalog = sparkSession.sessionState.catalog
231-
if (!catalog.tableExists(table)) {
232-
throw new AnalysisException(s"Target table in LOAD DATA does not exist: $table")
233-
}
234-
val targetTable = catalog.getTableMetadataOption(table).getOrElse {
235-
throw new AnalysisException(s"Target table in LOAD DATA cannot be temporary: $table")
236-
}
231+
val targetTable = catalog.getTableMetadata(table)
237232
if (targetTable.tableType == CatalogTableType.VIEW) {
238233
throw new AnalysisException(s"Target table in LOAD DATA cannot be a view: $table")
239234
}
@@ -349,9 +344,6 @@ case class TruncateTableCommand(
349344

350345
override def run(spark: SparkSession): Seq[Row] = {
351346
val catalog = spark.sessionState.catalog
352-
if (!catalog.tableExists(tableName)) {
353-
throw new AnalysisException(s"Table $tableName in TRUNCATE TABLE does not exist.")
354-
}
355347
val table = catalog.getTableMetadata(tableName)
356348
if (table.tableType == CatalogTableType.EXTERNAL) {
357349
throw new AnalysisException(
@@ -436,7 +428,7 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
436428
if (table.database.isDefined) {
437429
describeMetastoreTable(catalog, result)
438430
} else {
439-
val maybeTempView = catalog.getTempView(table.table)
431+
val maybeTempView = catalog.lookupTempView(table.table)
440432
if (maybeTempView.isDefined) {
441433
describeSchema(maybeTempView.get.schema, result)
442434
} else {
@@ -619,7 +611,7 @@ case class ShowColumnsCommand(table: TableIdentifier) extends RunnableCommand {
619611
val schema = if (table.database.isDefined) {
620612
catalog.getTableMetadata(table).schema
621613
} else {
622-
val maybeTempView = catalog.getTempView(table.table)
614+
val maybeTempView = catalog.lookupTempView(table.table)
623615
if (maybeTempView.isDefined) {
624616
maybeTempView.get.schema
625617
} else {
@@ -713,10 +705,6 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
713705

714706
override def run(sparkSession: SparkSession): Seq[Row] = {
715707
val catalog = sparkSession.sessionState.catalog
716-
if (!catalog.tableExists(table)) {
717-
throw new AnalysisException(s"Table $table doesn't exist")
718-
}
719-
720708
val tableMetadata = catalog.getTableMetadata(table)
721709

722710
// TODO: unify this after we unify the CREATE TABLE syntax for hive serde and data source table.

sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
139139
*/
140140
@throws[AnalysisException]("table does not exist")
141141
override def listColumns(tableName: String): Dataset[Column] = {
142-
val maybeTempView = sessionCatalog.getTempView(tableName)
142+
val maybeTempView = sessionCatalog.lookupTempView(tableName)
143143
if (maybeTempView.isDefined) {
144144
val columns = maybeTempView.get.schema.map { c =>
145145
new Column(
@@ -368,7 +368,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
368368

369369
// If this table is cached as an InMemoryRelation, drop the original
370370
// cached version and make the new version cached lazily.
371-
val logicalPlan = sparkSession.sessionState.catalog.lookupRelation(tableIdent)
371+
val logicalPlan = sparkSession.table(tableIdent).queryExecution.logical
372372
// Use lookupCachedData directly since RefreshTable also takes databaseName.
373373
val isCached = sparkSession.sharedState.cacheManager.lookupCachedData(logicalPlan).nonEmpty
374374
if (isCached) {

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -53,10 +53,9 @@ private[sql] class HiveSessionCatalog(
5353
conf,
5454
hadoopConf) {
5555

56-
override protected def lookupMetastoreRelation(
57-
db: String,
58-
table: String,
59-
alias: Option[String]): LogicalPlan = {
56+
override def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
57+
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
58+
val table = formatTableName(name.table)
6059
metastoreCatalog.lookupRelation(TableIdentifier(table, Some(db)), alias)
6160
}
6261

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -675,7 +675,7 @@ class HiveDDLSuite
675675
identifier = TableIdentifier(sourceViewName),
676676
tableType = CatalogTableType.VIEW,
677677
storage = CatalogStorageFormat.empty,
678-
schema = spark.sessionState.catalog.getTempView(sourceViewName).get.schema)
678+
schema = spark.sessionState.catalog.lookupTempView(sourceViewName).get.schema)
679679
val targetTable = spark.sessionState.catalog.getTableMetadata(
680680
TableIdentifier(targetTabName, Some("default")))
681681

0 commit comments

Comments
 (0)