Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,7 @@ class SessionCatalog(
// If the database is defined, this is definitely not a temp table.
// If the database is not defined, there is a good chance this is a temp table.
if (name.database.isEmpty) {
tempTables.get(name.table).foreach(_.refresh())
tempTables.get(formatTableName(name.table)).foreach(_.refresh())
}
}

Expand All @@ -508,7 +508,7 @@ class SessionCatalog(
* For testing only.
*/
private[catalog] def getTempTable(name: String): Option[LogicalPlan] = synchronized {
  // Normalize the caller-supplied name via formatTableName before the lookup,
  // so retrieval matches however names were formatted at registration time
  // (presumably this applies the catalog's case-sensitivity rule — confirm
  // against formatTableName's definition in SessionCatalog).
  val normalizedName = formatTableName(name)
  tempTables.get(normalizedName)
}

// ----------------------------------------------------------------------------
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1212,7 +1212,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
*
* For example:
* {{{
* CREATE [TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
* CREATE [OR REPLACE] [TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
* [(column_name [COMMENT column_comment], ...) ]
* [COMMENT view_comment]
* [TBLPROPERTIES (property_name = property_value, ...)]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ package org.apache.spark.sql
import java.io.File

import org.apache.spark.SparkException
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext

/**
Expand Down Expand Up @@ -85,4 +86,28 @@ class MetadataCacheSuite extends QueryTest with SharedSQLContext {
assert(newCount > 0 && newCount < 100)
}}
}

test("case sensitivity support in temporary view refresh") {
  withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
    withTempView("view_refresh") {
      withTempPath { dir =>
        // Write 100 rows split over 3 Parquet files so that deleting a
        // single file still leaves readable data behind.
        spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
          .write.parquet(dir.getAbsolutePath)

        // Register the directory's contents as a temporary view.
        spark.read.parquet(dir.getAbsolutePath).createOrReplaceTempView("view_refresh")

        // Removing one underlying file makes the cached file listing stale,
        // so the next scan of the view must fail.
        deleteOneFileInDirectory(dir)
        intercept[SparkException](sql("select count(*) from view_refresh").first())

        // Refresh using a differently-cased name: with case sensitivity off,
        // the refresh must still hit the temp view, after which the query
        // succeeds and returns only the surviving rows.
        spark.catalog.refreshTable("vIeW_reFrEsH")
        val rowsAfterRefresh = sql("select count(*) from view_refresh").first().getLong(0)
        assert(rowsAfterRefresh > 0 && rowsAfterRefresh < 100)
      }
    }
  }
}
}