@@ -107,7 +107,7 @@ case class CreateViewCommand(

// When creating a permanent view, not allowed to reference temporary objects.
// This should be called after `qe.assertAnalyzed()` (i.e., `child` can be resolved)
verifyTemporaryObjectsNotExists(catalog)
verifyTemporaryObjectsNotExists(catalog, isTemporary, name, child)

if (viewType == LocalTempView) {
val aliasedPlan = aliasPlan(sparkSession, analyzedPlan)
@@ -161,39 +161,7 @@ case class CreateViewCommand(
Seq.empty[Row]
}

/**
* Permanent views are not allowed to reference temp objects, including temp function and views
*/
private def verifyTemporaryObjectsNotExists(catalog: SessionCatalog): Unit = {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
if (!isTemporary) {
// This func traverses the unresolved plan `child`. Below are the reasons:
// 1) Analyzer replaces unresolved temporary views by a SubqueryAlias with the corresponding
// logical plan. After replacement, it is impossible to detect whether the SubqueryAlias is
// added/generated from a temporary view.
// 2) The temp functions are represented by multiple classes. Most are inaccessible from this
// package (e.g., HiveGenericUDF).
def verify(child: LogicalPlan) {
child.collect {
// Disallow creating permanent views based on temporary views.
case UnresolvedRelation(nameParts) if catalog.isTempView(nameParts) =>
throw new AnalysisException(s"Not allowed to create a permanent view $name by " +
s"referencing a temporary view ${nameParts.quoted}. " +
"Please create a temp view instead by CREATE TEMP VIEW")
case w: With if !w.resolved => w.innerChildren.foreach(verify)
case other if !other.resolved => other.expressions.flatMap(_.collect {
// Traverse subquery plan for any unresolved relations.
case e: SubqueryExpression => verify(e.plan)
// Disallow creating permanent views based on temporary UDFs.
case e: UnresolvedFunction if catalog.isTemporaryFunction(e.name) =>
throw new AnalysisException(s"Not allowed to create a permanent view $name by " +
s"referencing a temporary function `${e.name}`")
})
}
}
verify(child)
}
}


/**
* If `userSpecifiedColumns` is defined, alias the analyzed plan to the user specified columns,
@@ -266,7 +234,8 @@ case class AlterViewAsCommand(
val qe = session.sessionState.executePlan(query)
qe.assertAnalyzed()
val analyzedPlan = qe.analyzed

val isTemporary = session.sessionState.catalog.isTemporaryTable(name)
verifyTemporaryObjectsNotExists(session.sessionState.catalog, isTemporary, name, query)
if (session.sessionState.catalog.alterTempViewDefinition(name, analyzedPlan)) {
// a local/global temp view has been altered, we are done.
} else {
@@ -441,4 +410,41 @@ object ViewHelper {
}
}
}

/**
* Permanent views are not allowed to reference temp objects, including temp function and views
*/
def verifyTemporaryObjectsNotExists(catalog: SessionCatalog,
isTemporary: Boolean,
name: TableIdentifier,
child: LogicalPlan): Unit = {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
if (!isTemporary) {
// This func traverses the unresolved plan `child`. Below are the reasons:
// 1) Analyzer replaces unresolved temporary views by a SubqueryAlias with the corresponding
// logical plan. After replacement, it is impossible to detect whether the SubqueryAlias is
// added/generated from a temporary view.
// 2) The temp functions are represented by multiple classes. Most are inaccessible from this
// package (e.g., HiveGenericUDF).
def verify(child: LogicalPlan) {
child.collect {
// Disallow creating permanent views based on temporary views.
case UnresolvedRelation(nameParts) if catalog.isTempView(nameParts) =>
throw new AnalysisException(s"Not allowed to create a permanent view $name by " +
s"referencing a temporary view ${nameParts.quoted}. " +
"Please create a temp view instead by CREATE TEMP VIEW")
case w: With if !w.resolved => w.innerChildren.foreach(verify)
case other if !other.resolved => other.expressions.flatMap(_.collect {
// Traverse subquery plan for any unresolved relations.
case e: SubqueryExpression => verify(e.plan)
// Disallow creating permanent views based on temporary UDFs.
case e: UnresolvedFunction if catalog.isTemporaryFunction(e.name) =>
throw new AnalysisException(s"Not allowed to create a permanent view $name by " +
s"referencing a temporary function `${e.name}`")
})
}
}
verify(child)
}
}
}
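The refactor above moves verifyTemporaryObjectsNotExists out of CreateViewCommand into ViewHelper and has AlterViewAsCommand call it too, so altering a permanent view now goes through the same temporary-object check as creating one. Below is a minimal sketch of the resulting user-facing behavior, assuming a local SparkSession with spark-sql on the classpath; the object, app, and view names are invented for illustration, and the exact error text may differ between Spark versions.

import org.apache.spark.sql.{AnalysisException, SparkSession}

// Hypothetical demo (not part of this PR): a permanent view, a temporary view,
// and an ALTER VIEW that tries to point the permanent view at the temporary one.
object AlterViewTempRefDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("alter-view-temp-ref-demo")
      .master("local[*]")
      .getOrCreate()

    spark.sql("CREATE VIEW perm_v AS SELECT id FROM range(10) WHERE id > 3")
    spark.sql("CREATE TEMPORARY VIEW temp_v AS SELECT id FROM range(10) WHERE id < 3")

    try {
      // With this patch the shared check also runs for ALTER VIEW, so this fails at
      // analysis time instead of persisting a definition that breaks once temp_v is gone.
      spark.sql("ALTER VIEW perm_v AS SELECT * FROM temp_v")
    } catch {
      case e: AnalysisException =>
        // e.g. "Not allowed to create a permanent view `default`.`perm_v` by
        // referencing a temporary view temp_v. ..."
        println(e.getMessage)
    } finally {
      spark.stop()
    }
  }
}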
@@ -17,6 +17,8 @@

package org.apache.spark.sql.execution

import org.scalatest.Assertions.intercept

import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
@@ -795,4 +797,18 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
}
}
}

test("SPARK-36011: Disallow altering permanent views based on temporary views") {
withView("jtv1") {
withTempView("jtv2") {
sql(s"CREATE VIEW jtv1 AS SELECT * FROM jt WHERE id > 3")
sql(s"CREATE TEMPORARY VIEW jtv2 AS SELECT * FROM jt where id < 3")
val e = intercept[AnalysisException] {
sql(s"ALTER VIEW jtv1 AS SELECT * FROM jtv2")
}.getMessage
assert(e.contains("Not allowed to create a permanent view `default`.`jtv1` by " +
"referencing a temporary view jtv2"))
}
}
}
}
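The new SQLViewSuite test covers the plain FROM-clause case. The verify() helper shown above also walks CTEs (the With branch) and subquery expressions, so temp-view references hidden behind either should be rejected as well. A hedged spark-shell sketch of those branches, where spark is the shell's predefined SparkSession and the view names are illustrative rather than taken from this PR:

import org.apache.spark.sql.AnalysisException

// Hypothetical follow-up (not from this PR): each ALTER VIEW below is expected to throw
// the same "Not allowed to create a permanent view ..." AnalysisException.
def expectRejected(statement: String): Unit =
  try {
    spark.sql(statement)
    println(s"unexpectedly succeeded: $statement")
  } catch {
    case e: AnalysisException => println(s"rejected as expected: ${e.getMessage}")
  }

spark.sql("CREATE VIEW perm_v2 AS SELECT id FROM range(10)")
spark.sql("CREATE TEMPORARY VIEW temp_v2 AS SELECT id FROM range(5)")

// With branch: the temporary view is only referenced through a CTE.
expectRejected("ALTER VIEW perm_v2 AS WITH c AS (SELECT * FROM temp_v2) SELECT * FROM c")

// SubqueryExpression branch: the temporary view only appears in a scalar subquery.
expectRejected("ALTER VIEW perm_v2 AS SELECT (SELECT max(id) FROM temp_v2) AS m")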
@@ -17,6 +17,8 @@

package org.apache.spark.sql.hive.execution

import org.scalatest.Assertions.intercept

import org.apache.spark.sql.{AnalysisException, Row, SaveMode, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
@@ -137,4 +139,27 @@ class HiveSQLViewSuite extends SQLViewSuite with TestHiveSingleton {
}
}
}

test("SPARK-36011: Disallow altering permanent views based on temporary UDFs") {
val tempFunctionName = "temp"
val functionClass =
classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
withUserDefinedFunction(tempFunctionName -> true) {
sql(s"CREATE TEMPORARY FUNCTION $tempFunctionName AS '$functionClass'")
withView("view1") {
withTempView("tempView1") {
withTable("tab1") {
(1 to 10).map(i => s"$i").toDF("id").write.saveAsTable("tab1")
sql("CREATE VIEW view1 AS SELECT id from tab1")

val e = intercept[AnalysisException] {
sql(s"ALTER VIEW view1 AS SELECT $tempFunctionName(id) from tab1")
}.getMessage
assert(e.contains("Not allowed to create a permanent view `default`.`view1` by " +
s"referencing a temporary function `$tempFunctionName`"))
}
}
}
}
}
}
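The HiveSQLViewSuite test uses a Hive GenericUDF, but the temporary-function branch is not Hive-specific: a function registered through spark.udf.register lands in the session's function registry as a temporary function, so the same check should reject it too. A small sketch under that assumption, again in spark-shell where spark is predefined; the function and view names are invented for illustration:

import org.apache.spark.sql.AnalysisException

// Hypothetical sketch (not from this PR): a Scala UDF registered for the session only.
spark.udf.register("shout", (s: String) => s.toUpperCase)

spark.sql("CREATE VIEW words_v AS SELECT 'hello' AS w")

try {
  // Expected to fail with "Not allowed to create a permanent view ... by referencing a
  // temporary function `shout`" once the shared check also runs for ALTER VIEW.
  spark.sql("ALTER VIEW words_v AS SELECT shout('hello') AS w")
} catch {
  case e: AnalysisException => println(e.getMessage)
}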