
Commit 4dea510

[SPARK-33141][SQL][FOLLOW-UP] Store the max nested view depth in AnalysisContext
### What changes were proposed in this pull request?

This is a follow-up of #30289. It removes the hack in `View.effectiveSQLConf` by putting the max nested view depth in `AnalysisContext`. Then we don't get the max nested view depth from the active SQLConf, which keeps changing during nested view resolution.

### Why are the changes needed?

Remove hacks.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

If I just remove the hack, `SimpleSQLViewSuite.restrict the nested level of a view` fails. With this fix, it passes again.

Closes #30575 from cloud-fan/view.

Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
(cherry picked from commit acc211d)
Signed-off-by: Wenchen Fan <[email protected]>
1 parent 990bee9 commit 4dea510
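
To make the fix easier to picture before reading the diff, here is a minimal, self-contained sketch of the capture-once pattern the commit message describes: the depth limit is read from the session conf only when the outermost view starts resolving, and every nested resolution reuses that captured value. The names `NestedDepthSketch`, `Ctx`, and `withViewContext` are illustrative stand-ins, not the real `AnalysisContext`/`Analyzer` code, which follows below.

// Illustrative sketch only; stand-in names, not Spark's actual classes.
object NestedDepthSketch {
  final case class Ctx(nestedViewDepth: Int = 0, maxNestedViewDepth: Int = -1)

  private val value = new ThreadLocal[Ctx] {
    override def initialValue(): Ctx = Ctx()
  }

  def current: Ctx = value.get()

  // `confMaxDepth` stands in for reading the limit from the session conf
  // (SQLConf.get.maxNestedViewDepth in the real code). It is consulted only
  // for the outermost view, where maxNestedViewDepth still has the -1 default.
  def withViewContext[A](confMaxDepth: => Int)(f: => A): A = {
    val origin = value.get()
    val maxDepth =
      if (origin.maxNestedViewDepth == -1) confMaxDepth // outermost view: capture once
      else origin.maxNestedViewDepth                    // nested view: reuse captured value
    value.set(Ctx(origin.nestedViewDepth + 1, maxDepth))
    try f finally value.set(origin)
  }
}

// e.g. in a Scala REPL or worksheet: even if a nested view's stored SQL configs
// would report a different limit, the depth check keeps comparing against the
// value captured at the outer view.
NestedDepthSketch.withViewContext(10) {
  assert(NestedDepthSketch.current == NestedDepthSketch.Ctx(1, 10))
  NestedDepthSketch.withViewContext(3) { // the inner "conf" value is ignored
    assert(NestedDepthSketch.current == NestedDepthSketch.Ctx(2, 10))
  }
}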

File tree: 4 files changed, +57 -52 lines


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 32 additions & 17 deletions
@@ -87,8 +87,8 @@ object FakeV2SessionCatalog extends TableCatalog {
 }
 
 /**
- * Provides a way to keep state during the analysis, this enables us to decouple the concerns
- * of analysis environment from the catalog.
+ * Provides a way to keep state during the analysis, mostly for resolving views. This enables us to
+ * decouple the concerns of analysis environment from the catalog.
  * The state that is kept here is per-query.
  *
  * Note this is thread local.
@@ -98,13 +98,21 @@ object FakeV2SessionCatalog extends TableCatalog {
  *                            views.
  * @param nestedViewDepth The nested depth in the view resolution, this enables us to limit the
  *                        depth of nested views.
+ * @param maxNestedViewDepth The maximum allowed depth of nested view resolution.
  * @param relationCache A mapping from qualified table names to resolved relations. This can ensure
  *                      that the table is resolved only once if a table is used multiple times
  *                      in a query.
+ * @param referredTempViewNames All the temp view names referred by the current view we are
+ *                              resolving. It's used to make sure the relation resolution is
+ *                              consistent between view creation and view resolution. For example,
+ *                              if `t` was a permanent table when the current view was created, it
+ *                              should still be a permanent table when resolving the current view,
+ *                              even if a temp view `t` has been created.
  */
 case class AnalysisContext(
     catalogAndNamespace: Seq[String] = Nil,
     nestedViewDepth: Int = 0,
+    maxNestedViewDepth: Int = -1,
     relationCache: mutable.Map[Seq[String], LogicalPlan] = mutable.Map.empty,
     referredTempViewNames: Seq[Seq[String]] = Seq.empty)
 
@@ -118,14 +126,20 @@ object AnalysisContext {
 
   private def set(context: AnalysisContext): Unit = value.set(context)
 
-  def withAnalysisContext[A](
-      catalogAndNamespace: Seq[String], referredTempViewNames: Seq[Seq[String]])(f: => A): A = {
+  def withAnalysisContext[A](viewDesc: CatalogTable)(f: => A): A = {
     val originContext = value.get()
+    val maxNestedViewDepth = if (originContext.maxNestedViewDepth == -1) {
+      // Here we start to resolve views, get `maxNestedViewDepth` from configs.
+      SQLConf.get.maxNestedViewDepth
+    } else {
+      originContext.maxNestedViewDepth
+    }
     val context = AnalysisContext(
-      catalogAndNamespace,
+      viewDesc.viewCatalogAndNamespace,
       originContext.nestedViewDepth + 1,
+      maxNestedViewDepth,
       originContext.relationCache,
-      referredTempViewNames)
+      viewDesc.viewReferredTempViewNames)
     set(context)
     try f finally { set(originContext) }
   }
@@ -1034,18 +1048,19 @@ class Analyzer(override val catalogManager: CatalogManager)
       // operator.
       case view @ View(desc, isTempView, _, child) if !child.resolved =>
         // Resolve all the UnresolvedRelations and Views in the child.
-        val newChild = AnalysisContext.withAnalysisContext(
-          desc.viewCatalogAndNamespace, desc.viewReferredTempViewNames) {
-          if (AnalysisContext.get.nestedViewDepth > conf.maxNestedViewDepth) {
-            view.failAnalysis(s"The depth of view ${desc.identifier} exceeds the maximum " +
-              s"view resolution depth (${conf.maxNestedViewDepth}). Analysis is aborted to " +
-              s"avoid errors. Increase the value of ${SQLConf.MAX_NESTED_VIEW_DEPTH.key} to " +
-              "work around this.")
-          }
-          SQLConf.withExistingConf(View.effectiveSQLConf(desc.viewSQLConfigs, isTempView)) {
-            executeSameContext(child)
-          }
+        val newChild = AnalysisContext.withAnalysisContext(desc) {
+          val nestedViewDepth = AnalysisContext.get.nestedViewDepth
+          val maxNestedViewDepth = AnalysisContext.get.maxNestedViewDepth
+          if (nestedViewDepth > maxNestedViewDepth) {
+            view.failAnalysis(s"The depth of view ${desc.identifier} exceeds the maximum " +
+              s"view resolution depth ($maxNestedViewDepth). Analysis is aborted to " +
+              s"avoid errors. Increase the value of ${SQLConf.MAX_NESTED_VIEW_DEPTH.key} to " +
+              "work around this.")
+          }
+          SQLConf.withExistingConf(View.effectiveSQLConf(desc.viewSQLConfigs, isTempView)) {
+            executeSameContext(child)
           }
+        }
         view.copy(child = newChild)
       case p @ SubqueryAlias(_, view: View) =>
         p.copy(child = resolveViews(view))

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala

Lines changed: 0 additions & 3 deletions
@@ -483,9 +483,6 @@ object View {
     for ((k, v) <- configs) {
       sqlConf.settings.put(k, v)
     }
-    // We should respect the current maxNestedViewDepth cause the view resolving are executed
-    // from top to down.
-    sqlConf.setConf(SQLConf.MAX_NESTED_VIEW_DEPTH, activeConf.maxNestedViewDepth)
     sqlConf
   }

sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala

Lines changed: 0 additions & 25 deletions
@@ -704,31 +704,6 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
     }
   }
 
-  test("restrict the nested level of a view") {
-    val viewNames = Array.range(0, 11).map(idx => s"view$idx")
-    withView(viewNames: _*) {
-      sql("CREATE VIEW view0 AS SELECT * FROM jt")
-      Array.range(0, 10).foreach { idx =>
-        sql(s"CREATE VIEW view${idx + 1} AS SELECT * FROM view$idx")
-      }
-
-      withSQLConf(MAX_NESTED_VIEW_DEPTH.key -> "10") {
-        val e = intercept[AnalysisException] {
-          sql("SELECT * FROM view10")
-        }.getMessage
-        assert(e.contains("The depth of view `default`.`view0` exceeds the maximum view " +
-          "resolution depth (10). Analysis is aborted to avoid errors. Increase the value " +
-          s"of ${MAX_NESTED_VIEW_DEPTH.key} to work around this."))
-      }
-
-      val e = intercept[IllegalArgumentException] {
-        withSQLConf(MAX_NESTED_VIEW_DEPTH.key -> "0") {}
-      }.getMessage
-      assert(e.contains("The maximum depth of a view reference in a nested view must be " +
-        "positive."))
-    }
-  }
-
   test("permanent view should be case-preserving") {
     withView("v") {
       sql("CREATE VIEW v AS SELECT 1 as aBc")

sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala

Lines changed: 25 additions & 7 deletions
@@ -121,7 +121,7 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
   test("change current database should not change view behavior") {
     withTable("t") {
       Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
-      val viewName = createView("v1", "SELECT * from t")
+      val viewName = createView("v1", "SELECT * FROM t")
       withView(viewName) {
         withTempDatabase { db =>
           sql(s"USE $db")
@@ -135,7 +135,7 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
   test("view should read the new data if table is updated") {
     withTable("t") {
       Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
-      val viewName = createView("v1", "SELECT c1 from t", Seq("c1"))
+      val viewName = createView("v1", "SELECT c1 FROM t", Seq("c1"))
       withView(viewName) {
         Seq(9, 7, 8).toDF("c1").write.mode("overwrite").format("parquet").saveAsTable("t")
         checkViewOutput(viewName, Seq(Row(9), Row(7), Row(8)))
@@ -146,7 +146,7 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
   test("add column for table should not affect view output") {
     withTable("t") {
       Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
-      val viewName = createView("v1", "SELECT * from t")
+      val viewName = createView("v1", "SELECT * FROM t")
       withView(viewName) {
         sql("ALTER TABLE t ADD COLUMN (c2 INT)")
         checkViewOutput(viewName, Seq(Row(2), Row(3), Row(1)))
@@ -157,8 +157,8 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
   test("check cyclic view reference on CREATE OR REPLACE VIEW") {
     withTable("t") {
       Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
-      val viewName1 = createView("v1", "SELECT * from t")
-      val viewName2 = createView("v2", s"SELECT * from $viewName1")
+      val viewName1 = createView("v1", "SELECT * FROM t")
+      val viewName2 = createView("v2", s"SELECT * FROM $viewName1")
       withView(viewName2, viewName1) {
         val e = intercept[AnalysisException] {
           createView("v1", s"SELECT * FROM $viewName2", replace = true)
@@ -171,8 +171,8 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
   test("check cyclic view reference on ALTER VIEW") {
     withTable("t") {
       Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
-      val viewName1 = createView("v1", "SELECT * from t")
-      val viewName2 = createView("v2", s"SELECT * from $viewName1")
+      val viewName1 = createView("v1", "SELECT * FROM t")
+      val viewName2 = createView("v2", s"SELECT * FROM $viewName1")
       withView(viewName2, viewName1) {
         val e = intercept[AnalysisException] {
           sql(s"ALTER VIEW $viewName1 AS SELECT * FROM $viewName2")
@@ -181,6 +181,24 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
       }
     }
   }
+
+  test("restrict the nested level of a view") {
+    val viewNames = scala.collection.mutable.ArrayBuffer.empty[String]
+    val view0 = createView("view0", "SELECT 1")
+    viewNames += view0
+    for (i <- 1 to 10) {
+      viewNames += createView(s"view$i", s"SELECT * FROM ${viewNames.last}")
+    }
+    withView(viewNames.reverse: _*) {
+      withSQLConf(MAX_NESTED_VIEW_DEPTH.key -> "10") {
+        val e = intercept[AnalysisException] {
+          sql(s"SELECT * FROM ${viewNames.last}")
+        }.getMessage
+        assert(e.contains("exceeds the maximum view resolution depth (10)"))
+        assert(e.contains(s"Increase the value of ${MAX_NESTED_VIEW_DEPTH.key}"))
+      }
+    }
+  }
 }
 
 class LocalTempViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
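
For reference, the scenario this restored test covers (and that the removed SQLViewSuite test used to cover) can be reproduced outside the test harness roughly as follows. This is only a sketch: it assumes a local SparkSession with the default in-memory catalog, and it reaches the limit through `SQLConf.MAX_NESTED_VIEW_DEPTH.key` rather than hard-coding the config name.

// Stand-alone sketch, not part of this patch. Run e.g. in spark-shell, where
// `spark` already exists and the builder below is unnecessary.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf

val spark = SparkSession.builder()
  .appName("nested-view-depth-demo")
  .master("local[*]")
  .getOrCreate()

// view0 selects a literal; each following view selects from the previous one,
// so resolving view10 has to walk through 11 nested views.
spark.sql("CREATE VIEW view0 AS SELECT 1")
(1 to 10).foreach { i =>
  spark.sql(s"CREATE VIEW view$i AS SELECT * FROM view${i - 1}")
}

// With the limit lowered to 10, analyzing the query below throws an
// AnalysisException: "... exceeds the maximum view resolution depth (10) ...".
spark.conf.set(SQLConf.MAX_NESTED_VIEW_DEPTH.key, "10")
spark.sql("SELECT * FROM view10")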
