Commit c6f8135

yjshen authored and yhuai committed
[SPARK-10539] [SQL] Project should not be pushed down through Intersect or Except #8742
Intersect and Except are both set operators, and they use all of the columns to compare equality between rows. Pushing their parent Project down through them changes the relations they operate on, so it is not an equivalence-preserving transformation.

JIRA: https://issues.apache.org/jira/browse/SPARK-10539

I added some comments based on the fix of #8742.

Author: Yijie Shen <[email protected]>
Author: Yin Huai <[email protected]>

Closes #8823 from yhuai/fix_set_optimization.
1 parent 00a2911 commit c6f8135

3 files changed: +39 / -30 lines
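Why the old rewrite was wrong is easiest to see on a tiny example. The sketch below is illustrative only (the DataFrames left/right and an active SparkSession named spark are assumptions, not part of the patch): intersecting after projecting compares fewer columns and can manufacture matches.

    // Minimal sketch, not part of the patch. Assumes a SparkSession `spark`
    // with spark.implicits._ in scope so Seq(...).toDF works.
    import spark.implicits._

    val left  = Seq((1, "a")).toDF("x", "y")
    val right = Seq((1, "b")).toDF("x", "y")

    // INTERSECT compares entire rows: (1, "a") != (1, "b"), so the result is empty.
    left.intersect(right).select("x").count()              // 0

    // The plan the old rewrite produced compares only column x,
    // so it finds a spurious match.
    left.select("x").intersect(right.select("x")).count()  // 1, a wrong answer

The same argument applies to Except: projecting first can discard the very columns that distinguish the rows the operator must subtract.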

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala

Lines changed: 19 additions & 18 deletions
@@ -85,7 +85,22 @@ object SamplePushDown extends Rule[LogicalPlan] {
 }
 
 /**
- * Pushes operations to either side of a Union, Intersect or Except.
+ * Pushes certain operations to both sides of a Union, Intersect or Except operator.
+ * Operations that are safe to pushdown are listed as follows.
+ * Union:
+ *   Right now, Union means UNION ALL, which does not de-duplicate rows. So, it is
+ *   safe to pushdown Filters and Projections through it. Once we add UNION DISTINCT,
+ *   we will not be able to pushdown Projections.
+ *
+ * Intersect:
+ *   It is not safe to pushdown Projections through it because we need to get the
+ *   intersect of rows by comparing the entire rows. It is fine to pushdown Filters
+ *   because we will not have non-deterministic expressions.
+ *
+ * Except:
+ *   It is not safe to pushdown Projections through it because we need to get the
+ *   intersect of rows by comparing the entire rows. It is fine to pushdown Filters
+ *   because we will not have non-deterministic expressions.
 */
 object SetOperationPushDown extends Rule[LogicalPlan] {

@@ -122,40 +137,26 @@ object SetOperationPushDown extends Rule[LogicalPlan] {
         Filter(condition, left),
         Filter(pushToRight(condition, rewrites), right))
 
-    // Push down projection into union
+    // Push down projection through UNION ALL
     case Project(projectList, u @ Union(left, right)) =>
       val rewrites = buildRewrites(u)
       Union(
         Project(projectList, left),
         Project(projectList.map(pushToRight(_, rewrites)), right))
 
-    // Push down filter into intersect
+    // Push down filter through INTERSECT
     case Filter(condition, i @ Intersect(left, right)) =>
       val rewrites = buildRewrites(i)
       Intersect(
         Filter(condition, left),
         Filter(pushToRight(condition, rewrites), right))
 
-    // Push down projection into intersect
-    case Project(projectList, i @ Intersect(left, right)) =>
-      val rewrites = buildRewrites(i)
-      Intersect(
-        Project(projectList, left),
-        Project(projectList.map(pushToRight(_, rewrites)), right))
-
-    // Push down filter into except
+    // Push down filter through EXCEPT
     case Filter(condition, e @ Except(left, right)) =>
       val rewrites = buildRewrites(e)
       Except(
         Filter(condition, left),
         Filter(pushToRight(condition, rewrites), right))
-
-    // Push down projection into except
-    case Project(projectList, e @ Except(left, right)) =>
-      val rewrites = buildRewrites(e)
-      Except(
-        Project(projectList, left),
-        Project(projectList.map(pushToRight(_, rewrites)), right))
   }
 }
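As context for the rule above: every retained pushdown case rewrites attribute references before pushing an expression to the right child. Below is a minimal, self-contained sketch of that idea in plain Scala; Attr and this buildRewrites are simplifications I introduce for illustration, named after but distinct from the actual Catalyst helpers referenced in the diff.

    // Sketch only, not the Catalyst implementation. An expression written against
    // the left child's output must be re-expressed in terms of the right child's
    // output (matched by position) before it can be pushed to the right side.
    case class Attr(name: String)

    def buildRewrites(leftOutput: Seq[Attr], rightOutput: Seq[Attr]): Map[Attr, Attr] =
      leftOutput.zip(rightOutput).toMap

    val rewrites = buildRewrites(Seq(Attr("a"), Attr("b")), Seq(Attr("d"), Attr("e")))
    // A filter on 'a' for the left child becomes a filter on 'd' for the right child.
    assert(rewrites(Attr("a")) == Attr("d"))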

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SetOperationPushDownSuite.scala

Lines changed: 11 additions & 12 deletions
@@ -60,23 +60,22 @@ class SetOperationPushDownSuite extends PlanTest {
     comparePlans(exceptOptimized, exceptCorrectAnswer)
   }
 
-  test("union/intersect/except: project to each side") {
+  test("union: project to each side") {
     val unionQuery = testUnion.select('a)
+    val unionOptimized = Optimize.execute(unionQuery.analyze)
+    val unionCorrectAnswer =
+      Union(testRelation.select('a), testRelation2.select('d)).analyze
+    comparePlans(unionOptimized, unionCorrectAnswer)
+  }
+
+  test("SPARK-10539: Project should not be pushed down through Intersect or Except") {
     val intersectQuery = testIntersect.select('b, 'c)
     val exceptQuery = testExcept.select('a, 'b, 'c)
 
-    val unionOptimized = Optimize.execute(unionQuery.analyze)
     val intersectOptimized = Optimize.execute(intersectQuery.analyze)
     val exceptOptimized = Optimize.execute(exceptQuery.analyze)
 
-    val unionCorrectAnswer =
-      Union(testRelation.select('a), testRelation2.select('d)).analyze
-    val intersectCorrectAnswer =
-      Intersect(testRelation.select('b, 'c), testRelation2.select('e, 'f)).analyze
-    val exceptCorrectAnswer =
-      Except(testRelation.select('a, 'b, 'c), testRelation2.select('d, 'e, 'f)).analyze
-
-    comparePlans(unionOptimized, unionCorrectAnswer)
-    comparePlans(intersectOptimized, intersectCorrectAnswer)
-    comparePlans(exceptOptimized, exceptCorrectAnswer) }
+    comparePlans(intersectOptimized, intersectQuery.analyze)
+    comparePlans(exceptOptimized, exceptQuery.analyze)
+  }
 }
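Note that the new test asserts the optimized plan equals the merely analyzed plan, i.e. the optimizer now leaves these projections alone. Filter pushdown through INTERSECT and EXCEPT remains covered by the suite's existing filter test (its tail is visible in the context lines above). For illustration, a standalone intersect-only version in the same style might read as follows; the fixtures (testIntersect, testRelation, testRelation2, Optimize) and the column pairing 'b -> 'e follow the surrounding suite, but this exact test is not part of the patch.

    // Hypothetical extra case, mirroring the suite's style: filter pushdown
    // through INTERSECT should still fire, rewriting 'b to 'e on the right side.
    test("intersect: filter to each side") {
      val query = testIntersect.where('b < 10)
      val optimized = Optimize.execute(query.analyze)
      val correctAnswer =
        Intersect(testRelation.where('b < 10), testRelation2.where('e < 10)).analyze
      comparePlans(optimized, correctAnswer)
    }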

sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala

Lines changed: 9 additions & 0 deletions
@@ -907,4 +907,13 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
       assert(row.getDouble(1) - row.getDouble(3) === 0.0 +- 0.001)
     }
   }
+
+  test("SPARK-10539: Project should not be pushed down through Intersect or Except") {
+    val df1 = (1 to 100).map(Tuple1.apply).toDF("i")
+    val df2 = (1 to 30).map(Tuple1.apply).toDF("i")
+    val intersect = df1.intersect(df2)
+    val except = df1.except(df2)
+    assert(intersect.count() === 30)
+    assert(except.count() === 70)
+  }
 }
