Skip to content

Commit adcc742

Browse files
committed
Move test to PlannerSuite.
1 parent 0675956 commit adcc742

File tree

2 files changed

+11
-11
lines changed

2 files changed

+11
-11
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeSuite.scala

Lines changed: 0 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -30,14 +30,4 @@ class ExchangeSuite extends SparkPlanTest {
3030
input.map(Row.fromTuple)
3131
)
3232
}
33-
34-
test("EnsureRequirements shouldn't add exchange to SMJ inputs if both are SinglePartition") {
35-
val df = (1 to 10).map(Tuple1.apply).toDF("a").repartition(1)
36-
val keys = Seq(df.col("a").expr)
37-
val smj = SortMergeJoin(keys, keys, df.queryExecution.sparkPlan, df.queryExecution.sparkPlan)
38-
val afterEnsureRequirements = EnsureRequirements(df.sqlContext).apply(smj)
39-
if (afterEnsureRequirements.collect { case Exchange(_, _) => true }.nonEmpty) {
40-
fail(s"No Exchanges should have been added:\n$afterEnsureRequirements")
41-
}
42-
}
4333
}

sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala

Lines changed: 11 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -21,7 +21,7 @@ import org.apache.spark.SparkFunSuite
2121
import org.apache.spark.sql.TestData._
2222
import org.apache.spark.sql.catalyst.plans._
2323
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
24-
import org.apache.spark.sql.execution.joins.{BroadcastHashJoin, ShuffledHashJoin}
24+
import org.apache.spark.sql.execution.joins.{BroadcastHashJoin, ShuffledHashJoin, SortMergeJoin}
2525
import org.apache.spark.sql.functions._
2626
import org.apache.spark.sql.test.{SQLTestUtils, TestSQLContext}
2727
import org.apache.spark.sql.test.TestSQLContext._
@@ -202,4 +202,14 @@ class PlannerSuite extends SparkFunSuite with SQLTestUtils {
202202
}
203203
}
204204
}
205+
206+
test("EnsureRequirements shouldn't add exchange to SMJ inputs if both are SinglePartition") {
207+
val df = (1 to 10).map(Tuple1.apply).toDF("a").repartition(1)
208+
val keys = Seq(df.col("a").expr)
209+
val smj = SortMergeJoin(keys, keys, df.queryExecution.sparkPlan, df.queryExecution.sparkPlan)
210+
val afterEnsureRequirements = EnsureRequirements(df.sqlContext).apply(smj)
211+
if (afterEnsureRequirements.collect { case Exchange(_, _) => true }.nonEmpty) {
212+
fail(s"No Exchanges should have been added:\n$afterEnsureRequirements")
213+
}
214+
}
205215
}

0 commit comments

Comments (0)