
Commit 99bb8ef

Author: Davies Liu
Message: fix test
Parent commit: 074f2a7

2 files changed, 4 insertions(+), 4 deletions(-)


sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProduct.scala

Lines changed: 3 additions & 3 deletions

@@ -48,8 +48,8 @@ class UnsafeCartesianRDD(left : RDD[UnsafeRow], right : RDD[UnsafeRow], numField
       1024,
       SparkEnv.get.memoryManager.pageSizeBytes)

-    val currSplit = split.asInstanceOf[CartesianPartition]
-    for (y <- rdd2.iterator(currSplit.s2, context)) {
+    val partition = split.asInstanceOf[CartesianPartition]
+    for (y <- rdd2.iterator(partition.s2, context)) {
       sorter.insertRecord(y.getBaseObject, y.getBaseOffset, y.getSizeInBytes, 0)
     }

@@ -71,7 +71,7 @@ class UnsafeCartesianRDD(left : RDD[UnsafeRow], right : RDD[UnsafeRow], numField
     }

     val resultIter =
-      for (x <- rdd1.iterator(currSplit.s1, context);
+      for (x <- rdd1.iterator(partition.s1, context);
           y <- createIter()) yield (x, y)
     CompletionIterator[(UnsafeRow, UnsafeRow), Iterator[(UnsafeRow, UnsafeRow)]](
       resultIter, sorter.cleanupResources)
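For readers skimming the rename: the compute path shown above buffers the entire right-hand partition (into an UnsafeExternalSorter in Spark), then replays it once for every row of the left-hand partition. Below is a minimal, self-contained sketch of that buffer-then-replay pattern in plain Scala; the object and method names are illustrative and the in-memory Vector stands in for the spillable sorter, so this is not the Spark-internal API from the diff.

// Sketch of the Cartesian-product pattern used by UnsafeCartesianRDD:
// materialize the right side once, then pair it with every left row.
object CartesianSketch {
  def cartesian[A, B](left: Iterator[A], right: Iterator[B]): Iterator[(A, B)] = {
    // Buffer the right side once (Spark spills this through an external sorter;
    // an in-memory Vector keeps the sketch self-contained).
    val buffered = right.toVector
    // Replay the buffered right side for every row of the left side.
    for (x <- left; y <- buffered.iterator) yield (x, y)
  }

  def main(args: Array[String]): Unit = {
    val pairs = cartesian(Iterator(1, 2, 3), Iterator("a", "b"))
    pairs.foreach(println) // (1,a) (1,b) (2,a) (2,b) (3,a) (3,b)
  }
}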

sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala

Lines changed: 1 addition & 1 deletion

@@ -315,7 +315,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
     testSparkPlanMetrics(df, 1, Map(
       1L -> ("CartesianProduct", Map(
         "number of left rows" -> 12L, // left needs to be scanned twice
-        "number of right rows" -> 12L, // right is read 6 times
+        "number of right rows" -> 4L, // right is read twice
         "number of output rows" -> 12L)))
     )
   }
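The corrected expectation is consistent with the per-row counters this suite checks: assuming the test joins a 6-row left DataFrame with a 2-row right DataFrame (an assumption about inputs defined elsewhere in SQLMetricsSuite, not shown in this diff), scanning the left twice yields 6 * 2 = 12 left rows, reading the 2-row right side twice yields 2 * 2 = 4 right rows, and the product emits 6 * 2 = 12 output rows. A sketch of that arithmetic:

// Hypothetical row counts inferred from the expected metric values above;
// the actual test DataFrames are defined elsewhere in SQLMetricsSuite.
val leftRows = 6L
val rightRows = 2L
val leftScans = 2L   // "left needs to be scanned twice"
val rightReads = 2L  // "right is read twice"
assert(leftRows * leftScans == 12L)   // number of left rows
assert(rightRows * rightReads == 4L)  // number of right rows
assert(leftRows * rightRows == 12L)   // number of output rows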
