24 changes: 23 additions & 1 deletion core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -443,7 +443,7 @@ class RDDSuite extends SparkFunSuite with SharedSparkContext {
map{x => List(x)}.toList, "Tried coalescing 9 partitions to 20 but didn't get 9 back")
}

test("coalesced RDDs with partial locality") {
test("coalesced RDDs with partial locality") {
// Make an RDD that has some locality preferences and some without. This can happen
// with UnionRDD
val data = sc.makeRDD((1 to 9).map(i => {
@@ -846,6 +846,28 @@ class RDDSuite extends SparkFunSuite with SharedSparkContext {
assert(partitions(1) === Seq((1, 3), (3, 8), (3, 8)))
}

test("cartesian on empty RDD") {
val a = sc.emptyRDD[Int]
val b = sc.parallelize(1 to 3)
val cartesian_result = Array.empty[(Int, Int)]
assert(a.cartesian(a).collect().toList === cartesian_result)
assert(a.cartesian(b).collect().toList === cartesian_result)
assert(b.cartesian(a).collect().toList === cartesian_result)
}

test("cartesian on non-empty RDDs") {
val a = sc.parallelize(1 to 3)
val b = sc.parallelize(2 to 4)
val c = sc.parallelize(1 to 1)
val a_cartesian_b =
Array((1, 2), (1, 3), (1, 4), (2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (3, 4))
val a_cartesian_c = Array((1, 1), (2, 1), (3, 1))
val c_cartesian_a = Array((1, 1), (1, 2), (1, 3))
assert(a.cartesian[Int](b).collect().toList.sorted === a_cartesian_b)
assert(a.cartesian[Int](c).collect().toList.sorted === a_cartesian_c)
assert(c.cartesian[Int](a).collect().toList.sorted === c_cartesian_a)
}

test("intersection") {
val all = sc.parallelize(1 to 10)
val evens = sc.parallelize(2 to 10 by 2)
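For reference, the sketch below is an illustrative stand-alone version of the behavior the new tests assert, not part of the change itself: cartesian pairs every element of one RDD with every element of the other, so any product with an empty operand is empty. It assumes Spark is on the classpath and creates its own local SparkContext; the object and variable names are made up for the example.

import org.apache.spark.{SparkConf, SparkContext}

// Illustrative sketch only: reproduces the cartesian behavior the new tests pin down,
// using a throwaway local SparkContext instead of the suite's SharedSparkContext.
object CartesianSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local[2]").setAppName("cartesian-sketch"))
    try {
      val empty = sc.emptyRDD[Int]
      val xs = sc.parallelize(1 to 3)
      val ys = sc.parallelize(2 to 4)

      // A cartesian product with an empty operand has no pairs.
      assert(empty.cartesian(xs).isEmpty())
      assert(xs.cartesian(empty).isEmpty())

      // Otherwise each (x, y) combination appears exactly once: 3 x 3 = 9 pairs here.
      val pairs = xs.cartesian(ys).collect().toList.sorted
      assert(pairs.length == 9)
      assert(pairs.head == (1, 2) && pairs.last == (3, 4))
    } finally {
      sc.stop()
    }
  }
}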