From 3bc0d44a9aaf23c996f840dd26792cff341b828b Mon Sep 17 00:00:00 2001
From: Bryn Keller
Date: Wed, 26 Feb 2014 21:51:46 -0800
Subject: [PATCH] Added a unit test for PairRDDFunctions.lookup

---
 .../spark/rdd/PairRDDFunctionsSuite.scala | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
index e3e23775f011d..85e8eb5dc3a1e 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
@@ -347,6 +347,32 @@ class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
      */
     pairs.saveAsNewAPIHadoopFile[ConfigTestFormat]("ignored")
   }
+
+  test("lookup") {
+    val pairs = sc.parallelize(Array((1,2), (3,4), (5,6), (5,7)))
+
+    assert(pairs.partitioner === None)
+    assert(pairs.lookup(1) === Seq(2))
+    assert(pairs.lookup(5) === Seq(6,7))
+    assert(pairs.lookup(-1) === Seq())
+
+  }
+
+  test("lookup with partitioner") {
+    val pairs = sc.parallelize(Array((1,2), (3,4), (5,6), (5,7)))
+
+    val p = new Partitioner {
+      def numPartitions: Int = 2
+
+      def getPartition(key: Any): Int = Math.abs(key.hashCode() % 2)
+    }
+    val shuffled = pairs.partitionBy(p)
+
+    assert(shuffled.partitioner === Some(p))
+    assert(shuffled.lookup(1) === Seq(2))
+    assert(shuffled.lookup(5) === Seq(6,7))
+    assert(shuffled.lookup(-1) === Seq())
+  }
 }
 
 /*
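
The patch covers both the unpartitioned and the partitioned code paths of lookup. As a minimal sketch of how the same API might be exercised from application code (not part of the patch), the snippet below assumes an existing SparkContext named sc and uses HashPartitioner instead of the anonymous Partitioner defined in the test:

  // Illustrative sketch only; assumes a running SparkContext `sc`.
  import org.apache.spark.HashPartitioner
  import org.apache.spark.SparkContext._  // pair-RDD implicits (Spark 0.9/1.x era)

  val pairs = sc.parallelize(Seq((1, 2), (3, 4), (5, 6), (5, 7)))

  // No partitioner: lookup has to scan every partition for the key.
  pairs.lookup(5)    // expected Seq(6, 7)

  // With a known partitioner, lookup can restrict work to the single
  // partition the key maps to.
  val byKey = pairs.partitionBy(new HashPartitioner(2))
  byKey.lookup(5)    // expected Seq(6, 7)
  byKey.lookup(-1)   // expected Seq() for a missing key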