Skip to content

Commit b017c47

Browse files
committed
Added a test for SPARK-571
1 parent bd67551 commit b017c47

File tree

1 file changed

+23
-0
lines changed

1 file changed

+23
-0
lines changed

core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import org.scalatest.FunSuite
2121

2222
import org.apache.spark.LocalSparkContext._
2323
import org.apache.spark.SparkContext
24+
import org.apache.spark.SparkException
2425

2526
class ClosureCleanerSuite extends FunSuite {
2627
test("closures inside an object") {
@@ -50,6 +51,14 @@ class ClosureCleanerSuite extends FunSuite {
5051
val obj = new TestClassWithNesting(1)
5152
assert(obj.run() === 96) // 4 * (1+2+3+4) + 4 * (1+2+3+4) + 16 * 1
5253
}
54+
55+
// Regression test for SPARK-571: a `return` statement inside a closure is a
// nonlocal return out of the enclosing method, which Spark cannot ship to
// executors. The ClosureCleaner should reject it up front with a clear
// SparkException rather than letting it fail later in an opaque way.
test("return statements in closures are identified at cleaning time") {
  val thrown = intercept[SparkException] {
    TestObjectWithBogusReturns.run()
  }
  // The error message must name the actual problem so users can fix their code.
  assert(thrown.getMessage.contains("Return statements aren't allowed in Spark closures"))
}
5362
}
5463

5564
// A non-serializable class we create in closures to make sure that we aren't
@@ -108,6 +117,20 @@ class TestClassWithoutFieldAccess {
108117
}
109118
}
110119

120+
// Fixture for the SPARK-571 test above: builds a closure that deliberately
// contains a `return` statement, which Spark's ClosureCleaner is expected to
// reject at cleaning time (the companion test asserts the SparkException).
object TestObjectWithBogusReturns {
  // NOTE: the `return 1` is the whole point of this fixture — it is a
  // nonlocal return out of `badClosureWithReturn`, smuggled inside the
  // closure passed to RDD.map. Do not "clean this up"; the test depends on it.
  def badClosureWithReturn(v: org.apache.spark.rdd.RDD[Int]): Int = {
    v.map { x =>
      return 1
      x * 2
    }
    1
  }

  // Drives the bogus closure against a local SparkContext; expected to throw
  // before any job actually runs. withSpark ensures the context is stopped.
  def run(): Int = {
    withSpark(new SparkContext("local", "test")) { sc =>
      val data = sc.parallelize(Array(1, 2, 3, 4))
      badClosureWithReturn(data)
    }
  }
}
133+
111134

112135
object TestObjectWithNesting {
113136
def run(): Int = {

0 commit comments

Comments
 (0)