
Commit 9451fed

Andrew Or authored and JoshRosen committed
[SPARK-13344][TEST] Fix harmless accumulator not found exceptions
See [JIRA](https://issues.apache.org/jira/browse/SPARK-13344) for more detail. This was caused by #10835.

Author: Andrew Or <[email protected]>

Closes #11222 from andrewor14/fix-test-accum-exceptions.
1 parent: 97ee85d

File tree

3 files changed: +30 / -4 lines

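Before the per-file diffs, a note on the pattern they apply: each suite clears shared state in a ScalaTest afterEach override, wrapping the cleanup in try/finally so the parent teardown still runs even if the cleanup itself throws. Below is a minimal, self-contained sketch of that pattern. SimpleRegistry and RegistryCleanupSuite are hypothetical names introduced only for illustration; they stand in for Spark's Accumulators object and its test suites rather than reproducing them.

    import scala.collection.mutable

    import org.scalatest.{BeforeAndAfterEach, FunSuite}

    // Hypothetical stand-in for Spark's internal Accumulators registry,
    // included only to keep the example self-contained.
    object SimpleRegistry {
      private val entries = mutable.Map.empty[Long, String]
      def register(id: Long, name: String): Unit = { entries(id) = name }
      def size: Int = entries.size
      def clear(): Unit = entries.clear()
    }

    class RegistryCleanupSuite extends FunSuite with BeforeAndAfterEach {

      override def afterEach(): Unit = {
        try {
          // Drop per-test registrations so state never leaks into the next test.
          SimpleRegistry.clear()
        } finally {
          // Always run the parent teardown, even if clear() throws.
          super.afterEach()
        }
      }

      test("registrations do not leak across tests") {
        SimpleRegistry.register(1L, "counter")
        assert(SimpleRegistry.size === 1)
      }
    }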

core/src/test/scala/org/apache/spark/AccumulatorSuite.scala

Lines changed: 8 additions & 0 deletions
@@ -34,6 +34,14 @@ import org.apache.spark.serializer.JavaSerializer
 class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   import AccumulatorParam._
 
+  override def afterEach(): Unit = {
+    try {
+      Accumulators.clear()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =
     new AccumulableParam[mutable.Set[A], A] {
       def addInPlace(t1: mutable.Set[A], t2: mutable.Set[A]) : mutable.Set[A] = {

core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala

Lines changed: 8 additions & 0 deletions
@@ -28,6 +28,14 @@ class InternalAccumulatorSuite extends SparkFunSuite with LocalSparkContext {
   import InternalAccumulator._
   import AccumulatorParam._
 
+  override def afterEach(): Unit = {
+    try {
+      Accumulators.clear()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   test("get param") {
     assert(getParam(EXECUTOR_DESERIALIZE_TIME) === LongAccumulatorParam)
     assert(getParam(EXECUTOR_RUN_TIME) === LongAccumulatorParam)
core/src/test/scala/org/apache/spark/SparkFunSuite.scala

Lines changed: 14 additions & 4 deletions
@@ -18,14 +18,26 @@
 package org.apache.spark
 
 // scalastyle:off
-import org.scalatest.{FunSuite, Outcome}
+import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
 
 /**
  * Base abstract class for all unit tests in Spark for handling common functionality.
  */
-private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
+private[spark] abstract class SparkFunSuite
+  extends FunSuite
+  with BeforeAndAfterAll
+  with Logging {
 // scalastyle:on
 
+  protected override def afterAll(): Unit = {
+    try {
+      // Avoid leaking map entries in tests that use accumulators without SparkContext
+      Accumulators.clear()
+    } finally {
+      super.afterAll()
+    }
+  }
+
   /**
    * Log the suite name and the test name before and after each test.
    *
@@ -42,8 +54,6 @@ private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
       test()
     } finally {
       logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
-      // Avoid leaking map entries in tests that use accumulators without SparkContext
-      Accumulators.clear()
     }
   }
 
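The SparkFunSuite change above works because ScalaTest's BeforeAndAfterAll invokes afterAll() exactly once, after every test in the suite has finished, so the shared accumulator registry is no longer wiped between individual tests. Below is a minimal sketch of that ordering, assuming only ScalaTest on the classpath; SuiteLevelTeardownSuite is an illustrative example, not Spark code.

    import org.scalatest.{BeforeAndAfterAll, FunSuite}

    // Minimal suite showing that afterAll() runs once, after all tests finish.
    class SuiteLevelTeardownSuite extends FunSuite with BeforeAndAfterAll {

      private var seen = List.empty[String]

      protected override def afterAll(): Unit = {
        try {
          // By this point both tests below have already executed.
          println(s"afterAll observed tests: ${seen.reverse.mkString(", ")}")
        } finally {
          super.afterAll()
        }
      }

      test("first") { seen ::= "first" }
      test("second") { seen ::= "second" }
    }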
