
Commit 8783ab0

Remove TestUtils.withSystemProperty, since it is subsumed by the ResetSystemProperties trait.
1 parent 633a84a commit 8783ab0
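
The ResetSystemProperties trait itself is not part of this diff, so the following is only a minimal sketch of the idea it implements, assuming a ScalaTest BeforeAndAfterEach hook: snapshot the JVM's system properties before each test and restore the snapshot afterwards. Spark's trait lives at org.apache.spark.util.ResetSystemProperties; the body below is an illustration, not the actual implementation.

import java.util.Properties

import org.scalatest.{BeforeAndAfterEach, Suite}

// Sketch only: the real org.apache.spark.util.ResetSystemProperties may differ in detail.
trait ResetSystemPropertiesSketch extends BeforeAndAfterEach { this: Suite =>

  private var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // System.getProperties returns a live, mutable reference, so take a copy
    // before the test body mutates anything.
    oldProperties = new Properties()
    oldProperties.putAll(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      // Restore the snapshot so per-test property changes cannot leak into later tests.
      System.setProperties(oldProperties)
    }
  }
}

With such a trait mixed in, a test may call System.setProperty freely, which is exactly what the rewritten suites below rely on.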

File tree: 3 files changed, +33 -51 lines


core/src/main/scala/org/apache/spark/TestUtils.scala

Lines changed: 0 additions & 15 deletions
@@ -107,19 +107,4 @@ private[spark] object TestUtils {
     assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath())
     out
   }
-
-  /** Allows system properties to be changed in tests */
-  def withSystemProperty[T](property: String, value: String)(block: => T): T = {
-    val originalValue = System.getProperty(property)
-    try {
-      System.setProperty(property, value)
-      block
-    } finally {
-      if (originalValue == null) {
-        System.clearProperty(property)
-      } else {
-        System.setProperty(property, originalValue)
-      }
-    }
-  }
 }

core/src/test/scala/org/apache/spark/ShuffleSuite.scala

Lines changed: 9 additions & 10 deletions
@@ -35,16 +35,15 @@ abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContex
   conf.set("spark.test.noStageRetry", "true")
 
   test("groupByKey without compression") {
-    TestUtils.withSystemProperty("spark.shuffle.compress", "false") {
-      sc = new SparkContext("local", "test", conf)
-      val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1)), 4)
-      val groups = pairs.groupByKey(4).collect()
-      assert(groups.size === 2)
-      val valuesFor1 = groups.find(_._1 == 1).get._2
-      assert(valuesFor1.toList.sorted === List(1, 2, 3))
-      val valuesFor2 = groups.find(_._1 == 2).get._2
-      assert(valuesFor2.toList.sorted === List(1))
-    }
+    val myConf = conf.clone().set("spark.shuffle.compress", "false")
+    sc = new SparkContext("local", "test", myConf)
+    val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1)), 4)
+    val groups = pairs.groupByKey(4).collect()
+    assert(groups.size === 2)
+    val valuesFor1 = groups.find(_._1 == 1).get._2
+    assert(valuesFor1.toList.sorted === List(1, 2, 3))
+    val valuesFor2 = groups.find(_._1 == 2).get._2
+    assert(valuesFor2.toList.sorted === List(1))
   }
 
   test("shuffle non-zero block size") {

core/src/test/scala/org/apache/spark/SparkContextSuite.scala

Lines changed: 24 additions & 26 deletions
@@ -17,46 +17,44 @@
 
 package org.apache.spark
 
+import org.apache.spark.util.ResetSystemProperties
 import org.scalatest.FunSuite
 
 import org.apache.hadoop.io.BytesWritable
 
-class SparkContextSuite extends FunSuite with LocalSparkContext {
+class SparkContextSuite extends FunSuite with ResetSystemProperties with LocalSparkContext {
 
   test("Only one SparkContext may be active at a time") {
     // Regression test for SPARK-4180
-    TestUtils.withSystemProperty("spark.driver.allowMultipleContexts", "false") {
-      val conf = new SparkConf().setAppName("test").setMaster("local")
-      sc = new SparkContext(conf)
-      // A SparkContext is already running, so we shouldn't be able to create a second one
-      intercept[SparkException] { new SparkContext(conf) }
-      // After stopping the running context, we should be able to create a new one
-      resetSparkContext()
-      sc = new SparkContext(conf)
-    }
+    System.setProperty("spark.driver.allowMultipleContexts", "false")
+    val conf = new SparkConf().setAppName("test").setMaster("local")
+    sc = new SparkContext(conf)
+    // A SparkContext is already running, so we shouldn't be able to create a second one
+    intercept[SparkException] { new SparkContext(conf) }
+    // After stopping the running context, we should be able to create a new one
+    resetSparkContext()
+    sc = new SparkContext(conf)
   }
 
   test("Can still construct a new SparkContext after failing to construct a previous one") {
-    TestUtils.withSystemProperty("spark.driver.allowMultipleContexts", "false") {
-      // This is an invalid configuration (no app name or master URL)
-      intercept[SparkException] {
-        new SparkContext(new SparkConf())
-      }
-      // Even though those earlier calls failed, we should still be able to create a new context
-      sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
+    System.setProperty("spark.driver.allowMultipleContexts", "false")
+    // This is an invalid configuration (no app name or master URL)
+    intercept[SparkException] {
+      new SparkContext(new SparkConf())
     }
+    // Even though those earlier calls failed, we should still be able to create a new context
+    sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
   }
 
   test("Check for multiple SparkContexts can be disabled via undocumented debug option") {
-    TestUtils.withSystemProperty("spark.driver.allowMultipleContexts", "true") {
-      var secondSparkContext: SparkContext = null
-      try {
-        val conf = new SparkConf().setAppName("test").setMaster("local")
-        sc = new SparkContext(conf)
-        secondSparkContext = new SparkContext(conf)
-      } finally {
-        Option(secondSparkContext).foreach(_.stop())
-      }
+    System.setProperty("spark.driver.allowMultipleContexts", "true")
+    var secondSparkContext: SparkContext = null
+    try {
+      val conf = new SparkConf().setAppName("test").setMaster("local")
+      sc = new SparkContext(conf)
+      secondSparkContext = new SparkContext(conf)
+    } finally {
+      Option(secondSparkContext).foreach(_.stop())
     }
   }
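
Because SparkContextSuite now mixes in ResetSystemProperties, the bare System.setProperty calls above are safe: whatever a test sets is rolled back by the trait's after-each hook, which is what made the block-scoped TestUtils.withSystemProperty helper redundant. A hypothetical minimal suite showing the pattern:

import org.scalatest.FunSuite
import org.apache.spark.util.ResetSystemProperties

// Hypothetical suite: the property set here is restored once the test finishes.
class PropertyIsolationSuite extends FunSuite with ResetSystemProperties {
  test("property changes do not leak across tests") {
    System.setProperty("spark.test.hypothetical", "on")
    assert(System.getProperty("spark.test.hypothetical") === "on")
  }
}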
