
Commit d4c30cd

concretevitamin authored and marmbrus committed
[HOTFIX] Synchronize on SQLContext.settings in tests.
Let's see if this fixes the ongoing series of test failures in a master build machine
(https://amplab.cs.berkeley.edu/jenkins/job/Spark-Master-SBT-pre-YARN/SPARK_HADOOP_VERSION=1.0.4,label=centos/81/).

pwendell marmbrus

Author: Zongheng Yang <[email protected]>

Closes #1277 from concretevitamin/test-fix and squashes the following commits:

28c88bd [Zongheng Yang] Synchronize on SQLContext.settings in tests.
1 parent 731f683 commit d4c30cd

4 files changed: +91 -83 lines changed
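
Why the lock is needed: TestSQLContext is a single shared object, so when suites run in parallel under SBT, two of them can interleave clear() and set() calls on the same SQLConf settings map and read each other's half-applied configuration. The sketch below is not part of the commit; the object and method names are invented for illustration. It shows the failure mode and the coarse-grained fix the diffs apply: each suite takes the map itself as the lock around its whole clear/set/assert sequence.

// Minimal, self-contained sketch of the race this commit guards against.
// Illustrative names only; `settings` is built the same way as in SQLConf.
// The synchronizedMap makes single operations atomic, but not a
// clear-then-set-then-read sequence, so concurrent "suites" need a coarser lock.
import java.util.{Collections, HashMap => JHashMap}

object SettingsRaceSketch {
  val settings = Collections.synchronizedMap(new JHashMap[String, String]())

  def runSuiteA(): Unit = settings.synchronized {
    // Holding the map as the lock makes the whole sequence atomic w.r.t. runSuiteB.
    settings.clear()
    settings.put("test.key.0", "test.val.0")
    assert(settings.get("test.key.0") == "test.val.0")
  }

  def runSuiteB(): Unit = settings.synchronized {
    settings.clear()
    settings.put("spark.sql.join.broadcastTables", "testData")
    assert(settings.get("spark.sql.join.broadcastTables") == "testData")
  }

  def main(args: Array[String]): Unit = {
    // Simulate a parallel test runner: both "suites" hammer the shared map at once.
    val a = new Thread(new Runnable { def run(): Unit = (1 to 1000).foreach(_ => runSuiteA()) })
    val b = new Thread(new Runnable { def run(): Unit = (1 to 1000).foreach(_ => runSuiteB()) })
    a.start(); b.start(); a.join(); b.join()
    // Without the synchronized blocks above, the asserts can fail intermittently.
    println("completed with no interleaved clear()/set()")
  }
}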

sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala

Lines changed: 1 addition & 1 deletion

@@ -50,7 +50,7 @@ trait SQLConf {
   /** ********************** SQLConf functionality methods ************ */

   @transient
-  private val settings = java.util.Collections.synchronizedMap(
+  protected[sql] val settings = java.util.Collections.synchronizedMap(
     new java.util.HashMap[String, String]())

   def set(props: Properties): Unit = {
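
The one-line change above only widens visibility: settings goes from private to protected[sql], with no behavioral change, so that test suites in the org.apache.spark.sql package can reach the map and use it as a shared lock object. A rough sketch of the resulting pattern, written as a fragment meant to sit inside a suite that already has TestSQLContext and the SQLConf helpers (clear/set/get) in scope, as the suites below do:

// Take the shared settings map as the lock before touching SQLConf state.
TestSQLContext.settings.synchronized {
  clear()                                  // reset shared SQLConf state under the lock
  set("some.test.key", "some.test.value")  // hypothetical key/value, for illustration
  assert(get("some.test.key", "") == "some.test.value")
}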

sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala

Lines changed: 21 additions & 19 deletions

@@ -39,25 +39,27 @@ class JoinSuite extends QueryTest {
   test("plans broadcast hash join, given hints") {

     def mkTest(buildSide: BuildSide, leftTable: String, rightTable: String) = {
-      TestSQLContext.set("spark.sql.join.broadcastTables",
-        s"${if (buildSide == BuildRight) rightTable else leftTable}")
-      val rdd = sql(s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
-      // Using `sparkPlan` because for relevant patterns in HashJoin to be
-      // matched, other strategies need to be applied.
-      val physical = rdd.queryExecution.sparkPlan
-      val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j }
-
-      assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
-      checkAnswer(
-        rdd,
-        Seq(
-          (1, "1", 1, 1),
-          (1, "1", 1, 2),
-          (2, "2", 2, 1),
-          (2, "2", 2, 2),
-          (3, "3", 3, 1),
-          (3, "3", 3, 2)
-        ))
+      TestSQLContext.settings.synchronized {
+        TestSQLContext.set("spark.sql.join.broadcastTables",
+          s"${if (buildSide == BuildRight) rightTable else leftTable}")
+        val rdd = sql(s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
+        // Using `sparkPlan` because for relevant patterns in HashJoin to be
+        // matched, other strategies need to be applied.
+        val physical = rdd.queryExecution.sparkPlan
+        val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j }
+
+        assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
+        checkAnswer(
+          rdd,
+          Seq(
+            (1, "1", 1, 1),
+            (1, "1", 1, 2),
+            (2, "2", 2, 1),
+            (2, "2", 2, 2),
+            (3, "3", 3, 1),
+            (3, "3", 3, 2)
+          ))
+      }
     }

     mkTest(BuildRight, "testData", "testData2")

sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala

Lines changed: 34 additions & 30 deletions

@@ -28,46 +28,50 @@ class SQLConfSuite extends QueryTest {
   val testVal = "test.val.0"

   test("programmatic ways of basic setting and getting") {
-    clear()
-    assert(getOption(testKey).isEmpty)
-    assert(getAll.toSet === Set())
+    TestSQLContext.settings.synchronized {
+      clear()
+      assert(getOption(testKey).isEmpty)
+      assert(getAll.toSet === Set())

-    set(testKey, testVal)
-    assert(get(testKey) == testVal)
-    assert(get(testKey, testVal + "_") == testVal)
-    assert(getOption(testKey) == Some(testVal))
-    assert(contains(testKey))
+      set(testKey, testVal)
+      assert(get(testKey) == testVal)
+      assert(get(testKey, testVal + "_") == testVal)
+      assert(getOption(testKey) == Some(testVal))
+      assert(contains(testKey))

-    // Tests SQLConf as accessed from a SQLContext is mutable after
-    // the latter is initialized, unlike SparkConf inside a SparkContext.
-    assert(TestSQLContext.get(testKey) == testVal)
-    assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
-    assert(TestSQLContext.getOption(testKey) == Some(testVal))
-    assert(TestSQLContext.contains(testKey))
+      // Tests SQLConf as accessed from a SQLContext is mutable after
+      // the latter is initialized, unlike SparkConf inside a SparkContext.
+      assert(TestSQLContext.get(testKey) == testVal)
+      assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
+      assert(TestSQLContext.getOption(testKey) == Some(testVal))
+      assert(TestSQLContext.contains(testKey))

-    clear()
+      clear()
+    }
   }

   test("parse SQL set commands") {
-    clear()
-    sql(s"set $testKey=$testVal")
-    assert(get(testKey, testVal + "_") == testVal)
-    assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
+    TestSQLContext.settings.synchronized {
+      clear()
+      sql(s"set $testKey=$testVal")
+      assert(get(testKey, testVal + "_") == testVal)
+      assert(TestSQLContext.get(testKey, testVal + "_") == testVal)

-    sql("set mapred.reduce.tasks=20")
-    assert(get("mapred.reduce.tasks", "0") == "20")
-    sql("set mapred.reduce.tasks = 40")
-    assert(get("mapred.reduce.tasks", "0") == "40")
+      sql("set mapred.reduce.tasks=20")
+      assert(get("mapred.reduce.tasks", "0") == "20")
+      sql("set mapred.reduce.tasks = 40")
+      assert(get("mapred.reduce.tasks", "0") == "40")

-    val key = "spark.sql.key"
-    val vs = "val0,val_1,val2.3,my_table"
-    sql(s"set $key=$vs")
-    assert(get(key, "0") == vs)
+      val key = "spark.sql.key"
+      val vs = "val0,val_1,val2.3,my_table"
+      sql(s"set $key=$vs")
+      assert(get(key, "0") == vs)

-    sql(s"set $key=")
-    assert(get(key, "0") == "")
+      sql(s"set $key=")
+      assert(get(key, "0") == "")

-    clear()
+      clear()
+    }
   }

 }

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 35 additions & 33 deletions

@@ -372,38 +372,40 @@ class SQLQuerySuite extends QueryTest {
   }

   test("SET commands semantics using sql()") {
-    clear()
-    val testKey = "test.key.0"
-    val testVal = "test.val.0"
-    val nonexistentKey = "nonexistent"
-
-    // "set" itself returns all config variables currently specified in SQLConf.
-    assert(sql("SET").collect().size == 0)
-
-    // "set key=val"
-    sql(s"SET $testKey=$testVal")
-    checkAnswer(
-      sql("SET"),
-      Seq(Seq(testKey, testVal))
-    )
-
-    sql(s"SET ${testKey + testKey}=${testVal + testVal}")
-    checkAnswer(
-      sql("set"),
-      Seq(
-        Seq(testKey, testVal),
-        Seq(testKey + testKey, testVal + testVal))
-    )
-
-    // "set key"
-    checkAnswer(
-      sql(s"SET $testKey"),
-      Seq(Seq(testKey, testVal))
-    )
-    checkAnswer(
-      sql(s"SET $nonexistentKey"),
-      Seq(Seq(nonexistentKey, "<undefined>"))
-    )
-    clear()
+    TestSQLContext.settings.synchronized {
+      clear()
+      val testKey = "test.key.0"
+      val testVal = "test.val.0"
+      val nonexistentKey = "nonexistent"
+
+      // "set" itself returns all config variables currently specified in SQLConf.
+      assert(sql("SET").collect().size == 0)
+
+      // "set key=val"
+      sql(s"SET $testKey=$testVal")
+      checkAnswer(
+        sql("SET"),
+        Seq(Seq(testKey, testVal))
+      )
+
+      sql(s"SET ${testKey + testKey}=${testVal + testVal}")
+      checkAnswer(
+        sql("set"),
+        Seq(
+          Seq(testKey, testVal),
+          Seq(testKey + testKey, testVal + testVal))
+      )
+
+      // "set key"
+      checkAnswer(
+        sql(s"SET $testKey"),
+        Seq(Seq(testKey, testVal))
+      )
+      checkAnswer(
+        sql(s"SET $nonexistentKey"),
+        Seq(Seq(nonexistentKey, "<undefined>"))
+      )
+      clear()
+    }
   }
 }
