Skip to content

Commit 874cd29

Browse files
committed
[SPARK-11672][ML] flaky spark.ml read/write tests
We set `sqlContext = null` in `afterAll`. However, this doesn't change `SQLContext.activeContext`, and then `SQLContext.getOrCreate` might use the `SparkContext` from a previous test suite and hence cause the error. This PR calls `clearActive` in `beforeAll` and `afterAll` to avoid using an old context from other test suites. cc: yhuai. Author: Xiangrui Meng <[email protected]>. Closes #9677 from mengxr/SPARK-11672.2. (cherry picked from commit e71c075) Signed-off-by: Xiangrui Meng <[email protected]>
1 parent 46a536e commit 874cd29

File tree

5 files changed

+7
-5
lines changed

5 files changed

+7
-5
lines changed

mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -23,7 +23,7 @@
2323
import org.junit.After;
2424
import org.junit.Assert;
2525
import org.junit.Before;
26-
import org.junit.Ignore;
26+
import org.junit.Test;
2727

2828
import org.apache.spark.api.java.JavaSparkContext;
2929
import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public void tearDown() {
5050
Utils.deleteRecursively(tempDir);
5151
}
5252

53-
@Ignore // SPARK-11672
53+
@Test
5454
public void testDefaultReadWrite() throws IOException {
5555
String uid = "my_params";
5656
MyParams instance = new MyParams(uid);

mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
872872
assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
873873
}
874874

875-
ignore("read/write") { // SPARK-11672
875+
test("read/write") {
876876
// Set some Params to make sure set Params are serialized.
877877
val lr = new LogisticRegression()
878878
.setElasticNetParam(0.1)

mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
6868
}
6969
}
7070

71-
ignore("read/write") { // SPARK-11672
71+
test("read/write") {
7272
val binarizer = new Binarizer()
7373
.setInputCol("feature")
7474
.setOutputCol("binarized_feature")

mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
105105
class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
106106
with DefaultReadWriteTest {
107107

108-
ignore("default read/write") { // SPARK-11672
108+
test("default read/write") {
109109
val myParams = new MyParams("my_params")
110110
testDefaultReadWrite(myParams)
111111
}

mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -32,11 +32,13 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
3232
.setMaster("local[2]")
3333
.setAppName("MLlibUnitTest")
3434
sc = new SparkContext(conf)
35+
SQLContext.clearActive()
3536
sqlContext = new SQLContext(sc)
3637
}
3738

3839
override def afterAll() {
3940
sqlContext = null
41+
SQLContext.clearActive()
4042
if (sc != null) {
4143
sc.stop()
4244
}

0 commit comments

Comments (0)