From 34305ff2d2faa723f0fca527d2e3e6ab6cce8aec Mon Sep 17 00:00:00 2001
From: Xiangrui Meng
Date: Wed, 11 Nov 2015 14:40:28 -0800
Subject: [PATCH] disable spark.ml read/write suites

---
 .../org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java   | 4 ++--
 .../spark/ml/classification/LogisticRegressionSuite.scala     | 2 +-
 .../scala/org/apache/spark/ml/feature/BinarizerSuite.scala    | 2 +-
 .../scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index c39538014be8..4f7aeac1ec54 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Test;
+import org.junit.Ignore;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public void tearDown() {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Test
+  @Ignore // SPARK-11672
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index 51b06b7eb6d5..e4c2f1baa4fa 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  test("read/write") {
+  ignore("read/write") { // SPARK-11672
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index 9dfa1439cc30..a66fe0328193 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
     }
   }
 
-  test("read/write") {
+  ignore("read/write") { // SPARK-11672
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index cac4bd9aa3ab..44e09c38f937 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  test("default read/write") {
+  ignore("default read/write") { // SPARK-11672
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }
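
For reference, the Scala-side mechanism this patch relies on is ScalaTest's FunSuite (the base trait that Spark's SparkFunSuite extends): a test is registered with test(name) { body }, and swapping in ignore(name) { body } keeps the body compiling while the runner reports the test as ignored instead of executing it. Below is a minimal sketch of that swap; DemoSuite is a hypothetical standalone suite, not part of the Spark codebase, using the 2015-era org.scalatest.FunSuite import.

import org.scalatest.FunSuite

// Hypothetical suite illustrating the test -> ignore swap applied in the patch.
class DemoSuite extends FunSuite {

  test("this test runs") {
    assert(1 + 1 === 2)
  }

  // `ignore` has the same signature as `test`; the runner registers the test
  // but reports it as ignored rather than running the body (cf. SPARK-11672).
  ignore("this test is registered but skipped") {
    fail("never executed")
  }
}

The JUnit side is analogous but slightly stronger: because the patch removes the @Test annotation when adding @Ignore, the JUnit 4 runner no longer treats testDefaultReadWrite as a test at all, while the method body stays in place for when the suite is re-enabled.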