From 2b688079a9b5fbe025275878f3b90a05fae98be4 Mon Sep 17 00:00:00 2001
From: Xiangrui Meng
Date: Fri, 13 Nov 2015 08:57:47 -0800
Subject: [PATCH 1/2] do not use SQLContext.getOrCreate

---
 .../main/scala/org/apache/spark/ml/util/ReadWrite.scala | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
index 85f888c9f2f6..0e293022dd34 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
@@ -48,8 +48,11 @@ private[util] sealed trait BaseReadWrite {
   /**
    * Returns the user-specified SQL context or the default.
    */
-  protected final def sqlContext: SQLContext = optionSQLContext.getOrElse {
-    SQLContext.getOrCreate(SparkContext.getOrCreate())
+  protected final def sqlContext: SQLContext = {
+    if (optionSQLContext.isEmpty) {
+      optionSQLContext = Some(new SQLContext(SparkContext.getOrCreate()))
+    }
+    optionSQLContext.get
   }
 
   /** Returns the [[SparkContext]] underlying [[sqlContext]] */

From 1a2a8661adb01baf5bd18ea400833441732fbd5d Mon Sep 17 00:00:00 2001
From: Xiangrui Meng
Date: Fri, 13 Nov 2015 12:02:01 -0800
Subject: [PATCH 2/2] set active SQLContext manually

---
 mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala | 2 +-
 .../org/apache/spark/mllib/util/MLlibTestSparkContext.scala   | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
index 0e293022dd34..ca896ed6106c 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala
@@ -50,7 +50,7 @@ private[util] sealed trait BaseReadWrite {
    */
   protected final def sqlContext: SQLContext = {
     if (optionSQLContext.isEmpty) {
-      optionSQLContext = Some(new SQLContext(SparkContext.getOrCreate()))
+      optionSQLContext = Some(SQLContext.getOrCreate(SparkContext.getOrCreate()))
     }
     optionSQLContext.get
   }
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
index 998ee4818655..378139593b26 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
@@ -34,6 +34,7 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
     sc = new SparkContext(conf)
     SQLContext.clearActive()
     sqlContext = new SQLContext(sc)
+    SQLContext.setActive(sqlContext)
   }
 
   override def afterAll() {