From b60ac3168138d5691ff0819b795b310f526e62de Mon Sep 17 00:00:00 2001
From: liuzhaokun
Date: Tue, 16 May 2017 11:14:53 +0800
Subject: [PATCH 1/3] [SPARK-20759] SCALA_VERSION in _config.yml should be
 consistent with pom.xml

---
 docs/_config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/_config.yml b/docs/_config.yml
index 21255ef7a5c4..dcc211204d76 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -17,7 +17,7 @@ include:
 SPARK_VERSION: 2.3.0-SNAPSHOT
 SPARK_VERSION_SHORT: 2.3.0
 SCALA_BINARY_VERSION: "2.11"
-SCALA_VERSION: "2.11.7"
+SCALA_VERSION: "2.11.8"
 MESOS_VERSION: 1.0.0
 SPARK_ISSUE_TRACKER_URL: https://issues.apache.org/jira/browse/SPARK
 SPARK_GITHUB_URL: https://github.com/apache/spark

From d88542e70507522a822564d20011c5adb8700ace Mon Sep 17 00:00:00 2001
From: liuzhaokun
Date: Tue, 16 May 2017 17:33:35 +0800
Subject: [PATCH 2/3] [SPARK-20759] Update remaining Scala 2.11.7 references,
 bump zinc to 0.3.15, and switch SparkILoop to createInterpreter

---
 LICENSE                                               | 10 +++++-----
 build/mvn                                             |  6 +++---
 external/docker/spark-test/base/Dockerfile            |  2 +-
 .../main/scala/org/apache/spark/repl/SparkILoop.scala |  6 +++---
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/LICENSE b/LICENSE
index c21032a1fd27..66a2e8f13295 100644
--- a/LICENSE
+++ b/LICENSE
@@ -249,11 +249,11 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
      (Interpreter classes (all .scala files in repl/src/main/scala
       except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
       and for SerializableMapWrapper in JavaUtils.scala)
-     (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.7 - http://www.scala-lang.org/)
-     (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.7 - http://www.scala-lang.org/)
-     (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.7 - http://www.scala-lang.org/)
-     (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.7 - http://www.scala-lang.org/)
-     (BSD-like) Scalap (org.scala-lang:scalap:2.11.7 - http://www.scala-lang.org/)
+     (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
+     (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
+     (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
+     (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
+     (BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
      (BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
      (BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
      (BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)

diff --git a/build/mvn b/build/mvn
index 1e393c331dd8..efa4f9364ea5 100755
--- a/build/mvn
+++ b/build/mvn
@@ -91,13 +91,13 @@ install_mvn() {
 
 # Install zinc under the build/ folder
 install_zinc() {
-  local zinc_path="zinc-0.3.11/bin/zinc"
+  local zinc_path="zinc-0.3.15/bin/zinc"
   [ ! -f "${_DIR}/${zinc_path}" ] && ZINC_INSTALL_FLAG=1
   local TYPESAFE_MIRROR=${TYPESAFE_MIRROR:-https://downloads.typesafe.com}
   install_app \
-    "${TYPESAFE_MIRROR}/zinc/0.3.11" \
-    "zinc-0.3.11.tgz" \
+    "${TYPESAFE_MIRROR}/zinc/0.3.15" \
+    "zinc-0.3.15.tgz" \
     "${zinc_path}"
   ZINC_BIN="${_DIR}/${zinc_path}"
 }
 
diff --git a/external/docker/spark-test/base/Dockerfile b/external/docker/spark-test/base/Dockerfile
index 76f550f886ce..5a95a9387c31 100644
--- a/external/docker/spark-test/base/Dockerfile
+++ b/external/docker/spark-test/base/Dockerfile
@@ -25,7 +25,7 @@ RUN apt-get update && \
     apt-get install -y less openjdk-7-jre-headless net-tools vim-tiny sudo openssh-server && \
     rm -rf /var/lib/apt/lists/*
 
-ENV SCALA_VERSION 2.11.7
+ENV SCALA_VERSION 2.11.8
 ENV CDH_VERSION cdh4
 ENV SCALA_HOME /opt/scala-$SCALA_VERSION
 ENV SPARK_HOME /opt/spark

diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 76a66c1beada..c4f6b2af591f 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -97,13 +97,13 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   override def commands: List[LoopCommand] = sparkStandardCommands
 
   /**
-   * We override `loadFiles` because we need to initialize Spark *before* the REPL
+   * We override `createInterpreter` because we need to initialize Spark *before* the REPL
    * sees any files, so that the Spark context is visible in those files. This is a bit of a
    * hack, but there isn't another hook available to us at this point.
    */
-  override def loadFiles(settings: Settings): Unit = {
+  override def createInterpreter(): Unit = {
+    super.createInterpreter()
     initializeSpark()
-    super.loadFiles(settings)
   }
 
   override def resetCommand(line: String): Unit = {

From 7f0c75a6d39632a65f112f06832cc7ad430d9b10 Mon Sep 17 00:00:00 2001
From: liuzhaokun
Date: Tue, 16 May 2017 19:40:04 +0800
Subject: [PATCH 3/3] [SPARK-20759] Restore build/mvn and SparkILoop.scala,
 reverting the unrelated zinc and REPL changes

---
 build/mvn                                                 | 6 +++---
 .../src/main/scala/org/apache/spark/repl/SparkILoop.scala | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/build/mvn b/build/mvn
index efa4f9364ea5..1e393c331dd8 100755
--- a/build/mvn
+++ b/build/mvn
@@ -91,13 +91,13 @@ install_mvn() {
 
 # Install zinc under the build/ folder
 install_zinc() {
-  local zinc_path="zinc-0.3.15/bin/zinc"
+  local zinc_path="zinc-0.3.11/bin/zinc"
   [ ! -f "${_DIR}/${zinc_path}" ] && ZINC_INSTALL_FLAG=1
   local TYPESAFE_MIRROR=${TYPESAFE_MIRROR:-https://downloads.typesafe.com}
   install_app \
-    "${TYPESAFE_MIRROR}/zinc/0.3.15" \
-    "zinc-0.3.15.tgz" \
+    "${TYPESAFE_MIRROR}/zinc/0.3.11" \
+    "zinc-0.3.11.tgz" \
     "${zinc_path}"
   ZINC_BIN="${_DIR}/${zinc_path}"
 }
 
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index c4f6b2af591f..76a66c1beada 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -97,13 +97,13 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   override def commands: List[LoopCommand] = sparkStandardCommands
 
   /**
-   * We override `createInterpreter` because we need to initialize Spark *before* the REPL
+   * We override `loadFiles` because we need to initialize Spark *before* the REPL
    * sees any files, so that the Spark context is visible in those files. This is a bit of a
    * hack, but there isn't another hook available to us at this point.
    */
-  override def createInterpreter(): Unit = {
-    super.createInterpreter()
+  override def loadFiles(settings: Settings): Unit = {
     initializeSpark()
+    super.loadFiles(settings)
   }
 
   override def resetCommand(line: String): Unit = {
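
The invariant behind [SPARK-20759] is that the Scala version advertised to the
docs build (SCALA_VERSION in docs/_config.yml) stays in sync with the version
that pom.xml actually builds against. Below is a minimal sketch of a
consistency check, not part of the patches above; the script name and the
assumption that pom.xml declares the version in a <scala.version> property are
illustrative only.

    #!/usr/bin/env bash
    # check-scala-version.sh (hypothetical helper, not part of these patches):
    # fail when docs/_config.yml and pom.xml disagree about the Scala version.
    # Run from the repository root.
    set -euo pipefail

    # docs/_config.yml quotes the value, e.g.  SCALA_VERSION: "2.11.8"
    docs_version=$(awk -F'"' '/^SCALA_VERSION:/ {print $2; exit}' docs/_config.yml)

    # Assumes pom.xml declares <scala.version>2.11.8</scala.version>; take the
    # first occurrence of the property.
    pom_version=$(awk -F'[<>]' '/<scala.version>/ {print $3; exit}' pom.xml)

    if [ "${docs_version}" != "${pom_version}" ]; then
      echo "Scala version mismatch: docs/_config.yml has ${docs_version}," \
           "pom.xml has ${pom_version}" >&2
      exit 1
    fi
    echo "Scala version ${docs_version} is consistent between docs and pom.xml."

Wired into a CI job or a pre-commit hook, a check along these lines would catch
drift such as the 2.11.7/2.11.8 mismatch before it reaches review.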