10 changes: 5 additions & 5 deletions LICENSE
@@ -249,11 +249,11 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
 (Interpreter classes (all .scala files in repl/src/main/scala
 except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
 and for SerializableMapWrapper in JavaUtils.scala)
-(BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
+(BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.11 - http://www.scala-lang.org/)
+(BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.11 - http://www.scala-lang.org/)
+(BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.11 - http://www.scala-lang.org/)
+(BSD-like) Scala Library (org.scala-lang:scala-library:2.11.11 - http://www.scala-lang.org/)
+(BSD-like) Scalap (org.scala-lang:scalap:2.11.11 - http://www.scala-lang.org/)
 (BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
 (BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
 (BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)
6 changes: 3 additions & 3 deletions build/mvn
@@ -91,13 +91,13 @@ install_mvn() {
 
 # Install zinc under the build/ folder
 install_zinc() {
-  local zinc_path="zinc-0.3.11/bin/zinc"
+  local zinc_path="zinc-0.3.15/bin/zinc"
   [ ! -f "${_DIR}/${zinc_path}" ] && ZINC_INSTALL_FLAG=1
   local TYPESAFE_MIRROR=${TYPESAFE_MIRROR:-https://downloads.typesafe.com}
 
   install_app \
-    "${TYPESAFE_MIRROR}/zinc/0.3.11" \
-    "zinc-0.3.11.tgz" \
+    "${TYPESAFE_MIRROR}/zinc/0.3.15" \
+    "zinc-0.3.15.tgz" \
     "${zinc_path}"
   ZINC_BIN="${_DIR}/${zinc_path}"
 }
8 changes: 4 additions & 4 deletions dev/deps/spark-deps-hadoop-2.6
@@ -158,12 +158,12 @@ pmml-schema-1.2.15.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.4.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
+scala-compiler-2.11.11.jar
+scala-library-2.11.11.jar
 scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-reflect-2.11.11.jar
 scala-xml_2.11-1.0.2.jar
-scalap-2.11.8.jar
+scalap-2.11.11.jar
 shapeless_2.11-2.3.2.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
8 changes: 4 additions & 4 deletions dev/deps/spark-deps-hadoop-2.7
@@ -159,12 +159,12 @@ pmml-schema-1.2.15.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.4.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
+scala-compiler-2.11.11.jar
+scala-library-2.11.11.jar
 scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-reflect-2.11.11.jar
 scala-xml_2.11-1.0.2.jar
-scalap-2.11.8.jar
+scalap-2.11.11.jar
 shapeless_2.11-2.3.2.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
2 changes: 1 addition & 1 deletion docs/_config.yml
@@ -17,7 +17,7 @@ include:
 SPARK_VERSION: 2.3.0-SNAPSHOT
 SPARK_VERSION_SHORT: 2.3.0
 SCALA_BINARY_VERSION: "2.11"
-SCALA_VERSION: "2.11.8"
+SCALA_VERSION: "2.11.11"
 MESOS_VERSION: 1.0.0
 SPARK_ISSUE_TRACKER_URL: https://issues.apache.org/jira/browse/SPARK
 SPARK_GITHUB_URL: https://github.com/apache/spark
2 changes: 1 addition & 1 deletion external/docker/spark-test/base/Dockerfile
@@ -25,7 +25,7 @@ RUN apt-get update && \
     apt-get install -y less openjdk-7-jre-headless net-tools vim-tiny sudo openssh-server && \
     rm -rf /var/lib/apt/lists/*
 
-ENV SCALA_VERSION 2.11.8
+ENV SCALA_VERSION 2.11.11
 ENV CDH_VERSION cdh4
 ENV SCALA_HOME /opt/scala-$SCALA_VERSION
 ENV SPARK_HOME /opt/spark
4 changes: 2 additions & 2 deletions pom.xml
@@ -154,7 +154,7 @@
     <commons.math3.version>3.4.1</commons.math3.version>
     <!-- managed up from 3.2.1 for SPARK-11652 -->
     <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.11.8</scala.version>
+    <scala.version>2.11.11</scala.version>
     <scala.binary.version>2.11</scala.binary.version>
     <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
     <fasterxml.jackson.version>2.6.5</fasterxml.jackson.version>
@@ -2607,7 +2607,7 @@
       <property><name>!scala-2.10</name></property>
     </activation>
     <properties>
-      <scala.version>2.11.8</scala.version>
+      <scala.version>2.11.11</scala.version>
       <scala.binary.version>2.11</scala.binary.version>
       <jline.version>2.12.1</jline.version>
       <jline.groupid>jline</jline.groupid>
@@ -90,13 +90,15 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   override def commands: List[LoopCommand] = standardCommands
 
   /**
-   * We override `loadFiles` because we need to initialize Spark *before* the REPL
+   * We override `createInterpreter` because we need to initialize Spark *before* the REPL
    * sees any files, so that the Spark context is visible in those files. This is a bit of a
-   * hack, but there isn't another hook available to us at this point.
+   * hack, but there isn't another hook available to us at this point -- at least,
+   * not one that works across versions of Scala 2.11.
+   * TODO: use `loopPostInit()`, probably, in Scala 2.12+ or when 2.11.8 support is dropped.
    */
-  override def loadFiles(settings: Settings): Unit = {
+  override def createInterpreter(): Unit = {
+    super.createInterpreter()
     initializeSpark()
-    super.loadFiles(settings)
   }
 
   override def resetCommand(line: String): Unit = {
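
The SparkILoop hunk above is the only non-mechanical change in this diff: Spark setup moves from a `loadFiles` override to a `createInterpreter` override so that `initializeSpark()` still runs before the REPL processes any user files, using a hook that behaves the same across Scala 2.11 point releases. Below is a minimal, self-contained sketch of that hook-ordering pattern. It uses stand-in classes, not the real `ILoop`/`SparkILoop` API; the names HookOrderingSketch, BaseRepl, SparkLikeRepl, and loadFiles(files: Seq[String]) are illustrative only.

// Hook-ordering sketch only: BaseRepl and SparkLikeRepl are illustrative stand-ins,
// not Scala's real ILoop or Spark's SparkILoop.
object HookOrderingSketch {

  class BaseRepl {
    def createInterpreter(): Unit = println("base: interpreter created")
    def loadFiles(files: Seq[String]): Unit =
      files.foreach(f => println(s"base: loading $f"))
  }

  class SparkLikeRepl extends BaseRepl {
    private def initializeSpark(): Unit = println("spark: context ready")

    // Mirrors the change in the hunk above: run the Spark setup right after the
    // interpreter exists, instead of piggybacking on file loading.
    override def createInterpreter(): Unit = {
      super.createInterpreter()
      initializeSpark()
    }
  }

  def main(args: Array[String]): Unit = {
    val repl = new SparkLikeRepl
    repl.createInterpreter()           // base interpreter first, then Spark setup
    repl.loadFiles(Seq("init.scala"))  // any file loaded afterwards already sees that setup
  }
}

The design choice, as the updated doc comment puts it, is to tie the setup to the moment the interpreter is created rather than to file loading, since no other hook in the 2.11 line works consistently across versions.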