Skip to content

Commit 4e7c9e3

Browse files
committed
Merge remote-tracking branch 'upstream/master' into pyspark-inputformats
Conflicts: core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala project/SparkBuild.scala
2 parents c304cc8 + 7db9165 commit 4e7c9e3

File tree

224 files changed

+3985
-3038
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

224 files changed

+3985
-3038
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -41,3 +41,4 @@ derby.log
4141
dist/
4242
spark-*-bin.tar.gz
4343
unit-tests.log
44+
lib/

README.md

Lines changed: 7 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -12,9 +12,8 @@ This README file only contains basic setup instructions.
1212

1313
## Building
1414

15-
Spark requires Scala 2.9.3 (Scala 2.10 is not yet supported). The project is
16-
built using Simple Build Tool (SBT), which is packaged with it. To build
17-
Spark and its example programs, run:
15+
Spark requires Scala 2.10. The project is built using Simple Build Tool (SBT),
16+
which is packaged with it. To build Spark and its example programs, run:
1817

1918
sbt/sbt assembly
2019

@@ -55,7 +54,7 @@ versions without YARN, use:
5554
# Cloudera CDH 4.2.0 with MapReduce v1
5655
$ SPARK_HADOOP_VERSION=2.0.0-mr1-cdh4.2.0 sbt/sbt assembly
5756

58-
For Apache Hadoop 2.x, 0.23.x, Cloudera CDH MRv2, and other Hadoop versions
57+
For Apache Hadoop 2.0.X, 2.1.X, 0.23.x, Cloudera CDH MRv2, and other Hadoop versions
5958
with YARN, also set `SPARK_YARN=true`:
6059

6160
# Apache Hadoop 2.0.5-alpha
@@ -64,8 +63,10 @@ with YARN, also set `SPARK_YARN=true`:
6463
# Cloudera CDH 4.2.0 with MapReduce v2
6564
$ SPARK_HADOOP_VERSION=2.0.0-cdh4.2.0 SPARK_YARN=true sbt/sbt assembly
6665

67-
For convenience, these variables may also be set through the `conf/spark-env.sh` file
68-
described below.
66+
When building for Hadoop 2.2.X and newer, you'll need to include the additional `new-yarn` profile:
67+
68+
# Apache Hadoop 2.2.X and newer
69+
$ mvn -Dyarn.version=2.2.0 -Dhadoop.version=2.2.0 -Pnew-yarn
6970

7071
When developing a Spark application, specify the Hadoop version by adding the
7172
"hadoop-client" artifact to your project's dependencies. For example, if you're

assembly/pom.xml

Lines changed: 8 additions & 8 deletions
Original file line number · Diff line number · Diff line change
@@ -26,7 +26,7 @@
2626
</parent>
2727

2828
<groupId>org.apache.spark</groupId>
29-
<artifactId>spark-assembly_2.9.3</artifactId>
29+
<artifactId>spark-assembly_2.10</artifactId>
3030
<name>Spark Project Assembly</name>
3131
<url>http://spark.incubator.apache.org/</url>
3232

@@ -41,27 +41,27 @@
4141
<dependencies>
4242
<dependency>
4343
<groupId>org.apache.spark</groupId>
44-
<artifactId>spark-core_2.9.3</artifactId>
44+
<artifactId>spark-core_2.10</artifactId>
4545
<version>${project.version}</version>
4646
</dependency>
4747
<dependency>
4848
<groupId>org.apache.spark</groupId>
49-
<artifactId>spark-bagel_2.9.3</artifactId>
49+
<artifactId>spark-bagel_2.10</artifactId>
5050
<version>${project.version}</version>
5151
</dependency>
5252
<dependency>
5353
<groupId>org.apache.spark</groupId>
54-
<artifactId>spark-mllib_2.9.3</artifactId>
54+
<artifactId>spark-mllib_2.10</artifactId>
5555
<version>${project.version}</version>
5656
</dependency>
5757
<dependency>
5858
<groupId>org.apache.spark</groupId>
59-
<artifactId>spark-repl_2.9.3</artifactId>
59+
<artifactId>spark-repl_2.10</artifactId>
6060
<version>${project.version}</version>
6161
</dependency>
6262
<dependency>
6363
<groupId>org.apache.spark</groupId>
64-
<artifactId>spark-streaming_2.9.3</artifactId>
64+
<artifactId>spark-streaming_2.10</artifactId>
6565
<version>${project.version}</version>
6666
</dependency>
6767
<dependency>
@@ -79,7 +79,7 @@
7979
<artifactId>maven-shade-plugin</artifactId>
8080
<configuration>
8181
<shadedArtifactAttached>false</shadedArtifactAttached>
82-
<outputFile>${project.build.directory}/scala-${scala.version}/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
82+
<outputFile>${project.build.directory}/scala-2.10/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
8383
<artifactSet>
8484
<includes>
8585
<include>*:*</include>
@@ -128,7 +128,7 @@
128128
<dependencies>
129129
<dependency>
130130
<groupId>org.apache.spark</groupId>
131-
<artifactId>spark-yarn_2.9.3</artifactId>
131+
<artifactId>spark-yarn_2.10</artifactId>
132132
<version>${project.version}</version>
133133
</dependency>
134134
</dependencies>

bagel/pom.xml

Lines changed: 6 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -26,15 +26,15 @@
2626
</parent>
2727

2828
<groupId>org.apache.spark</groupId>
29-
<artifactId>spark-bagel_2.9.3</artifactId>
29+
<artifactId>spark-bagel_2.10</artifactId>
3030
<packaging>jar</packaging>
3131
<name>Spark Project Bagel</name>
3232
<url>http://spark.incubator.apache.org/</url>
3333

3434
<dependencies>
3535
<dependency>
3636
<groupId>org.apache.spark</groupId>
37-
<artifactId>spark-core_2.9.3</artifactId>
37+
<artifactId>spark-core_2.10</artifactId>
3838
<version>${project.version}</version>
3939
</dependency>
4040
<dependency>
@@ -43,18 +43,18 @@
4343
</dependency>
4444
<dependency>
4545
<groupId>org.scalatest</groupId>
46-
<artifactId>scalatest_2.9.3</artifactId>
46+
<artifactId>scalatest_2.10</artifactId>
4747
<scope>test</scope>
4848
</dependency>
4949
<dependency>
5050
<groupId>org.scalacheck</groupId>
51-
<artifactId>scalacheck_2.9.3</artifactId>
51+
<artifactId>scalacheck_2.10</artifactId>
5252
<scope>test</scope>
5353
</dependency>
5454
</dependencies>
5555
<build>
56-
<outputDirectory>target/scala-${scala.version}/classes</outputDirectory>
57-
<testOutputDirectory>target/scala-${scala.version}/test-classes</testOutputDirectory>
56+
<outputDirectory>target/scala-2.10/classes</outputDirectory>
57+
<testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
5858
<plugins>
5959
<plugin>
6060
<groupId>org.scalatest</groupId>

bin/compute-classpath.cmd

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -20,7 +20,7 @@ rem
2020
rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
2121
rem script and the ExecutorRunner in standalone cluster mode.
2222

23-
set SCALA_VERSION=2.9.3
23+
set SCALA_VERSION=2.10
2424

2525
rem Figure out where the Spark framework is installed
2626
set FWDIR=%~dp0..\

bin/compute-classpath.sh

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -20,7 +20,7 @@
2020
# This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
2121
# script and the ExecutorRunner in standalone cluster mode.
2222

23-
SCALA_VERSION=2.9.3
23+
SCALA_VERSION=2.10
2424

2525
# Figure out where Spark is installed
2626
FWDIR="$(cd `dirname $0`/..; pwd)"

core/pom.xml

Lines changed: 15 additions & 14 deletions
Original file line number · Diff line number · Diff line change
@@ -26,7 +26,7 @@
2626
</parent>
2727

2828
<groupId>org.apache.spark</groupId>
29-
<artifactId>spark-core_2.9.3</artifactId>
29+
<artifactId>spark-core_2.10</artifactId>
3030
<packaging>jar</packaging>
3131
<name>Spark Project Core</name>
3232
<url>http://spark.incubator.apache.org/</url>
@@ -86,7 +86,7 @@
8686
</dependency>
8787
<dependency>
8888
<groupId>com.twitter</groupId>
89-
<artifactId>chill_2.9.3</artifactId>
89+
<artifactId>chill_2.10</artifactId>
9090
<version>0.3.1</version>
9191
</dependency>
9292
<dependency>
@@ -96,27 +96,23 @@
9696
</dependency>
9797
<dependency>
9898
<groupId>${akka.group}</groupId>
99-
<artifactId>akka-actor</artifactId>
99+
<artifactId>akka-actor_2.10</artifactId>
100100
</dependency>
101101
<dependency>
102102
<groupId>${akka.group}</groupId>
103-
<artifactId>akka-remote</artifactId>
103+
<artifactId>akka-remote_2.10</artifactId>
104104
</dependency>
105105
<dependency>
106106
<groupId>${akka.group}</groupId>
107-
<artifactId>akka-slf4j</artifactId>
108-
</dependency>
109-
<dependency>
110-
<groupId>org.scala-lang</groupId>
111-
<artifactId>scalap</artifactId>
107+
<artifactId>akka-slf4j_2.10</artifactId>
112108
</dependency>
113109
<dependency>
114110
<groupId>org.scala-lang</groupId>
115111
<artifactId>scala-library</artifactId>
116112
</dependency>
117113
<dependency>
118114
<groupId>net.liftweb</groupId>
119-
<artifactId>lift-json_2.9.2</artifactId>
115+
<artifactId>lift-json_2.10</artifactId>
120116
</dependency>
121117
<dependency>
122118
<groupId>it.unimi.dsi</groupId>
@@ -163,14 +159,19 @@
163159
<artifactId>derby</artifactId>
164160
<scope>test</scope>
165161
</dependency>
162+
<dependency>
163+
<groupId>commons-io</groupId>
164+
<artifactId>commons-io</artifactId>
165+
<scope>test</scope>
166+
</dependency>
166167
<dependency>
167168
<groupId>org.scalatest</groupId>
168-
<artifactId>scalatest_2.9.3</artifactId>
169+
<artifactId>scalatest_2.10</artifactId>
169170
<scope>test</scope>
170171
</dependency>
171172
<dependency>
172173
<groupId>org.scalacheck</groupId>
173-
<artifactId>scalacheck_2.9.3</artifactId>
174+
<artifactId>scalacheck_2.10</artifactId>
174175
<scope>test</scope>
175176
</dependency>
176177
<dependency>
@@ -190,8 +191,8 @@
190191
</dependency>
191192
</dependencies>
192193
<build>
193-
<outputDirectory>target/scala-${scala.version}/classes</outputDirectory>
194-
<testOutputDirectory>target/scala-${scala.version}/test-classes</testOutputDirectory>
194+
<outputDirectory>target/scala-2.10/classes</outputDirectory>
195+
<testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
195196
<plugins>
196197
<plugin>
197198
<groupId>org.apache.maven.plugins</groupId>

core/src/main/java/org/apache/spark/network/netty/FileClient.java

Lines changed: 0 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -19,8 +19,6 @@
1919

2020
import io.netty.bootstrap.Bootstrap;
2121
import io.netty.channel.Channel;
22-
import io.netty.channel.ChannelFuture;
23-
import io.netty.channel.ChannelFutureListener;
2422
import io.netty.channel.ChannelOption;
2523
import io.netty.channel.oio.OioEventLoopGroup;
2624
import io.netty.channel.socket.oio.OioSocketChannel;

core/src/main/java/org/apache/spark/network/netty/FileServer.java

Lines changed: 0 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -20,7 +20,6 @@
2020
import java.net.InetSocketAddress;
2121

2222
import io.netty.bootstrap.ServerBootstrap;
23-
import io.netty.channel.Channel;
2423
import io.netty.channel.ChannelFuture;
2524
import io.netty.channel.ChannelOption;
2625
import io.netty.channel.oio.OioEventLoopGroup;

core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -99,7 +99,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
9999
override def ready(atMost: Duration)(implicit permit: CanAwait): SimpleFutureAction.this.type = {
100100
if (!atMost.isFinite()) {
101101
awaitResult()
102-
} else {
102+
} else jobWaiter.synchronized {
103103
val finishTime = System.currentTimeMillis() + atMost.toMillis
104104
while (!isCompleted) {
105105
val time = System.currentTimeMillis()

0 commit comments

Comments (0)