
Commit 020a9e2

Merge remote-tracking branch 'upstream/master'
2 parents ddda43e + 545dfcb commit 020a9e2

116 files changed (+2917, -2088 lines)


assembly/pom.xml

Lines changed: 20 additions & 0 deletions
@@ -354,5 +354,25 @@
       </dependency>
     </dependencies>
   </profile>
+
+  <!-- Profiles that disable inclusion of certain dependencies. -->
+  <profile>
+    <id>hadoop-provided</id>
+    <properties>
+      <hadoop.deps.scope>provided</hadoop.deps.scope>
+    </properties>
+  </profile>
+  <profile>
+    <id>hive-provided</id>
+    <properties>
+      <hive.deps.scope>provided</hive.deps.scope>
+    </properties>
+  </profile>
+  <profile>
+    <id>parquet-provided</id>
+    <properties>
+      <parquet.deps.scope>provided</parquet.deps.scope>
+    </properties>
+  </profile>
 </profiles>
</project>
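
These profiles flip the matching dependency scopes to "provided", so Spark still compiles against Hadoop, Hive, and Parquet but leaves their jars out of the assembly for the target distribution to supply at runtime. A minimal build sketch, assuming a standard Maven setup (the exact flag combination is illustrative, not part of this commit):

    # Build Spark while deferring Hadoop/Hive/Parquet jars to the host distribution.
    mvn -Phadoop-provided -Phive-provided -Pparquet-provided -DskipTests package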

bagel/pom.xml

Lines changed: 0 additions & 15 deletions
@@ -40,15 +40,6 @@
       <artifactId>spark-core_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>
@@ -58,11 +49,5 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <plugins>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-      </plugin>
-    </plugins>
   </build>
</project>

bagel/src/test/resources/log4j.properties

Lines changed: 2 additions & 2 deletions
@@ -15,10 +15,10 @@
 # limitations under the License.
 #
 
-# Set everything to be logged to the file bagel/target/unit-tests.log
+# Set everything to be logged to the file target/unit-tests.log
 log4j.rootCategory=INFO, file
 log4j.appender.file=org.apache.log4j.FileAppender
-log4j.appender.file.append=false
+log4j.appender.file.append=true
 log4j.appender.file.file=target/unit-tests.log
 log4j.appender.file.layout=org.apache.log4j.PatternLayout
 log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n

bin/compute-classpath.cmd

Lines changed: 7 additions & 0 deletions
@@ -109,6 +109,13 @@ if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
 set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
 :no_yarn_conf_dir
 
+rem To allow for distributions to append needed libraries to the classpath (e.g. when
+rem using the "hadoop-provided" profile to build Spark), check SPARK_DIST_CLASSPATH and
+rem append it to the final classpath.
+if not "x%SPARK_DIST_CLASSPATH%"=="x" (
+  set CLASSPATH=%CLASSPATH%;%SPARK_DIST_CLASSPATH%
+)
+
 rem A bit of a hack to allow calling this script within run2.cmd without seeing output
 if "%DONT_PRINT_CLASSPATH%"=="1" goto exit

bin/compute-classpath.sh

Lines changed: 7 additions & 0 deletions
@@ -146,4 +146,11 @@ if [ -n "$YARN_CONF_DIR" ]; then
   CLASSPATH="$CLASSPATH:$YARN_CONF_DIR"
 fi
 
+# To allow for distributions to append needed libraries to the classpath (e.g. when
+# using the "hadoop-provided" profile to build Spark), check SPARK_DIST_CLASSPATH and
+# append it to the final classpath.
+if [ -n "$SPARK_DIST_CLASSPATH" ]; then
+  CLASSPATH="$CLASSPATH:$SPARK_DIST_CLASSPATH"
+fi
+
 echo "$CLASSPATH"
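
With this hook, a packager can point Spark at an existing Hadoop installation instead of bundling one. A hedged usage sketch (deriving the value from the `hadoop classpath` command is a common convention, not something this commit mandates):

    # Expose the distribution's Hadoop jars to Spark, then launch normally;
    # compute-classpath.sh appends SPARK_DIST_CLASSPATH after everything else.
    export SPARK_DIST_CLASSPATH="$(hadoop classpath)"
    ./bin/spark-shell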

bin/spark-submit

Lines changed: 9 additions & 1 deletion
@@ -38,11 +38,19 @@ while (($#)); do
     export SPARK_SUBMIT_CLASSPATH=$2
   elif [ "$1" = "--driver-java-options" ]; then
     export SPARK_SUBMIT_OPTS=$2
+  elif [ "$1" = "--master" ]; then
+    export MASTER=$2
   fi
   shift
 done
 
-DEFAULT_PROPERTIES_FILE="$SPARK_HOME/conf/spark-defaults.conf"
+if [ -z "$SPARK_CONF_DIR" ]; then
+  export SPARK_CONF_DIR="$SPARK_HOME/conf"
+fi
+DEFAULT_PROPERTIES_FILE="$SPARK_CONF_DIR/spark-defaults.conf"
+if [ "$MASTER" == "yarn-cluster" ]; then
+  SPARK_SUBMIT_DEPLOY_MODE=cluster
+fi
 export SPARK_SUBMIT_DEPLOY_MODE=${SPARK_SUBMIT_DEPLOY_MODE:-"client"}
 export SPARK_SUBMIT_PROPERTIES_FILE=${SPARK_SUBMIT_PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"}
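
The launcher now falls back to $SPARK_HOME/conf only when SPARK_CONF_DIR is unset, and switches the deploy mode to cluster when the master is yarn-cluster. A usage sketch (the config path, class, and jar names are hypothetical):

    # Read spark-defaults.conf from a custom config directory; the yarn-cluster
    # master implies cluster deploy mode, so the driver runs inside YARN.
    SPARK_CONF_DIR=/etc/spark/conf ./bin/spark-submit \
      --master yarn-cluster \
      --class com.example.MyApp \
      myapp.jar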

bin/spark-submit2.cmd

Lines changed: 11 additions & 1 deletion
@@ -24,7 +24,11 @@ set ORIG_ARGS=%*
 
 rem Reset the values of all variables used
 set SPARK_SUBMIT_DEPLOY_MODE=client
-set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
+
+if not defined SPARK_CONF_DIR (
+  set SPARK_CONF_DIR=%SPARK_HOME%\conf
+)
+set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_CONF_DIR%\spark-defaults.conf
 set SPARK_SUBMIT_DRIVER_MEMORY=
 set SPARK_SUBMIT_LIBRARY_PATH=
 set SPARK_SUBMIT_CLASSPATH=
@@ -45,11 +49,17 @@ if [%1] == [] goto continue
     set SPARK_SUBMIT_CLASSPATH=%2
   ) else if [%1] == [--driver-java-options] (
     set SPARK_SUBMIT_OPTS=%2
+  ) else if [%1] == [--master] (
+    set MASTER=%2
   )
   shift
   goto loop
 :continue
 
+if [%MASTER%] == [yarn-cluster] (
+  set SPARK_SUBMIT_DEPLOY_MODE=cluster
+)
+
 rem For client mode, the driver will be launched in the same JVM that launches
 rem SparkSubmit, so we may need to read the properties file for any extra class
 rem paths, library paths, java options and memory early on. Otherwise, it will

core/pom.xml

Lines changed: 0 additions & 18 deletions
@@ -276,11 +276,6 @@
       <artifactId>selenium-java</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
@@ -326,19 +321,6 @@
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>test</id>
-            <goals>
-              <goal>test</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-
       <!-- Unzip py4j so we can include its files in the jar -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion
@@ -229,7 +229,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   // An asynchronous listener bus for Spark events
   private[spark] val listenerBus = new LiveListenerBus
 
-  conf.set("spark.executor.id", "driver")
+  conf.set("spark.executor.id", SparkContext.DRIVER_IDENTIFIER)
 
   // Create the Spark execution environment (cache, map output tracker, etc)
   private[spark] val env = SparkEnv.createDriverEnv(conf, isLocal, listenerBus)
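
Replacing the magic string with SparkContext.DRIVER_IDENTIFIER keeps driver checks consistent wherever an executor id is compared. A minimal Scala sketch, assuming code inside the org.apache.spark package (the isDriver helper is illustrative, not part of this commit):

    // Hypothetical helper: one shared constant instead of scattered "driver" literals.
    def isDriver(executorId: String): Boolean =
      executorId == SparkContext.DRIVER_IDENTIFIER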
core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark
+
+import org.apache.spark.annotation.DeveloperApi
+
+/**
+ * Exception thrown when a task cannot be serialized.
+ */
+private[spark] class TaskNotSerializableException(error: Throwable) extends Exception(error)
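
A hedged sketch of how a caller might surface a serialization failure through this class; only the exception itself comes from this commit, and since it is private[spark] the sketch assumes code living in the org.apache.spark package:

    import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

    // Illustrative only: serialize a task with plain Java serialization and
    // rethrow failures as the more specific TaskNotSerializableException.
    def serializeTask(task: AnyRef): Array[Byte] = {
      val buffer = new ByteArrayOutputStream()
      try {
        val out = new ObjectOutputStream(buffer)
        out.writeObject(task)
        out.close()
        buffer.toByteArray
      } catch {
        case e: NotSerializableException => throw new TaskNotSerializableException(e)
      }
    }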
