
Commit 6e0122c

Merge remote-tracking branch 'origin/master' into SPARK-5190-register-sparklistener-in-sc-constructor

Conflicts:
    core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala
    core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala

2 parents: 1a5b9a0 + 6f34131

356 files changed: +16428 / -6571 lines

assembly/pom.xml

Lines changed: 0 additions & 20 deletions

@@ -43,12 +43,6 @@
   </properties>
 
   <dependencies>
-    <!-- Promote Guava to compile scope in this module so it's included while shading. -->
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <scope>compile</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_${scala.binary.version}</artifactId>
@@ -133,20 +127,6 @@
           <goal>shade</goal>
         </goals>
         <configuration>
-          <relocations>
-            <relocation>
-              <pattern>com.google</pattern>
-              <shadedPattern>org.spark-project.guava</shadedPattern>
-              <includes>
-                <include>com.google.common.**</include>
-              </includes>
-              <excludes>
-                <exclude>com/google/common/base/Absent*</exclude>
-                <exclude>com/google/common/base/Optional*</exclude>
-                <exclude>com/google/common/base/Present*</exclude>
-              </excludes>
-            </relocation>
-          </relocations>
          <transformers>
            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
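With the Guava relocation removed from this module, the assembly pom no longer configures shading itself. A quick, hypothetical spot-check of where Guava classes land in a built assembly jar (the jar path and Scala version below are assumptions, not part of this commit):

    # Relocated Guava classes appear under org/spark-project/guava;
    # unrelocated ones stay under com/google/common.
    jar tf assembly/target/scala-2.10/spark-assembly-*.jar \
      | grep -E 'org/spark-project/guava|com/google/common' | head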

bin/compute-classpath.sh

Lines changed: 3 additions & 1 deletion

@@ -50,8 +50,8 @@ fi
 if [ -n "$SPARK_PREPEND_CLASSES" ]; then
   echo "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark"\
     "classes ahead of assembly." >&2
+  # Spark classes
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SPARK_SCALA_VERSION/classes"
-  CLASSPATH="$CLASSPATH:$FWDIR/core/target/jars/*"
   CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SPARK_SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SPARK_SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SPARK_SCALA_VERSION/classes"
@@ -63,6 +63,8 @@ if [ -n "$SPARK_PREPEND_CLASSES" ]; then
   CLASSPATH="$CLASSPATH:$FWDIR/sql/hive/target/scala-$SPARK_SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/sql/hive-thriftserver/target/scala-$SPARK_SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/yarn/stable/target/scala-$SPARK_SCALA_VERSION/classes"
+  # Jars for shaded deps in their original form (copied here during build)
+  CLASSPATH="$CLASSPATH:$FWDIR/core/target/jars/*"
 fi
 
 # Use spark-assembly jar from either RELEASE or assembly directory
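The ordering change is the point here: module classes are prepended first, and the unshaded dependency jars in core/target/jars (copied there during the build; see core/pom.xml below) are appended last. A minimal sketch of why classpath order matters (the main class is hypothetical):

    # Entries earlier on a JVM classpath win: a class under classes/
    # shadows the same class inside any jar listed after it.
    CP="core/target/scala-2.10/classes:core/target/jars/*"
    java -cp "$CP" org.example.Main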

bin/spark-class

Lines changed: 3 additions & 2 deletions

@@ -29,6 +29,7 @@ FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"$SPARK_HOME/conf"}"
 
 . "$FWDIR"/bin/load-spark-env.sh
 
@@ -120,8 +121,8 @@ fi
 JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"
 
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
-if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
+if [ -e "$SPARK_CONF_DIR/java-opts" ] ; then
+  JAVA_OPTS="$JAVA_OPTS `cat "$SPARK_CONF_DIR"/java-opts`"
 fi
 
 # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
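Since java-opts is now resolved against SPARK_CONF_DIR rather than the hard-coded $FWDIR/conf, a config directory outside the Spark tree works too. A usage sketch (the directory, JVM flag, and launched class are illustrative, not prescribed by this commit):

    # spark-class now reads $SPARK_CONF_DIR/java-opts.
    export SPARK_CONF_DIR=/etc/spark/conf
    echo "-XX:+UseConcMarkSweepGC" > "$SPARK_CONF_DIR/java-opts"
    ./bin/spark-class org.apache.spark.deploy.master.Master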

build/mvn

Lines changed: 6 additions & 6 deletions

@@ -48,11 +48,11 @@ install_app() {
   # check if we already have the tarball
   # check if we have curl installed
   # download application
-  [ ! -f "${local_tarball}" ] && [ -n "`which curl 2>/dev/null`" ] && \
+  [ ! -f "${local_tarball}" ] && [ $(command -v curl) ] && \
     echo "exec: curl ${curl_opts} ${remote_tarball}" && \
     curl ${curl_opts} "${remote_tarball}" > "${local_tarball}"
   # if the file still doesn't exist, lets try `wget` and cross our fingers
-  [ ! -f "${local_tarball}" ] && [ -n "`which wget 2>/dev/null`" ] && \
+  [ ! -f "${local_tarball}" ] && [ $(command -v wget) ] && \
     echo "exec: wget ${wget_opts} ${remote_tarball}" && \
     wget ${wget_opts} -O "${local_tarball}" "${remote_tarball}"
   # if both were unsuccessful, exit
@@ -68,10 +68,10 @@ install_app() {
 # Install maven under the build/ folder
 install_mvn() {
   install_app \
-    "http://apache.claz.org/maven/maven-3/3.2.3/binaries" \
-    "apache-maven-3.2.3-bin.tar.gz" \
-    "apache-maven-3.2.3/bin/mvn"
-  MVN_BIN="${_DIR}/apache-maven-3.2.3/bin/mvn"
+    "http://archive.apache.org/dist/maven/maven-3/3.2.5/binaries" \
+    "apache-maven-3.2.5-bin.tar.gz" \
+    "apache-maven-3.2.5/bin/mvn"
+  MVN_BIN="${_DIR}/apache-maven-3.2.5/bin/mvn"
 }
 
 # Install zinc under the build/ folder
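command -v is the POSIX-specified way to test for a program, while which is an external command with platform-dependent behavior. A standalone sketch of the portable pattern (not taken from this commit):

    # command -v prints the resolved path and exits non-zero when missing.
    if command -v curl > /dev/null 2>&1; then
      echo "curl found at $(command -v curl)"
    else
      echo "curl not installed" >&2
    fi

Note that the form in the diff, [ $(command -v curl) ], works because test with a single non-empty argument succeeds; redirecting to /dev/null as above sidesteps word-splitting on unusual paths.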

build/sbt-launch-lib.bash

Lines changed: 2 additions & 2 deletions

@@ -50,9 +50,9 @@ acquire_sbt_jar () {
   # Download
   printf "Attempting to fetch sbt\n"
   JAR_DL="${JAR}.part"
-  if hash curl 2>/dev/null; then
+  if [ $(command -v curl) ]; then
     (curl --silent ${URL1} > "${JAR_DL}" || curl --silent ${URL2} > "${JAR_DL}") && mv "${JAR_DL}" "${JAR}"
-  elif hash wget 2>/dev/null; then
+  elif [ $(command -v wget) ]; then
     (wget --quiet ${URL1} -O "${JAR_DL}" || wget --quiet ${URL2} -O "${JAR_DL}") && mv "${JAR_DL}" "${JAR}"
   else
     printf "You do not have curl or wget installed, please install sbt manually from http://www.scala-sbt.org/\n"

conf/metrics.properties.template

Lines changed: 1 addition & 0 deletions

@@ -87,6 +87,7 @@
 #   period    10            Poll period
 #   unit      seconds       Units of poll period
 #   prefix    EMPTY STRING  Prefix to prepend to metric name
+#   protocol  tcp           Protocol ("tcp" or "udp") to use
 
 ## Examples
 # Enable JmxSink for all instances by class name
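For context, here is how the new option would plug into conf/metrics.properties; the endpoint is a placeholder and the sink class name is assumed to be Spark's GraphiteSink:

    # Report all instances' metrics to Graphite over UDP (hypothetical host/port).
    *.sink.graphite.class=org.apache.spark.metrics.sink.GraphiteSink
    *.sink.graphite.host=graphite.example.com
    *.sink.graphite.port=2003
    *.sink.graphite.protocol=udp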

core/pom.xml

Lines changed: 25 additions & 49 deletions

@@ -34,6 +34,10 @@
   <name>Spark Project Core</name>
   <url>http://spark.apache.org/</url>
   <dependencies>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
     <dependency>
       <groupId>com.twitter</groupId>
       <artifactId>chill_${scala.binary.version}</artifactId>
@@ -90,32 +94,35 @@
       <groupId>org.apache.curator</groupId>
       <artifactId>curator-recipes</artifactId>
     </dependency>
+
+    <!-- Jetty dependencies promoted to compile here so they are shaded
+         and inlined into spark-core jar -->
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-plus</artifactId>
+      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-security</artifactId>
+      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-util</artifactId>
+      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-server</artifactId>
+      <scope>compile</scope>
     </dependency>
-    <!--
-     Promote Guava to "compile" so that maven-shade-plugin picks it up (for packaging the Optional
-     class exposed in the Java API). The plugin will then remove this dependency from the published
-     pom, so that Guava does not pollute the client's compilation classpath.
-    -->
     <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-http</artifactId>
       <scope>compile</scope>
     </dependency>
+
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-lang3</artifactId>
@@ -204,19 +211,19 @@
       <artifactId>stream</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.codahale.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.codahale.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-jvm</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.codahale.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-json</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.codahale.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-graphite</artifactId>
     </dependency>
     <dependency>
@@ -350,59 +357,28 @@
         <verbose>true</verbose>
       </configuration>
     </plugin>
-    <plugin>
-      <groupId>org.apache.maven.plugins</groupId>
-      <artifactId>maven-shade-plugin</artifactId>
-      <executions>
-        <execution>
-          <phase>package</phase>
-          <goals>
-            <goal>shade</goal>
-          </goals>
-          <configuration>
-            <shadedArtifactAttached>false</shadedArtifactAttached>
-            <artifactSet>
-              <includes>
-                <include>com.google.guava:guava</include>
-              </includes>
-            </artifactSet>
-            <filters>
-              <!-- See comment in the guava dependency declaration above. -->
-              <filter>
-                <artifact>com.google.guava:guava</artifact>
-                <includes>
-                  <include>com/google/common/base/Absent*</include>
-                  <include>com/google/common/base/Optional*</include>
-                  <include>com/google/common/base/Present*</include>
-                </includes>
-              </filter>
-            </filters>
-          </configuration>
-        </execution>
-      </executions>
-    </plugin>
-    <!--
-      Copy guava to the build directory. This is needed to make the SPARK_PREPEND_CLASSES
-      option work in compute-classpath.sh, since it would put the non-shaded Spark classes in
-      the runtime classpath.
-    -->
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-dependency-plugin</artifactId>
       <executions>
+        <!-- When using SPARK_PREPEND_CLASSES Spark classes compiled locally don't use
+             shaded deps. So here we store jars in their original form which are added
+             when the classpath is computed. -->
        <execution>
          <id>copy-dependencies</id>
          <phase>package</phase>
          <goals>
            <goal>copy-dependencies</goal>
          </goals>
-          <configuration>
+          <configuration>
            <outputDirectory>${project.build.directory}</outputDirectory>
            <overWriteReleases>false</overWriteReleases>
            <overWriteSnapshots>false</overWriteSnapshots>
            <overWriteIfNewer>true</overWriteIfNewer>
            <useSubDirectoryPerType>true</useSubDirectoryPerType>
-            <includeArtifactIds>guava</includeArtifactIds>
+            <includeArtifactIds>
+              guava,jetty-io,jetty-http,jetty-plus,jetty-util,jetty-server
+            </includeArtifactIds>
            <silent>true</silent>
          </configuration>
        </execution>
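Because useSubDirectoryPerType is set, copy-dependencies drops the listed jars under core/target/jars, exactly the path compute-classpath.sh appends. A quick local check (the build invocation is a plausible one, not mandated by this commit):

    # Package the core module, then list the unshaded jars copied for
    # SPARK_PREPEND_CLASSES; expect guava plus the jetty artifacts above.
    build/mvn -pl core -am package -DskipTests
    ls core/target/jars/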

core/src/main/resources/org/apache/spark/ui/static/webui.css

Lines changed: 10 additions & 1 deletion

@@ -121,6 +121,14 @@ pre {
   border: none;
 }
 
+.description-input {
+  overflow: hidden;
+  text-overflow: ellipsis;
+  width: 100%;
+  white-space: nowrap;
+  display: block;
+}
+
 .stacktrace-details {
   max-height: 300px;
   overflow-y: auto;
@@ -182,6 +190,7 @@ span.additional-metric-title {
 
 /* Hide all additional metrics by default. This is done here rather than using JavaScript to
  * avoid slow page loads for stage pages with large numbers (e.g., thousands) of tasks. */
-.scheduler_delay, .deserialization_time, .serialization_time, .getting_result_time {
+.scheduler_delay, .deserialization_time, .fetch_wait_time, .serialization_time,
+.getting_result_time {
   display: none;
 }

core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala

Lines changed: 1 addition & 1 deletion

@@ -158,7 +158,7 @@ private[spark] class ExecutorAllocationManager(
         "shuffle service. You may enable this through spark.shuffle.service.enabled.")
     }
     if (tasksPerExecutor == 0) {
-      throw new SparkException("spark.executor.cores must not be less than spark.task.cpus.cores")
+      throw new SparkException("spark.executor.cores must not be less than spark.task.cpus.")
     }
   }
 
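The guard fires when integer division yields tasksPerExecutor == 0, i.e. spark.executor.cores < spark.task.cpus. A hypothetical submission that would hit the corrected message (the app class and jar are placeholders):

    # 1 core per executor but 2 cores per task -> tasksPerExecutor == 0.
    ./bin/spark-submit \
      --conf spark.dynamicAllocation.enabled=true \
      --conf spark.executor.cores=1 \
      --conf spark.task.cpus=2 \
      --class org.example.App app.jar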

core/src/main/scala/org/apache/spark/Logging.scala

Lines changed: 11 additions & 9 deletions

@@ -118,15 +118,17 @@ trait Logging {
       // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
       // org.apache.logging.slf4j.Log4jLoggerFactory
       val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
-      val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
-      if (!log4j12Initialized && usingLog4j12) {
-        val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
-        Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
-          case Some(url) =>
-            PropertyConfigurator.configure(url)
-            System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
-          case None =>
-            System.err.println(s"Spark was unable to load $defaultLogProps")
+      if (usingLog4j12) {
+        val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+        if (!log4j12Initialized) {
+          val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
+          Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
+            case Some(url) =>
+              PropertyConfigurator.configure(url)
+              System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
+            case None =>
+              System.err.println(s"Spark was unable to load $defaultLogProps")
+          }
        }
      }
      Logging.initialized = true
