
Commit 51d14b9

Merge remote-tracking branch 'upstream/master'
Parents: 38faca4 + 3d0c37b

393 files changed: +8104 / -3155 lines


.gitignore

Lines changed: 3 additions & 0 deletions
@@ -5,6 +5,7 @@
 *.ipr
 *.iml
 *.iws
+*.pyc
 .idea/
 .idea_modules/
 sbt/*.jar
@@ -49,6 +50,8 @@ dependency-reduced-pom.xml
 checkpoint
 derby.log
 dist/
+dev/create-release/*txt
+dev/create-release/*final
 spark-*-bin-*.tgz
 unit-tests.log
 /lib/

.rat-excludes

Lines changed: 1 addition & 0 deletions
@@ -64,3 +64,4 @@ dist/*
 logs
 .*scalastyle-output.xml
 .*dependency-reduced-pom.xml
+known_translations

LICENSE

Lines changed: 2 additions & 1 deletion
@@ -646,7 +646,8 @@ THE SOFTWARE.
 
 ========================================================================
 For Scala Interpreter classes (all .scala files in repl/src/main/scala
-except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala):
+except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
+and for SerializableMapWrapper in JavaUtils.scala:
 ========================================================================
 
 Copyright (c) 2002-2013 EPFL

assembly/pom.xml

Lines changed: 0 additions & 10 deletions
@@ -169,16 +169,6 @@
   </build>
 
   <profiles>
-    <profile>
-      <id>yarn-alpha</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.spark</groupId>
-          <artifactId>spark-yarn-alpha_${scala.binary.version}</artifactId>
-          <version>${project.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
     <profile>
       <id>yarn</id>
       <dependencies>

bin/beeline.cmd

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SPARK_HOME=%~dp0..
+cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*

bin/compute-classpath.sh

Lines changed: 3 additions & 3 deletions
@@ -68,14 +68,14 @@ else
   assembly_folder="$ASSEMBLY_DIR"
 fi
 
-num_jars="$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar" | wc -l)"
+num_jars="$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar$" | wc -l)"
 if [ "$num_jars" -eq "0" ]; then
   echo "Failed to find Spark assembly in $assembly_folder"
   echo "You need to build Spark before running this program."
   exit 1
 fi
 if [ "$num_jars" -gt "1" ]; then
-  jars_list=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*.jar")
+  jars_list=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*.jar$")
   echo "Found multiple Spark assembly jars in $assembly_folder:"
   echo "$jars_list"
   echo "Please remove all but one jar."
@@ -108,7 +108,7 @@ else
   datanucleus_dir="$FWDIR"/lib_managed/jars
 fi
 
-datanucleus_jars="$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\.jar")"
+datanucleus_jars="$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\.jar$")"
 datanucleus_jars="$(echo "$datanucleus_jars" | tr "\n" : | sed s/:$//g)"
 
 if [ -n "$datanucleus_jars" ]; then
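Note: the only change in this file is the trailing "$" anchor on the grep patterns, so that only names actually ending in .jar are picked up, rather than anything merely containing ".jar" (for example checksum files sitting next to the assembly). A minimal Scala sketch of the difference, using made-up file names purely for illustration:

import scala.util.matching.Regex

object JarPatternCheck {
  def main(args: Array[String]): Unit = {
    // Anchored pattern, mirroring the fixed grep expression above.
    val anchored: Regex = "spark-assembly.*hadoop.*\\.jar$".r
    // Unanchored pattern, mirroring the old grep expression.
    val unanchored: Regex = "spark-assembly.*hadoop.*\\.jar".r

    val realJar  = "spark-assembly-1.2.0-hadoop2.4.0.jar"     // hypothetical file name
    val checksum = "spark-assembly-1.2.0-hadoop2.4.0.jar.sha" // hypothetical file name

    assert(anchored.findFirstIn(realJar).isDefined)    // still matches the real jar
    assert(anchored.findFirstIn(checksum).isEmpty)     // "$" rejects the trailing .sha
    assert(unanchored.findFirstIn(checksum).isDefined) // the old pattern let it through
  }
}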

bin/spark-shell

Lines changed: 7 additions & 0 deletions
@@ -45,6 +45,13 @@ source "$FWDIR"/bin/utils.sh
 SUBMIT_USAGE_FUNCTION=usage
 gatherSparkSubmitOpts "$@"
 
+# SPARK-4161: scala does not assume use of the java classpath,
+# so we need to add the "-Dscala.usejavacp=true" flag manually. We
+# do this specifically for the Spark shell because the scala REPL
+# has its own class loader, and any additional classpath specified
+# through spark.driver.extraClassPath is not automatically propagated.
+SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Dscala.usejavacp=true"
+
 function main() {
   if $cygwin; then
     # Workaround for issue involving JLine and Cygwin
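Note: the comment in the hunk explains the motivation (SPARK-4161). As a purely illustrative sanity check, not part of the patch, one could confirm from inside a running spark-shell that the flag reached the JVM, since it surfaces as an ordinary system property:

// Paste into spark-shell; sys.props is standard Scala.
val useJavaCp: Option[String] = sys.props.get("scala.usejavacp") // expected: Some("true")
println(s"scala.usejavacp = $useJavaCp")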

core/src/main/java/org/apache/spark/SparkJobInfo.java

Lines changed: 3 additions & 1 deletion
@@ -17,13 +17,15 @@
 
 package org.apache.spark;
 
+import java.io.Serializable;
+
 /**
  * Exposes information about Spark Jobs.
  *
  * This interface is not designed to be implemented outside of Spark. We may add additional methods
  * which may break binary compatibility with outside implementations.
  */
-public interface SparkJobInfo {
+public interface SparkJobInfo extends Serializable {
   int jobId();
   int[] stageIds();
   JobExecutionStatus status();

core/src/main/java/org/apache/spark/SparkStageInfo.java

Lines changed: 3 additions & 1 deletion
@@ -17,13 +17,15 @@
 
 package org.apache.spark;
 
+import java.io.Serializable;
+
 /**
  * Exposes information about Spark Stages.
  *
  * This interface is not designed to be implemented outside of Spark. We may add additional methods
  * which may break binary compatibility with outside implementations.
  */
-public interface SparkStageInfo {
+public interface SparkStageInfo extends Serializable {
   int stageId();
   int currentAttemptId();
   long submissionTime();
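Note: SparkJobInfo and SparkStageInfo now extend java.io.Serializable, so the status objects Spark hands out can survive plain Java serialization, for example when they are captured on the driver and shipped or cached elsewhere. A minimal Scala sketch of that round trip; DemoJobInfo is a made-up stand-in used only for the demo, not a Spark class:

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import org.apache.spark.{JobExecutionStatus, SparkJobInfo}

// Hypothetical stand-in implementation, only to demonstrate the round trip;
// real instances come from Spark itself.
class DemoJobInfo(id: Int) extends SparkJobInfo {
  override def jobId(): Int = id
  override def stageIds(): Array[Int] = Array(0, 1)
  override def status(): JobExecutionStatus = JobExecutionStatus.SUCCEEDED
}

object SerializableStatusDemo {
  def main(args: Array[String]): Unit = {
    val buffer = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buffer)
    out.writeObject(new DemoJobInfo(42)) // works because SparkJobInfo is now Serializable
    out.close()

    val copy = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
      .readObject().asInstanceOf[SparkJobInfo]
    assert(copy.jobId() == 42 && copy.status() == JobExecutionStatus.SUCCEEDED)
  }
}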

core/src/main/resources/org/apache/spark/ui/static/webui.css

Lines changed: 1 addition & 1 deletion
@@ -171,6 +171,6 @@ span.additional-metric-title {
 
 /* Hide all additional metrics by default. This is done here rather than using JavaScript to
  * avoid slow page loads for stage pages with large numbers (e.g., thousands) of tasks. */
-.scheduler_delay, .gc_time, .deserialization_time, .serialization_time, .getting_result_time {
+.scheduler_delay, .deserialization_time, .serialization_time, .getting_result_time {
   display: none;
 }
