
Commit f11f617

Merge branch 'master' into reuse-worker

Conflicts:
	python/pyspark/serializers.py

2 parents: 3939f20 + e11eeb7

331 files changed: 7930 additions & 4736 deletions


README.md

Lines changed: 2 additions & 2 deletions
@@ -4,8 +4,8 @@ Spark is a fast and general cluster computing system for Big Data. It provides
 high-level APIs in Scala, Java, and Python, and an optimized engine that
 supports general computation graphs for data analysis. It also supports a
 rich set of higher-level tools including Spark SQL for SQL and structured
-data processing, MLLib for machine learning, GraphX for graph processing,
-and Spark Streaming.
+data processing, MLlib for machine learning, GraphX for graph processing,
+and Spark Streaming for stream processing.
 
 <http://spark.apache.org/>
 
assembly/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
-    <version>1.1.0-SNAPSHOT</version>
+    <version>1.2.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
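
Note: the identical spark-parent version bump (1.1.0-SNAPSHOT to 1.2.0-SNAPSHOT) repeats in every module pom below, since each module references the parent version explicitly. As a hedged aside, a generic way to apply such a bump across a Maven multi-module build, not necessarily what was used for this commit, is the versions plugin:

    # Hypothetical illustration: rewrite the version in every module pom at once
    mvn versions:set -DnewVersion=1.2.0-SNAPSHOT -DgenerateBackupPoms=false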

bagel/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
-    <version>1.1.0-SNAPSHOT</version>
+    <version>1.2.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala

Lines changed: 0 additions & 2 deletions
@@ -24,8 +24,6 @@ import org.scalatest.time.SpanSugar._
 import org.apache.spark._
 import org.apache.spark.storage.StorageLevel
 
-import scala.language.postfixOps
-
 class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
 class TestMessage(val targetId: String) extends Message[String] with Serializable
 
bin/beeline

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@
 set -o posix
 
 # Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 CLASS="org.apache.hive.beeline.BeeLine"
 exec "$FWDIR/bin/spark-class" $CLASS "$@"

bin/compute-classpath.sh

Lines changed: 7 additions & 6 deletions
@@ -23,9 +23,9 @@
 SCALA_VERSION=2.10
 
 # Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
-. $FWDIR/bin/load-spark-env.sh
+. "$FWDIR"/bin/load-spark-env.sh
 
 # Build up classpath
 CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH:$FWDIR/conf"
@@ -43,6 +43,7 @@ if [ -n "$SPARK_PREPEND_CLASSES" ]; then
   echo "NOTE: SPARK_PREPEND_CLASSES is set, placing locally compiled Spark"\
     "classes ahead of assembly." >&2
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/core/target/jars/*"
   CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
@@ -63,7 +64,7 @@ else
   assembly_folder="$ASSEMBLY_DIR"
 fi
 
-num_jars=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar" | wc -l)
+num_jars="$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar" | wc -l)"
 if [ "$num_jars" -eq "0" ]; then
   echo "Failed to find Spark assembly in $assembly_folder"
   echo "You need to build Spark before running this program."
@@ -77,7 +78,7 @@ if [ "$num_jars" -gt "1" ]; then
   exit 1
 fi
 
-ASSEMBLY_JAR=$(ls "$assembly_folder"/spark-assembly*hadoop*.jar 2>/dev/null)
+ASSEMBLY_JAR="$(ls "$assembly_folder"/spark-assembly*hadoop*.jar 2>/dev/null)"
 
 # Verify that versions of java used to build the jars and run Spark are compatible
 jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
@@ -103,8 +104,8 @@ else
   datanucleus_dir="$FWDIR"/lib_managed/jars
 fi
 
-datanucleus_jars=$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\.jar")
-datanucleus_jars=$(echo "$datanucleus_jars" | tr "\n" : | sed s/:$//g)
+datanucleus_jars="$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\.jar")"
+datanucleus_jars="$(echo "$datanucleus_jars" | tr "\n" : | sed s/:$//g)"
 
 if [ -n "$datanucleus_jars" ]; then
   hive_files=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" org/apache/hadoop/hive/ql/exec 2>/dev/null)
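
Note on the var="$(...)" changes above: POSIX shells do not field-split the right-hand side of a plain assignment, so the added quotes around command substitutions in assignments are defensive consistency rather than a strict requirement. Splitting does bite once an unquoted value is used in argument position, as this hypothetical sketch shows:

    files=$(printf 'a b')        # assignment: no splitting; files holds "a b"
    printf '<%s>\n' $files       # unquoted argument: splits into <a> and <b>
    printf '<%s>\n' "$files"     # quoted argument: one word, <a b>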

bin/load-spark-env.sh

Lines changed: 2 additions & 2 deletions
@@ -25,9 +25,9 @@ if [ -z "$SPARK_ENV_LOADED" ]; then
   export SPARK_ENV_LOADED=1
 
   # Returns the parent of the directory this script lives in.
-  parent_dir="$(cd `dirname $0`/..; pwd)"
+  parent_dir="$(cd "`dirname "$0"`"/..; pwd)"
 
-  user_conf_dir=${SPARK_CONF_DIR:-"$parent_dir/conf"}
+  user_conf_dir="${SPARK_CONF_DIR:-"$parent_dir"/conf}"
 
   if [ -f "${user_conf_dir}/spark-env.sh" ]; then
     # Promote all variable declarations to environment (exported) variables
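
Note: the user_conf_dir line relies on the fact that quotes inside ${...:-...} open their own quoting context, so "$parent_dir" stays protected even within the outer double quotes. A hypothetical demo:

    unset SPARK_CONF_DIR
    parent_dir="/opt/my spark"
    user_conf_dir="${SPARK_CONF_DIR:-"$parent_dir"/conf}"
    echo "$user_conf_dir"        # prints: /opt/my spark/conf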

bin/pyspark

Lines changed: 12 additions & 10 deletions
@@ -18,18 +18,18 @@
 #
 
 # Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
 
-source $FWDIR/bin/utils.sh
+source "$FWDIR/bin/utils.sh"
 
 SCALA_VERSION=2.10
 
 function usage() {
   echo "Usage: ./bin/pyspark [options]" 1>&2
-  $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
   exit 0
 }
 
@@ -48,7 +48,7 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
   fi
 fi
 
-. $FWDIR/bin/load-spark-env.sh
+. "$FWDIR"/bin/load-spark-env.sh
 
 # Figure out which Python executable to use
 if [[ -z "$PYSPARK_PYTHON" ]]; then
@@ -57,12 +57,12 @@ fi
 export PYSPARK_PYTHON
 
 # Add the PySpark classes to the Python path:
-export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
+export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
+export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
-export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
-export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
+export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
+export PYTHONSTARTUP="$FWDIR/python/pyspark/shell.py"
 
 # If IPython options are specified, assume user wants to run IPython
 if [[ -n "$IPYTHON_OPTS" ]]; then
@@ -85,6 +85,8 @@ export PYSPARK_SUBMIT_ARGS
 
 # For pyspark tests
 if [[ -n "$SPARK_TESTING" ]]; then
+  unset YARN_CONF_DIR
+  unset HADOOP_CONF_DIR
   if [[ -n "$PYSPARK_DOC_TEST" ]]; then
     exec "$PYSPARK_PYTHON" -m doctest $1
   else
@@ -97,10 +99,10 @@ fi
 if [[ "$1" =~ \.py$ ]]; then
   echo -e "\nWARNING: Running python applications through ./bin/pyspark is deprecated as of Spark 1.0." 1>&2
   echo -e "Use ./bin/spark-submit <python file>\n" 1>&2
-  primary=$1
+  primary="$1"
   shift
   gatherSparkSubmitOpts "$@"
-  exec $FWDIR/bin/spark-submit "${SUBMISSION_OPTS[@]}" $primary "${APPLICATION_OPTS[@]}"
+  exec "$FWDIR"/bin/spark-submit "${SUBMISSION_OPTS[@]}" "$primary" "${APPLICATION_OPTS[@]}"
 else
   # PySpark shell requires special handling downstream
   export PYSPARK_SHELL=1
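
Note: the final exec line depends on quoted array expansion; "${SUBMISSION_OPTS[@]}" re-emits each gathered option as its own word, and quoting "$primary" keeps a script path containing spaces intact. A hypothetical demo of the difference (the conf value is made up):

    SUBMISSION_OPTS=(--conf "spark.executor.extraJavaOptions=-Da=1 -Db=2")
    printf '<%s>\n' "${SUBMISSION_OPTS[@]}"   # two words; the inner space is preserved
    printf '<%s>\n' ${SUBMISSION_OPTS[@]}     # unquoted: splits into three words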

bin/run-example

Lines changed: 4 additions & 4 deletions
@@ -19,7 +19,7 @@
 
 SCALA_VERSION=2.10
 
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 export SPARK_HOME="$FWDIR"
 EXAMPLES_DIR="$FWDIR"/examples
 
@@ -35,12 +35,12 @@ else
 fi
 
 if [ -f "$FWDIR/RELEASE" ]; then
-  export SPARK_EXAMPLES_JAR=`ls "$FWDIR"/lib/spark-examples-*hadoop*.jar`
+  export SPARK_EXAMPLES_JAR="`ls "$FWDIR"/lib/spark-examples-*hadoop*.jar`"
 elif [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.jar ]; then
-  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.jar`
+  export SPARK_EXAMPLES_JAR="`ls "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.jar`"
 fi
 
-if [[ -z $SPARK_EXAMPLES_JAR ]]; then
+if [[ -z "$SPARK_EXAMPLES_JAR" ]]; then
   echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" 1>&2
   echo "You need to build Spark before running this program" 1>&2
   exit 1

bin/spark-class

Lines changed: 10 additions & 10 deletions
@@ -27,12 +27,12 @@ esac
 SCALA_VERSION=2.10
 
 # Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
 
-. $FWDIR/bin/load-spark-env.sh
+. "$FWDIR"/bin/load-spark-env.sh
 
 if [ -z "$1" ]; then
   echo "Usage: spark-class <class> [<args>]" 1>&2
@@ -105,7 +105,7 @@ else
     exit 1
   fi
 fi
-JAVA_VERSION=$($RUNNER -version 2>&1 | sed 's/java version "\(.*\)\.\(.*\)\..*"/\1\2/; 1q')
+JAVA_VERSION=$("$RUNNER" -version 2>&1 | sed 's/java version "\(.*\)\.\(.*\)\..*"/\1\2/; 1q')
 
 # Set JAVA_OPTS to be able to load native libraries and to set heap size
 if [ "$JAVA_VERSION" -ge 18 ]; then
@@ -117,7 +117,7 @@ JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"
 
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
 if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
+  JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
 fi
 
 # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
@@ -126,21 +126,21 @@ TOOLS_DIR="$FWDIR"/tools
 SPARK_TOOLS_JAR=""
 if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar ]; then
   # Use the JAR from the SBT build
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar`
+  export SPARK_TOOLS_JAR="`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar`"
 fi
 if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
   # Use the JAR from the Maven build
   # TODO: this also needs to become an assembly!
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+  export SPARK_TOOLS_JAR="`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`"
 fi
 
 # Compute classpath using external script
-classpath_output=$($FWDIR/bin/compute-classpath.sh)
+classpath_output=$("$FWDIR"/bin/compute-classpath.sh)
 if [[ "$?" != "0" ]]; then
   echo "$classpath_output"
   exit 1
 else
-  CLASSPATH=$classpath_output
+  CLASSPATH="$classpath_output"
 fi
 
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
@@ -153,9 +153,9 @@ if [[ "$1" =~ org.apache.spark.tools.* ]]; then
 fi
 
 if $cygwin; then
-  CLASSPATH=`cygpath -wp $CLASSPATH`
+  CLASSPATH="`cygpath -wp "$CLASSPATH"`"
   if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
-    export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
+    export SPARK_TOOLS_JAR="`cygpath -w "$SPARK_TOOLS_JAR"`"
   fi
 fi
 export CLASSPATH
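
Note: the Cygwin block applies the same discipline inside backticks, which open a fresh quoting context; the inner "$CLASSPATH" is therefore quoted independently of the outer double quotes. A hypothetical sketch (cygpath exists only on Cygwin, so plain ls -d stands in here):

    dir="/tmp/a b"
    mkdir -p "$dir"
    out="`ls -d "$dir"`"    # inner quotes are parsed within the backticks
    echo "$out"             # prints: /tmp/a b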
