
Commit 5927b6b

Author: Marcelo Vanzin
Commit message: Merge branch 'master' into SPARK-2669
Parents: cbb9fb3 + 0cce545

580 files changed: +11931 / -4971 lines


assembly/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.10</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.4.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

bagel/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.10</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.4.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

bin/spark-class

Lines changed: 36 additions & 25 deletions
@@ -40,35 +40,46 @@ else
   fi
 fi
 
-# Look for the launcher. In non-release mode, add the compiled classes directly to the classpath
-# instead of looking for a jar file.
-SPARK_LAUNCHER_CP=
-if [ -f $SPARK_HOME/RELEASE ]; then
-  LAUNCHER_DIR="$SPARK_HOME/lib"
-  num_jars="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" | wc -l)"
-  if [ "$num_jars" -eq "0" -a -z "$SPARK_LAUNCHER_CP" ]; then
-    echo "Failed to find Spark launcher in $LAUNCHER_DIR." 1>&2
-    echo "You need to build Spark before running this program." 1>&2
-    exit 1
-  fi
+# Find assembly jar
+SPARK_ASSEMBLY_JAR=
+if [ -f "$SPARK_HOME/RELEASE" ]; then
+  ASSEMBLY_DIR="$SPARK_HOME/lib"
+else
+  ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
+fi
 
-  LAUNCHER_JARS="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" || true)"
-  if [ "$num_jars" -gt "1" ]; then
-    echo "Found multiple Spark launcher jars in $LAUNCHER_DIR:" 1>&2
-    echo "$LAUNCHER_JARS" 1>&2
-    echo "Please remove all but one jar." 1>&2
-    exit 1
-  fi
+num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" | wc -l)"
+if [ "$num_jars" -eq "0" -a -z "$SPARK_ASSEMBLY_JAR" ]; then
+  echo "Failed to find Spark assembly in $ASSEMBLY_DIR." 1>&2
+  echo "You need to build Spark before running this program." 1>&2
+  exit 1
+fi
+ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" || true)"
+if [ "$num_jars" -gt "1" ]; then
+  echo "Found multiple Spark assembly jars in $ASSEMBLY_DIR:" 1>&2
+  echo "$ASSEMBLY_JARS" 1>&2
+  echo "Please remove all but one jar." 1>&2
+  exit 1
+fi
 
-  SPARK_LAUNCHER_CP="${LAUNCHER_DIR}/${LAUNCHER_JARS}"
+SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"
+
+# Verify that versions of java used to build the jars and run Spark are compatible
+if [ -n "$JAVA_HOME" ]; then
+  JAR_CMD="$JAVA_HOME/bin/jar"
 else
-  LAUNCHER_DIR="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION"
-  if [ ! -d "$LAUNCHER_DIR/classes" ]; then
-    echo "Failed to find Spark launcher classes in $LAUNCHER_DIR." 1>&2
-    echo "You need to build Spark before running this program." 1>&2
+  JAR_CMD="jar"
+fi
+
+if [ $(command -v "$JAR_CMD") ] ; then
+  jar_error_check=$("$JAR_CMD" -tf "$SPARK_ASSEMBLY_JAR" nonexistent/class/path 2>&1)
+  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
+    echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
+    echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
+    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
+    echo "or build Spark with Java 6." 1>&2
     exit 1
   fi
-  SPARK_LAUNCHER_CP="$LAUNCHER_DIR/classes"
 fi
 
 # The launcher library will print arguments separated by a NULL character, to allow arguments with
@@ -77,7 +88,7 @@ fi
 CMD=()
 while IFS= read -d '' -r ARG; do
   CMD+=("$ARG")
-done < <("$RUNNER" -cp "$SPARK_LAUNCHER_CP" org.apache.spark.launcher.Main "$@")
+done < <("$RUNNER" -cp "$SPARK_ASSEMBLY_JAR" org.apache.spark.launcher.Main "$@")
 
 if [ "${CMD[0]}" = "usage" ]; then
   "${CMD[@]}"

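Note on bin/spark-class above: the script still builds the final command by running org.apache.spark.launcher.Main and reading its output with `while IFS= read -d '' -r ARG`, i.e. one argument per NUL-terminated record; what changed is where the launcher classes are found (the assembly jar instead of a dedicated launcher jar or classes directory), plus the added Java 6/7 compatibility check. The sketch below only illustrates that NUL-separated output contract from the JVM side; it is a hypothetical stand-in, not the real launcher Main.

// Hypothetical sketch of the NUL-separated output that bin/spark-class consumes
// with `while IFS= read -d '' -r ARG`; not the actual org.apache.spark.launcher.Main.
object NulSeparatedOutputSketch {
  // Print each resolved argument verbatim, terminated by a NUL byte, so that
  // arguments containing spaces or newlines survive the shell round trip.
  def emit(cmd: Seq[String]): Unit = {
    cmd.foreach { arg =>
      System.out.print(arg)
      System.out.print('\u0000')
    }
    System.out.flush()
  }

  def main(args: Array[String]): Unit =
    emit(Seq("/usr/bin/java", "-cp", "/opt/spark/lib/spark-assembly.jar",
      "org.apache.spark.deploy.SparkSubmit", "--name", "my app"))
}
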
bin/spark-class2.cmd

Lines changed: 11 additions & 22 deletions
@@ -29,31 +29,20 @@ if "x%1"=="x" (
   exit /b 1
 )
 
-set LAUNCHER_CP=0
-if exist %SPARK_HOME%\RELEASE goto find_release_launcher
+rem Find assembly jar
+set SPARK_ASSEMBLY_JAR=0
 
-rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use Scala so
-rem it doesn't really matter which one is picked up. Add the compiled classes directly to the
-rem classpath instead of looking for a jar file, since it's very common for people using sbt to use
-rem the "assembly" target instead of "package".
-set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.10\classes
-if exist %LAUNCHER_CLASSES% (
-  set LAUNCHER_CP=%LAUNCHER_CLASSES%
+if exist "%SPARK_HOME%\RELEASE" (
+  set ASSEMBLY_DIR=%SPARK_HOME%\lib
+) else (
+  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
 )
-set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.11\classes
-if exist %LAUNCHER_CLASSES% (
-  set LAUNCHER_CP=%LAUNCHER_CLASSES%
-)
-goto check_launcher
 
-:find_release_launcher
-for %%d in (%SPARK_HOME%\lib\spark-launcher*.jar) do (
-  set LAUNCHER_CP=%%d
+for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
+  set SPARK_ASSEMBLY_JAR=%%d
 )
-
-:check_launcher
-if "%LAUNCHER_CP%"=="0" (
-  echo Failed to find Spark launcher JAR.
+if "%SPARK_ASSEMBLY_JAR%"=="0" (
+  echo Failed to find Spark assembly JAR.
   echo You need to build Spark before running this program.
   exit /b 1
 )
@@ -64,7 +53,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
 
 rem The launcher library prints the command to be executed in a single line suitable for being
 rem executed by the batch interpreter. So read all the output of the launcher into a variable.
-for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %LAUNCHER_CP% org.apache.spark.launcher.Main %*"') do (
+for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %SPARK_ASSEMBLY_JAR% org.apache.spark.launcher.Main %*"') do (
   set SPARK_CMD=%%i
 )
 %SPARK_CMD%

core/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.10</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.4.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

core/src/main/resources/org/apache/spark/ui/static/additional-metrics.js

Lines changed: 4 additions & 4 deletions
@@ -30,7 +30,7 @@ $(function() {
 
     stripeSummaryTable();
 
-    $("input:checkbox").click(function() {
+    $('input[type="checkbox"]').click(function() {
         var column = "table ." + $(this).attr("name");
         $(column).toggle();
         stripeSummaryTable();
@@ -39,15 +39,15 @@ $(function() {
     $("#select-all-metrics").click(function() {
         if (this.checked) {
             // Toggle all un-checked options.
-            $('input:checkbox:not(:checked)').trigger('click');
+            $('input[type="checkbox"]:not(:checked)').trigger('click');
         } else {
             // Toggle all checked options.
-            $('input:checkbox:checked').trigger('click');
+            $('input[type="checkbox"]:checked').trigger('click');
        }
     });
 
     // Trigger a click on the checkbox if a user clicks the label next to it.
     $("span.additional-metric-title").click(function() {
-        $(this).parent().find('input:checkbox').trigger('click');
+        $(this).parent().find('input[type="checkbox"]').trigger('click');
     });
 });

core/src/main/scala/org/apache/spark/Accumulators.scala

Lines changed: 11 additions & 12 deletions
@@ -18,8 +18,6 @@
 package org.apache.spark
 
 import java.io.{ObjectInputStream, Serializable}
-import java.util.concurrent.atomic.AtomicLong
-import java.lang.ThreadLocal
 
 import scala.collection.generic.Growable
 import scala.collection.mutable.Map
@@ -109,7 +107,7 @@ class Accumulable[R, T] (
    * The typical use of this method is to directly mutate the local value, eg., to add
    * an element to a Set.
    */
-  def localValue = value_
+  def localValue: R = value_
 
   /**
    * Set the accumulator's value; only allowed on master.
@@ -137,7 +135,7 @@ class Accumulable[R, T] (
     Accumulators.register(this, false)
   }
 
-  override def toString = if (value_ == null) "null" else value_.toString
+  override def toString: String = if (value_ == null) "null" else value_.toString
 }
 
 /**
@@ -257,22 +255,22 @@ object AccumulatorParam {
 
   implicit object DoubleAccumulatorParam extends AccumulatorParam[Double] {
     def addInPlace(t1: Double, t2: Double): Double = t1 + t2
-    def zero(initialValue: Double) = 0.0
+    def zero(initialValue: Double): Double = 0.0
   }
 
   implicit object IntAccumulatorParam extends AccumulatorParam[Int] {
     def addInPlace(t1: Int, t2: Int): Int = t1 + t2
-    def zero(initialValue: Int) = 0
+    def zero(initialValue: Int): Int = 0
   }
 
   implicit object LongAccumulatorParam extends AccumulatorParam[Long] {
-    def addInPlace(t1: Long, t2: Long) = t1 + t2
-    def zero(initialValue: Long) = 0L
+    def addInPlace(t1: Long, t2: Long): Long = t1 + t2
+    def zero(initialValue: Long): Long = 0L
   }
 
   implicit object FloatAccumulatorParam extends AccumulatorParam[Float] {
-    def addInPlace(t1: Float, t2: Float) = t1 + t2
-    def zero(initialValue: Float) = 0f
+    def addInPlace(t1: Float, t2: Float): Float = t1 + t2
+    def zero(initialValue: Float): Float = 0f
   }
 
   // TODO: Add AccumulatorParams for other types, e.g. lists and strings
@@ -351,6 +349,7 @@ private[spark] object Accumulators extends Logging {
       }
     }
 
-  def stringifyPartialValue(partialValue: Any) = "%s".format(partialValue)
-  def stringifyValue(value: Any) = "%s".format(value)
+  def stringifyPartialValue(partialValue: Any): String = "%s".format(partialValue)
+
+  def stringifyValue(value: Any): String = "%s".format(value)
 }

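The Accumulators.scala changes above only add explicit return types and drop two unused imports; behavior is unchanged. For context, a minimal usage sketch of the implicit AccumulatorParam instances being annotated (hypothetical driver program, using Spark 1.x's accumulator API):

// Hypothetical example; sc.accumulator(0) resolves IntAccumulatorParam implicitly,
// which supplies the zero() and addInPlace() shown in the diff above.
import org.apache.spark.{SparkConf, SparkContext}

object AccumulatorParamSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("acc-sketch").setMaster("local[2]"))
    val acc = sc.accumulator(0)                       // Accumulator[Int]
    sc.parallelize(1 to 100).foreach(_ => acc += 1)   // addInPlace runs on the executors
    println(acc.value)                                // 100, read back on the driver
    sc.stop()
  }
}
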
core/src/main/scala/org/apache/spark/ContextCleaner.scala

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
   }
 
   /** Keep cleaning RDD, shuffle, and broadcast state. */
-  private def keepCleaning(): Unit = Utils.logUncaughtExceptions {
+  private def keepCleaning(): Unit = Utils.tryOrStopSparkContext(sc) {
     while (!stopped) {
       try {
         val reference = Option(referenceQueue.remove(ContextCleaner.REF_QUEUE_POLL_TIMEOUT))

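The ContextCleaner change above swaps Utils.logUncaughtExceptions for Utils.tryOrStopSparkContext(sc), so an unexpected error in the cleaning loop stops the SparkContext rather than only being logged while the cleaner thread dies silently. A rough sketch of a wrapper with that shape, inferred from the call site in the diff and illustrative only, not the actual Utils implementation:

// Illustrative shape only; the real helper lives in org.apache.spark.util.Utils
// and may differ in detail.
import scala.util.control.NonFatal
import org.apache.spark.SparkContext

object TryOrStopSketch {
  def tryOrStopSparkContext(sc: SparkContext)(body: => Unit): Unit = {
    try {
      body
    } catch {
      case NonFatal(t) =>
        // Surface the failure by stopping the context instead of letting the
        // background thread die quietly, then rethrow for upstream handling.
        System.err.println(s"Uncaught exception in ${Thread.currentThread().getName}: $t")
        sc.stop()
        throw t
    }
  }
}
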
core/src/main/scala/org/apache/spark/Dependency.scala

Lines changed: 3 additions & 3 deletions
@@ -74,7 +74,7 @@ class ShuffleDependency[K, V, C](
     val mapSideCombine: Boolean = false)
   extends Dependency[Product2[K, V]] {
 
-  override def rdd = _rdd.asInstanceOf[RDD[Product2[K, V]]]
+  override def rdd: RDD[Product2[K, V]] = _rdd.asInstanceOf[RDD[Product2[K, V]]]
 
   val shuffleId: Int = _rdd.context.newShuffleId()
 
@@ -91,7 +91,7 @@ class ShuffleDependency[K, V, C](
  */
 @DeveloperApi
 class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
-  override def getParents(partitionId: Int) = List(partitionId)
+  override def getParents(partitionId: Int): List[Int] = List(partitionId)
 }
 
 
@@ -107,7 +107,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
 class RangeDependency[T](rdd: RDD[T], inStart: Int, outStart: Int, length: Int)
   extends NarrowDependency[T](rdd) {
 
-  override def getParents(partitionId: Int) = {
+  override def getParents(partitionId: Int): List[Int] = {
     if (partitionId >= outStart && partitionId < outStart + length) {
       List(partitionId - outStart + inStart)
     } else {

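The Dependency.scala edits above likewise only make return types explicit; the partition mapping is untouched. A quick worked example of the RangeDependency logic in the last hunk, assuming (as in the full source, where the else branch is truncated by the hunk) that out-of-range partitions map to Nil; hypothetical numbers throughout:

// Hypothetical sketch; RangeDependency maps a contiguous range of output
// partitions back onto one parent's partitions (this is what UnionRDD relies on).
import org.apache.spark.{RangeDependency, SparkConf, SparkContext}

object RangeDependencySketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("dep-sketch").setMaster("local[2]"))
    val parent = sc.parallelize(1 to 30, 3)          // 3 parent partitions
    // Parent partitions 0..2 appear as output partitions 5..7.
    val dep = new RangeDependency(parent, 0, 5, 3)
    println(dep.getParents(5))   // List(0): 5 - 5 + 0
    println(dep.getParents(7))   // List(2): 7 - 5 + 0
    println(dep.getParents(9))   // List(): outside [outStart, outStart + length)
    sc.stop()
  }
}
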
core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 2 additions & 2 deletions
@@ -168,7 +168,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
     }
   }
 
-  def jobIds = Seq(jobWaiter.jobId)
+  def jobIds: Seq[Int] = Seq(jobWaiter.jobId)
 }
 
 
@@ -276,7 +276,7 @@ class ComplexFutureAction[T] extends FutureAction[T] {
 
   override def value: Option[Try[T]] = p.future.value
 
-  def jobIds = jobs
+  def jobIds: Seq[Int] = jobs
 
 }
