
Commit bef3afb

Merge remote-tracking branch 'upstream/master' into pyspark-inputformats
2 parents: e001b94 + 39f85e0

File tree: 202 files changed (+4389 / -2124 lines)


conf/spark-env.sh.template

Lines changed: 2 additions & 0 deletions
@@ -5,6 +5,7 @@

 # Options read when launching programs locally with
 # ./bin/run-example or ./bin/spark-submit
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
 # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
 # - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
 # - SPARK_CLASSPATH, default classpath entries to append
@@ -17,6 +18,7 @@
 # - MESOS_NATIVE_LIBRARY, to point to your libmesos.so if you use Mesos

 # Options read in YARN client mode
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
 # - SPARK_EXECUTOR_INSTANCES, Number of workers to start (Default: 2)
 # - SPARK_EXECUTOR_CORES, Number of cores for the workers (Default: 1).
 # - SPARK_EXECUTOR_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G)

core/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -269,7 +269,7 @@
     <dependency>
       <groupId>org.spark-project</groupId>
       <artifactId>pyrolite</artifactId>
-      <version>2.0</version>
+      <version>2.0.1</version>
     </dependency>
   </dependencies>
   <build>
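
The bumped dependency, pyrolite, is a JVM implementation of Python's pickle protocol that PySpark uses to move objects between the JVM and Python workers. A minimal round-trip sketch, assuming pyrolite's net.razorvine.pickle API is on the classpath (this example is illustrative, not part of the commit):

import net.razorvine.pickle.{Pickler, Unpickler}

object PickleRoundTrip {
  def main(args: Array[String]): Unit = {
    // Serialize a JVM collection into Python pickle bytes...
    val bytes: Array[Byte] = new Pickler().dumps(java.util.Arrays.asList("a", "b", "c"))
    // ...and deserialize them back, as the Python side of PySpark would.
    val restored = new Unpickler().loads(bytes)
    println(restored) // a java.util.List rebuilt from the pickle stream
  }
}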
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Core Spark classes in Scala. A few classes here, such as {@link org.apache.spark.Accumulator}
+ * and {@link org.apache.spark.storage.StorageLevel}, are also used in Java, but the
+ * {@link org.apache.spark.api.java} package contains the main Java API.
+ */
+package org.apache.spark;

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 6 additions & 5 deletions
@@ -1110,6 +1110,7 @@ class SparkContext(config: SparkConf) extends Logging {
   }

   /**
+   * :: Experimental ::
    * Submit a job for execution and return a FutureJob holding the result.
    */
   @Experimental
@@ -1345,19 +1346,19 @@ object SparkContext extends Logging {
   * Find the JAR from which a given class was loaded, to make it easy for users to pass
   * their JARs to SparkContext.
   */
-  def jarOfClass(cls: Class[_]): Seq[String] = {
+  def jarOfClass(cls: Class[_]): Option[String] = {
     val uri = cls.getResource("/" + cls.getName.replace('.', '/') + ".class")
     if (uri != null) {
       val uriStr = uri.toString
       if (uriStr.startsWith("jar:file:")) {
         // URI will be of the form "jar:file:/path/foo.jar!/package/cls.class",
         // so pull out the /path/foo.jar
-        List(uriStr.substring("jar:file:".length, uriStr.indexOf('!')))
+        Some(uriStr.substring("jar:file:".length, uriStr.indexOf('!')))
       } else {
-        Nil
+        None
       }
     } else {
-      Nil
+      None
     }
   }

@@ -1366,7 +1367,7 @@ object SparkContext extends Logging {
   * to pass their JARs to SparkContext. In most cases you can call jarOfObject(this) in
   * your driver program.
   */
-  def jarOfObject(obj: AnyRef): Seq[String] = jarOfClass(obj.getClass)
+  def jarOfObject(obj: AnyRef): Option[String] = jarOfClass(obj.getClass)

  /**
   * Creates a modified version of a SparkConf with the parameters that can be passed separately
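
With jarOfClass and jarOfObject now returning Option[String] instead of Seq[String], a missing JAR is an explicit None rather than an empty list. A minimal sketch of a caller adapting to the new signature (the app name and local master are illustrative, not from this commit):

import org.apache.spark.{SparkConf, SparkContext}

object JarLocatorExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("jar-locator-example")
      // Option.toSeq turns Some(path) into Seq(path) and None into Seq.empty,
      // so setJars still receives the Seq[String] it expects.
      .setJars(SparkContext.jarOfObject(this).toSeq)
    val sc = new SparkContext(conf)
    sc.stop()
  }
}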

core/src/main/scala/org/apache/spark/TaskContext.scala

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ class TaskContext(
   val attemptId: Long,
   val runningLocally: Boolean = false,
   @volatile var interrupted: Boolean = false,
-  private[spark] val taskMetrics: TaskMetrics = TaskMetrics.empty()
+  private[spark] val taskMetrics: TaskMetrics = TaskMetrics.empty
 ) extends Serializable {

  @deprecated("use partitionId", "0.8.1")
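
Dropping the parentheses from TaskMetrics.empty follows the Scala convention that parameterless, side-effect-free accessors are declared and called without (). A small illustration of the convention with a stand-in type (Metrics here is hypothetical, not Spark's TaskMetrics):

object MetricsConvention {
  class Metrics(var bytesRead: Long = 0L)

  object Metrics {
    // Parameterless and side-effect free, so by convention it is declared
    // without parentheses and reads like a field access at the call site.
    def empty: Metrics = new Metrics()
  }

  // Call site mirrors the diff: Metrics.empty, not Metrics.empty()
  val default: Metrics = Metrics.empty
}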

core/src/main/scala/org/apache/spark/annotation/AlphaComponent.java

Lines changed: 8 additions & 1 deletion
@@ -19,7 +19,14 @@

 import java.lang.annotation.*;

-/** A new component of Spark which may have unstable API's. */
+/**
+ * A new component of Spark which may have unstable API's.
+ *
+ * NOTE: If there exists a Scaladoc comment that immediately precedes this annotation, the first
+ * line of the comment must be ":: AlphaComponent ::" with no trailing blank line. This is because
+ * of the known issue that Scaladoc displays only either the annotation or the comment, whichever
+ * comes first.
+ */
 @Retention(RetentionPolicy.RUNTIME)
 @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
     ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})

core/src/main/scala/org/apache/spark/annotation/DeveloperApi.java

Lines changed: 5 additions & 0 deletions
@@ -23,6 +23,11 @@
  * A lower-level, unstable API intended for developers.
  *
  * Developer API's might change or be removed in minor versions of Spark.
+ *
+ * NOTE: If there exists a Scaladoc comment that immediately precedes this annotation, the first
+ * line of the comment must be ":: DeveloperApi ::" with no trailing blank line. This is because
+ * of the known issue that Scaladoc displays only either the annotation or the comment, whichever
+ * comes first.
 */
 @Retention(RetentionPolicy.RUNTIME)
 @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,

core/src/main/scala/org/apache/spark/annotation/Experimental.java

Lines changed: 5 additions & 0 deletions
@@ -24,6 +24,11 @@
  *
  * Experimental API's might change or be removed in minor versions of Spark, or be adopted as
  * first-class Spark API's.
+ *
+ * NOTE: If there exists a Scaladoc comment that immediately precedes this annotation, the first
+ * line of the comment must be ":: Experimental ::" with no trailing blank line. This is because
+ * of the known issue that Scaladoc displays only either the annotation or the comment, whichever
+ * comes first.
 */
 @Retention(RetentionPolicy.RUNTIME)
 @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
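
A short sketch of the Scaladoc convention these notes mandate for all three annotations, applied to a hypothetical method (the method itself is illustrative, not from this commit):

import org.apache.spark.annotation.Experimental

object ExampleApi {
  /**
   * :: Experimental ::
   * Counts records in a hypothetical source. The ":: Experimental ::" marker is the first
   * Scaladoc line, with no blank line before the annotation, so the tag is visible even
   * when Scaladoc renders only the comment and not the annotation.
   */
  @Experimental
  def countRecords(path: String): Long = 0L
}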

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 1 addition & 1 deletion
@@ -196,7 +196,7 @@ object SparkSubmit {
       childArgs ++= appArgs.childArgs
     } else if (clusterManager == YARN) {
       for (arg <- appArgs.childArgs) {
-        childArgs += ("--args", arg)
+        childArgs += ("--arg", arg)
       }
     }
   }

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 9 additions & 0 deletions
@@ -116,6 +116,15 @@ private[spark] class SparkSubmitArguments(args: Array[String]) {
     if (args.length == 0) printUsageAndExit(-1)
     if (primaryResource == null) SparkSubmit.printErrorAndExit("Must specify a primary resource")
     if (mainClass == null) SparkSubmit.printErrorAndExit("Must specify a main class with --class")
+
+    if (master.startsWith("yarn")) {
+      val hasHadoopEnv = sys.env.contains("HADOOP_CONF_DIR") || sys.env.contains("YARN_CONF_DIR")
+      val testing = sys.env.contains("SPARK_TESTING")
+      if (!hasHadoopEnv && !testing) {
+        throw new Exception(s"When running with master '$master' " +
+          "either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
+      }
+    }
   }

   override def toString = {
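
The new validation can be exercised in isolation; a self-contained sketch of the same pattern (the object and method names here are hypothetical, not Spark code):

object YarnEnvCheck {
  // Mirrors the commit's check: YARN masters require Hadoop/YARN config
  // to be discoverable through the environment.
  def validate(master: String): Unit = {
    if (master.startsWith("yarn")) {
      val hasHadoopEnv = sys.env.contains("HADOOP_CONF_DIR") || sys.env.contains("YARN_CONF_DIR")
      if (!hasHadoopEnv) {
        throw new Exception(s"When running with master '$master' " +
          "either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
      }
    }
  }

  def main(args: Array[String]): Unit = {
    validate("local[2]")    // non-YARN masters pass unchecked
    validate("yarn-client") // throws unless HADOOP_CONF_DIR or YARN_CONF_DIR is set
  }
}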
