
Commit 4d9d8f7

Merge branch 'master' of github.com:apache/spark into yarn-docs

2 parents: 041017a + 3e13b8c

56 files changed (+374, -161 lines)


bagel/src/test/resources/log4j.properties

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@
 log4j.rootCategory=INFO, file
 log4j.appender.file=org.apache.log4j.FileAppender
 log4j.appender.file.append=false
-log4j.appender.file.file=bagel/target/unit-tests.log
+log4j.appender.file.file=target/unit-tests.log
 log4j.appender.file.layout=org.apache.log4j.PatternLayout
 log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n

bin/spark-shell.cmd

Lines changed: 2 additions & 3 deletions
@@ -17,7 +17,6 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-rem Find the path of sbin
-set BIN=%~dp0..\bin\
+set SPARK_HOME=%~dp0..
 
-cmd /V /E /C %BIN%spark-class2.cmd org.apache.spark.repl.Main %*
+cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd spark-internal %* --class org.apache.spark.repl.Main
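
For context: spark-shell.cmd no longer launches spark-class2.cmd itself; it defers to the new bin\spark-submit.cmd shown next. With that wiring, a hypothetical call such as bin\spark-shell.cmd --driver-memory 1g ends up invoking SparkSubmit with the arguments spark-internal --driver-memory 1g --class org.apache.spark.repl.Main, so driver options are handled by the submit script rather than by the shell launcher.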

bin/spark-submit.cmd

Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SPARK_HOME=%~dp0..
+set ORIG_ARGS=%*
+
+rem Clear the values of all variables used
+set DEPLOY_MODE=
+set DRIVER_MEMORY=
+set SPARK_SUBMIT_LIBRARY_PATH=
+set SPARK_SUBMIT_CLASSPATH=
+set SPARK_SUBMIT_OPTS=
+set SPARK_DRIVER_MEMORY=
+
+:loop
+if [%1] == [] goto continue
+if [%1] == [--deploy-mode] (
+  set DEPLOY_MODE=%2
+) else if [%1] == [--driver-memory] (
+  set DRIVER_MEMORY=%2
+) else if [%1] == [--driver-library-path] (
+  set SPARK_SUBMIT_LIBRARY_PATH=%2
+) else if [%1] == [--driver-class-path] (
+  set SPARK_SUBMIT_CLASSPATH=%2
+) else if [%1] == [--driver-java-options] (
+  set SPARK_SUBMIT_OPTS=%2
+)
+shift
+goto loop
+:continue
+
+if [%DEPLOY_MODE%] == [] (
+  set DEPLOY_MODE=client
+)
+
+if not [%DRIVER_MEMORY%] == [] if [%DEPLOY_MODE%] == [client] (
+  set SPARK_DRIVER_MEMORY=%DRIVER_MEMORY%
+)
+
+cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.SparkSubmit %ORIG_ARGS%
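
A note on usage (the class name and jar path below are hypothetical, not taken from the commit): the script only interprets --deploy-mode, --driver-memory, --driver-library-path, --driver-class-path and --driver-java-options, and it still forwards the complete original argument list to org.apache.spark.deploy.SparkSubmit through ORIG_ARGS. A client-mode run might look like

    bin\spark-submit.cmd --class org.example.MyApp --deploy-mode client --driver-memory 2g C:\jobs\my-app.jar

in which case DRIVER_MEMORY is copied into SPARK_DRIVER_MEMORY because the deploy mode is client.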

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 5 additions & 6 deletions
@@ -276,27 +276,26 @@ class SparkContext(config: SparkConf) extends Logging {
     .getOrElse(512)
 
   // Environment variables to pass to our executors.
-  // NOTE: This should only be used for test related settings.
-  private[spark] val testExecutorEnvs = HashMap[String, String]()
+  private[spark] val executorEnvs = HashMap[String, String]()
 
   // Convert java options to env vars as a work around
   // since we can't set env vars directly in sbt.
   for { (envKey, propKey) <- Seq(("SPARK_TESTING", "spark.testing"))
     value <- Option(System.getenv(envKey)).orElse(Option(System.getProperty(propKey)))} {
-    testExecutorEnvs(envKey) = value
+    executorEnvs(envKey) = value
   }
   // The Mesos scheduler backend relies on this environment variable to set executor memory.
   // TODO: Set this only in the Mesos scheduler.
-  testExecutorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
-  testExecutorEnvs ++= conf.getExecutorEnv
+  executorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
+  executorEnvs ++= conf.getExecutorEnv
 
   // Set SPARK_USER for user who is running SparkContext.
   val sparkUser = Option {
     Option(System.getProperty("user.name")).getOrElse(System.getenv("SPARK_USER"))
   }.getOrElse {
     SparkContext.SPARK_UNKNOWN_USER
   }
-  testExecutorEnvs("SPARK_USER") = sparkUser
+  executorEnvs("SPARK_USER") = sparkUser
 
   // Create and start the scheduler
   private[spark] var taskScheduler = SparkContext.createTaskScheduler(this, master)
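
The rename from testExecutorEnvs to executorEnvs reflects that this map is no longer test-only: the line executorEnvs ++= conf.getExecutorEnv merges user-provided executor environment variables into it, and the scheduler backends changed below hand the map to executors. A minimal sketch of how a variable would reach this map, assuming the public SparkConf.setExecutorEnv API; the master URL, variable name and value are hypothetical:

    import org.apache.spark.{SparkConf, SparkContext}

    // Hypothetical example: MY_SERVICE_URL is merged into SparkContext.executorEnvs
    // via conf.getExecutorEnv and passed along by the standalone/Mesos backends below.
    val conf = new SparkConf()
      .setMaster("spark://master-host:7077") // hypothetical standalone master URL
      .setAppName("executor-env-example")
      .setExecutorEnv("MY_SERVICE_URL", "http://example.com/api")
    val sc = new SparkContext(conf)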

core/src/main/scala/org/apache/spark/TestUtils.scala

Lines changed: 1 addition & 0 deletions
@@ -43,6 +43,7 @@ private[spark] object TestUtils {
    */
   def createJarWithClasses(classNames: Seq[String], value: String = ""): URL = {
     val tempDir = Files.createTempDir()
+    tempDir.deleteOnExit()
     val files = for (name <- classNames) yield createCompiledClass(name, tempDir, value)
     val jarFile = new File(tempDir, "testJar-%s.jar".format(System.currentTimeMillis()))
     createJar(files, jarFile)
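
One caveat about the added call, noted here for context: java.io.File.deleteOnExit() only deletes a directory at JVM shutdown if it is empty at that point, so the generated class files and jar inside the temp dir are not removed by this line alone. A minimal sketch of the same pattern with Guava, names hypothetical:

    import com.google.common.io.Files

    val tempDir = Files.createTempDir() // Guava creates a fresh temp directory
    tempDir.deleteOnExit()              // deleted at JVM exit, but only if empty by then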

core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala

Lines changed: 4 additions & 4 deletions
@@ -112,7 +112,7 @@ private[spark] object HttpBroadcast extends Logging {
   private var securityManager: SecurityManager = null
 
   // TODO: This shouldn't be a global variable so that multiple SparkContexts can coexist
-  private val files = new TimeStampedHashSet[String]
+  private val files = new TimeStampedHashSet[File]
   private val httpReadTimeout = TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES).toInt
   private var compressionCodec: CompressionCodec = null
   private var cleaner: MetadataCleaner = null
@@ -173,7 +173,7 @@ private[spark] object HttpBroadcast extends Logging {
     val serOut = ser.serializeStream(out)
     serOut.writeObject(value)
     serOut.close()
-    files += file.getAbsolutePath
+    files += file
   }
 
   def read[T: ClassTag](id: Long): T = {
@@ -216,7 +216,7 @@ private[spark] object HttpBroadcast extends Logging {
     SparkEnv.get.blockManager.master.removeBroadcast(id, removeFromDriver, blocking)
     if (removeFromDriver) {
       val file = getFile(id)
-      files.remove(file.toString)
+      files.remove(file)
       deleteBroadcastFile(file)
     }
   }
@@ -232,7 +232,7 @@ private[spark] object HttpBroadcast extends Logging {
       val (file, time) = (entry.getKey, entry.getValue)
       if (time < cleanupTime) {
         iterator.remove()
-        deleteBroadcastFile(new File(file.toString))
+        deleteBroadcastFile(file)
       }
     }
   }

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@ private[spark] class SparkDeploySchedulerBackend(
     }
 
     val command = Command(
-      "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.testExecutorEnvs,
+      "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs,
       classPathEntries, libraryPathEntries, extraJavaOpts)
     val sparkHome = sc.getSparkHome()
     val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala

Lines changed: 1 addition & 1 deletion
@@ -122,7 +122,7 @@ private[spark] class CoarseMesosSchedulerBackend(
     val extraLibraryPath = conf.getOption(libraryPathOption).map(p => s"-Djava.library.path=$p")
     val extraOpts = Seq(extraJavaOpts, extraLibraryPath).flatten.mkString(" ")
 
-    sc.testExecutorEnvs.foreach { case (key, value) =>
+    sc.executorEnvs.foreach { case (key, value) =>
       environment.addVariables(Environment.Variable.newBuilder()
         .setName(key)
         .setValue(value)

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala

Lines changed: 1 addition & 1 deletion
@@ -90,7 +90,7 @@ private[spark] class MesosSchedulerBackend(
       "Spark home is not set; set it through the spark.home system " +
       "property, the SPARK_HOME environment variable or the SparkContext constructor"))
     val environment = Environment.newBuilder()
-    sc.testExecutorEnvs.foreach { case (key, value) =>
+    sc.executorEnvs.foreach { case (key, value) =>
       environment.addVariables(Environment.Variable.newBuilder()
         .setName(key)
         .setValue(value)

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 10 additions & 8 deletions
@@ -586,15 +586,17 @@ private[spark] object Utils extends Logging {
    * Don't follow directories if they are symlinks.
    */
   def deleteRecursively(file: File) {
-    if ((file.isDirectory) && !isSymlink(file)) {
-      for (child <- listFilesSafely(file)) {
-        deleteRecursively(child)
+    if (file != null) {
+      if ((file.isDirectory) && !isSymlink(file)) {
+        for (child <- listFilesSafely(file)) {
+          deleteRecursively(child)
+        }
       }
-    }
-    if (!file.delete()) {
-      // Delete can also fail if the file simply did not exist
-      if (file.exists()) {
-        throw new IOException("Failed to delete: " + file.getAbsolutePath)
+      if (!file.delete()) {
+        // Delete can also fail if the file simply did not exist
+        if (file.exists()) {
+          throw new IOException("Failed to delete: " + file.getAbsolutePath)
+        }
       }
     }
   }
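
The change above wraps the existing body in a null check, so calling deleteRecursively(null) is now a no-op instead of a NullPointerException. A condensed standalone sketch of the same control flow, with Spark's isSymlink and listFilesSafely helpers omitted, so this is an approximation rather than the real Utils code:

    import java.io.{File, IOException}

    // Approximate sketch of the guarded recursive delete; the symlink check used
    // by the real Utils.deleteRecursively is left out here.
    def deleteRecursively(file: File): Unit = {
      if (file != null) {
        if (file.isDirectory) {
          Option(file.listFiles()).getOrElse(Array.empty[File]).foreach(deleteRecursively)
        }
        if (!file.delete() && file.exists()) {
          throw new IOException("Failed to delete: " + file.getAbsolutePath)
        }
      }
    }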
