@@ -14,7 +14,7 @@ import scala.util.Properties
 object RunSpark {
 
   def run(
-    build: Build.Successful,
+    builds: Seq[Build.Successful],
     mainClass: String,
     args: Seq[String],
     submitArgs: Seq[String],
@@ -27,10 +27,10 @@ object RunSpark {
     // FIXME Get Spark.sparkModules via provided settings?
     val providedModules = Spark.sparkModules
     val providedFiles =
-      value(PackageCmd.providedFiles(build, providedModules, logger)).toSet
-    val depCp = build.dependencyClassPath.filterNot(providedFiles)
-    val javaHomeInfo = build.options.javaHome().value
-    val javaOpts = build.options.javaOptions.javaOpts.toSeq.map(_.value.value)
+      value(PackageCmd.providedFiles(builds, providedModules, logger)).toSet
+    val depCp = builds.flatMap(_.dependencyClassPath).distinct.filterNot(providedFiles)
+    val javaHomeInfo = builds.head.options.javaHome().value
+    val javaOpts = builds.head.options.javaOptions.javaOpts.toSeq.map(_.value.value)
     val ext = if (Properties.isWin) ".cmd" else ""
     val submitCommand: String =
       EnvVar.Spark.sparkHome.valueOpt
@@ -44,28 +44,25 @@ object RunSpark {
       else Seq("--jars", depCp.mkString(","))

     scratchDirOpt.foreach(os.makeDir.all(_))
-    val library = Library.libraryJar(build)
+    val libraries = builds.map(Library.libraryJar(_))

     val finalCommand =
       Seq(submitCommand, "--class", mainClass) ++
         jarsArgs ++
         javaOpts.flatMap(opt => Seq("--driver-java-options", opt)) ++
         submitArgs ++
-        Seq(library.toString) ++
+        libraries.map(_.toString) ++
         args
     val envUpdates = javaHomeInfo.envUpdates(sys.env)
-    if (showCommand)
-      Left(Runner.envCommand(envUpdates) ++ finalCommand)
+    if showCommand then Left(Runner.envCommand(envUpdates) ++ finalCommand)
     else {
       val proc =
-        if (allowExecve)
+        if allowExecve then
           Runner.maybeExec("spark-submit", finalCommand, logger, extraEnv = envUpdates)
-        else
-          Runner.run(finalCommand, logger, extraEnv = envUpdates)
+        else Runner.run(finalCommand, logger, extraEnv = envUpdates)
       Right((
         proc,
-        if (scratchDirOpt.isEmpty) Some(() => os.remove(library, checkExists = true))
-        else None
+        if scratchDirOpt.isEmpty then Some(() => libraries.foreach(l => os.remove(l, checkExists = true))) else None
       ))
     }
   }
@@ -83,7 +80,7 @@ object RunSpark {

     // FIXME Get Spark.sparkModules via provided settings?
     val providedModules = Spark.sparkModules
-    val sparkClassPath = value(PackageCmd.providedFiles(build, providedModules, logger))
+    val sparkClassPath = value(PackageCmd.providedFiles(Seq(build), providedModules, logger)) // TODO: handle multiple builds

     scratchDirOpt.foreach(os.makeDir.all(_))
     val library = Library.libraryJar(build)
0 commit comments