@@ -31,24 +31,39 @@ object Main extends Logging {
   val tmp = System.getProperty("java.io.tmpdir")
   val rootDir = conf.get("spark.repl.classdir", tmp)
   val outputDir = Utils.createTempDir(rootDir)
-  val s = new Settings()
-  s.processArguments(List("-Yrepl-class-based",
-    "-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
-    "-classpath", getAddedJars.mkString(File.pathSeparator)), true)
   // the creation of SecurityManager has to be lazy so SPARK_YARN_MODE is set if needed
   lazy val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
   var sparkContext: SparkContext = _
   var sqlContext: SQLContext = _
   var interp = new SparkILoop // this is a public var because tests reset it.
 
+  private var hasErrors = false
+
+  private def scalaOptionError(msg: String): Unit = {
+    hasErrors = true
+    Console.err.println(msg)
+  }
+
   def main(args: Array[String]) {
-    if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
-    // Start the classServer and store its URI in a spark system property
-    // (which will be passed to executors so that they can connect to it)
-    classServer.start()
-    interp.process(s) // Repl starts and goes in loop of R.E.P.L
-    classServer.stop()
-    Option(sparkContext).map(_.stop)
+
+    val interpArguments = List(
+      "-Yrepl-class-based",
+      "-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
+      "-classpath", getAddedJars.mkString(File.pathSeparator)
+    ) ++ args.toList
+
+    val settings = new Settings(scalaOptionError)
+    settings.processArguments(interpArguments, true)
+
+    if (!hasErrors) {
+      if (getMaster == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
+      // Start the classServer and store its URI in a spark system property
+      // (which will be passed to executors so that they can connect to it)
+      classServer.start()
+      interp.process(settings) // Repl starts and goes in loop of R.E.P.L
+      classServer.stop()
+      Option(sparkContext).map(_.stop)
+    }
   }
 
   def getAddedJars: Array[String] = {
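
For context, scala.tools.nsc.Settings can be constructed with an error-reporting function, and processArguments reports unrecognized options through it while returning a success flag; that is the hook the new scalaOptionError/hasErrors pair relies on. Below is a minimal standalone sketch under that assumption, not part of this patch; the object name SettingsErrorDemo and the -not-a-real-flag option are invented for illustration.

import scala.tools.nsc.Settings

object SettingsErrorDemo {
  def main(args: Array[String]): Unit = {
    var hasErrors = false
    // Settings accepts an error callback; bad options are reported through it.
    val settings = new Settings(msg => { hasErrors = true; Console.err.println(msg) })
    // processArguments returns (success, residual args) when told to process all arguments.
    val (ok, residual) = settings.processArguments(List("-deprecation", "-not-a-real-flag"), true)
    println(s"success=$ok hasErrors=$hasErrors residual=$residual")
  }
}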