@@ -389,7 +389,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
389389
390390 _conf.set("spark.executor.id", SparkContext.DRIVER_IDENTIFIER)
391391
392- _jars = _conf.getOption("spark.jars").map(_.split(",")).map(_.filter(_.size != 0)).toSeq.flatten
392+ _jars = _conf.getOption("spark.jars").map(_.split(",")).map(_.filter(_.size != 0)).toSeq.flatten
393393 _files = _conf.getOption("spark.files").map(_.split(",")).map(_.filter(_.size != 0))
394394 .toSeq.flatten
395395
@@ -438,7 +438,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
438438 _ui =
439439 if (conf.getBoolean("spark.ui.enabled", true)) {
440440 Some(SparkUI.createLiveUI(this, _conf, listenerBus, _jobProgressListener,
441- _env.securityManager,appName, startTime = startTime))
441+ _env.securityManager, appName, startTime = startTime))
442442 } else {
443443 // For tests, do not enable the UI
444444 None
@@ -917,7 +917,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
917917 classOf[FixedLengthBinaryInputFormat],
918918 classOf[LongWritable],
919919 classOf[BytesWritable],
920- conf= conf)
920+ conf = conf)
921921 val data = br.map { case (k, v) =>
922922 val bytes = v.getBytes
923923 assert(bytes.length == recordLength, "Byte array does not have correct length")
@@ -1267,7 +1267,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
12671267 */
12681268 def accumulableCollection[R <% Growable[T] with TraversableOnce[T] with Serializable: ClassTag, T]
12691269 (initialValue: R): Accumulable[R, T] = {
1270- val param = new GrowableAccumulableParam[R,T]
1270+ val param = new GrowableAccumulableParam[R, T]
12711271 val acc = new Accumulable(initialValue, param)
12721272 cleaner.foreach(_.registerAccumulatorForCleanup(acc))
12731273 acc
@@ -1316,7 +1316,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
13161316 val uri = new URI(path)
13171317 val schemeCorrectedPath = uri.getScheme match {
13181318 case null | "local" => new File(path).getCanonicalFile.toURI.toString
1319- case _ => path
1319+ case _ => path
13201320 }
13211321
13221322 val hadoopPath = new Path(schemeCorrectedPath)
0 commit comments