@@ -50,7 +50,7 @@ private[spark] class Executor(
   logInfo(s"Starting executor ID $executorId on host $executorHostname")

   // Application dependencies (added through SparkContext) that we've fetched so far on this node.
-  // Each map holds the master's timestamp for the version of that file, JAR, or directory we got.
+  // Each map holds the master's timestamp for the version of that file or JAR we got.
   private val currentFiles: HashMap[String, Long] = new HashMap[String, Long]()
   private val currentJars: HashMap[String, Long] = new HashMap[String, Long]()

@@ -171,8 +171,7 @@ private[spark] class Executor(
       startGCTime = gcTime

       try {
-        val (taskFiles, taskJars, taskBytes) =
-          Task.deserializeWithDependencies(serializedTask)
+        val (taskFiles, taskJars, taskBytes) = Task.deserializeWithDependencies(serializedTask)
         updateDependencies(taskFiles, taskJars)
         task = ser.deserialize[Task[Any]](taskBytes, Thread.currentThread.getContextClassLoader)

@@ -334,9 +333,7 @@ private[spark] class Executor(
    * Download any missing dependencies if we receive a new set of files and JARs from the
    * SparkContext. Also adds any new JARs we fetched to the class loader.
    */
-  private def updateDependencies(
-      newFiles: HashMap[String, Long],
-      newJars: HashMap[String, Long]) {
+  private def updateDependencies(newFiles: HashMap[String, Long], newJars: HashMap[String, Long]) {
     lazy val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
     synchronized {
       // Fetch missing dependencies
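
For context on what this method does inside the synchronized block: updateDependencies compares each incoming (name, timestamp) pair against the currentFiles/currentJars maps declared above and only downloads entries whose timestamp is newer than the locally recorded one. Below is a minimal sketch of that per-file loop, assuming Spark's Utils.fetchFile helper and the fields visible in Executor (conf, env, isLocal); it is illustrative only and not part of this diff, and the exact fetchFile signature varies across Spark versions.

// Sketch only, not part of this diff. Assumes Utils.fetchFile and the
// Executor fields conf, env, isLocal, and currentFiles shown above.
for ((name, timestamp) <- newFiles if currentFiles.getOrElse(name, -1L) < timestamp) {
  logInfo(s"Fetching $name with timestamp $timestamp")
  // Download into the executor's working directory; reuse the download
  // cache except in local mode, where driver and executor share a filesystem.
  Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
    env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
  currentFiles(name) = timestamp
}

The timestamp guard is what makes repeated task launches cheap: a dependency is fetched again only when the driver has registered a newer version of it.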