@@ -517,17 +517,15 @@ private[spark] class Client(
      */
     val cachedSecondaryJarLinks = ListBuffer.empty[String]
     List(
-      (sparkConf.get(JARS_TO_DISTRIBUTE).orNull, LocalResourceType.FILE, true),
-      (sparkConf.get(FILES_TO_DISTRIBUTE).orNull, LocalResourceType.FILE, false),
-      (sparkConf.get(ARCHIVES_TO_DISTRIBUTE).orNull, LocalResourceType.ARCHIVE, false)
+      (sparkConf.get(JARS_TO_DISTRIBUTE), LocalResourceType.FILE, true),
+      (sparkConf.get(FILES_TO_DISTRIBUTE), LocalResourceType.FILE, false),
+      (sparkConf.get(ARCHIVES_TO_DISTRIBUTE), LocalResourceType.ARCHIVE, false)
     ).foreach { case (flist, resType, addToClasspath) =>
-      if (flist != null && !flist.isEmpty()) {
-        flist.split(',').foreach { file =>
-          val (_, localizedPath) = distribute(file, resType = resType)
-          require(localizedPath != null)
-          if (addToClasspath) {
-            cachedSecondaryJarLinks += localizedPath
-          }
+      flist.foreach { file =>
+        val (_, localizedPath) = distribute(file, resType = resType)
+        require(localizedPath != null)
+        if (addToClasspath) {
+          cachedSecondaryJarLinks += localizedPath
         }
       }
     }
@@ -1264,7 +1262,7 @@ private object Client extends Logging {
 
     val secondaryJars =
       if (args != null) {
-        getSecondaryJarUris(sparkConf.get(JARS_TO_DISTRIBUTE).map(_.split(",").toSeq))
+        getSecondaryJarUris(Option(sparkConf.get(JARS_TO_DISTRIBUTE)))
       } else {
         getSecondaryJarUris(sparkConf.get(SECONDARY_JARS))
       }
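
For reference, the diff reads as if `JARS_TO_DISTRIBUTE`, `FILES_TO_DISTRIBUTE`, and `ARCHIVES_TO_DISTRIBUTE` are now sequence-typed config entries, so `sparkConf.get(...)` returns a `Seq[String]` directly instead of a nullable comma-separated string. A minimal sketch of what such an entry could look like with Spark's internal `ConfigBuilder` (the key name, builder chain, and default below are illustrative assumptions, not copied from this change, and the declaration would live inside a Spark-internal config object):

```scala
import org.apache.spark.internal.config.ConfigBuilder

// Sketch only: a sequence-typed entry with an empty default.
private[spark] val JARS_TO_DISTRIBUTE = ConfigBuilder("spark.yarn.dist.jars")
  .stringConf
  .toSequence              // get() yields Seq[String] rather than Option[String]
  .createWithDefault(Nil)
```

With an entry of this shape, the first hunk can iterate the sequence directly (the empty default replaces the `null`/`isEmpty` guard and the manual `split(',')`), and the second hunk only needs an `Option(...)` wrapper, since both call sites suggest `getSecondaryJarUris` takes an `Option[Seq[String]]`.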