Skip to content

Commit 452e468

Browse files
yanboliang authored and sarutak committed
[SPARK-17577][CORE][2.0 BACKPORT] Update SparkContext.addFile to make it work well on Windows
## What changes were proposed in this pull request?

Update `SparkContext.addFile` to correct the use of `URI` and `Path`, so that it can work well on Windows. This is a backport for branch-2.0; more details at #15131.

## How was this patch tested?

Backport, checked by AppVeyor.

Author: Yanbo Liang <[email protected]>

Closes #15217 from yanboliang/uri-2.0.
1 parent 1a8ea00 commit 452e468

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1419,7 +1419,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
14191419
* supported for Hadoop-supported filesystems.
14201420
*/
14211421
def addFile(path: String, recursive: Boolean): Unit = {
1422-
val uri = new URI(path)
1422+
val uri = new Path(path).toUri
14231423
val schemeCorrectedPath = uri.getScheme match {
14241424
case null | "local" => new File(path).getCanonicalFile.toURI.toString
14251425
case _ => path
@@ -1453,8 +1453,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
14531453
logInfo(s"Added file $path at $key with timestamp $timestamp")
14541454
// Fetch the file locally so that closures which are run on the driver can still use the
14551455
// SparkFiles API to access files.
1456-
Utils.fetchFile(path, new File(SparkFiles.getRootDirectory()), conf, env.securityManager,
1457-
hadoopConfiguration, timestamp, useCache = false)
1456+
Utils.fetchFile(uri.toString, new File(SparkFiles.getRootDirectory()), conf,
1457+
env.securityManager, hadoopConfiguration, timestamp, useCache = false)
14581458
postEnvironmentUpdate()
14591459
}
14601460
}

0 commit comments

Comments (0)