File tree Expand file tree Collapse file tree 2 files changed +20
-6
lines changed
main/scala/org/apache/spark
test/scala/org/apache/spark Expand file tree Collapse file tree 2 files changed +20
-6
lines changed Original file line number Diff line number Diff line change @@ -1018,6 +1018,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
10181018 * supported for Hadoop-supported filesystems.
10191019 */
10201020 def addFile (path : String , recursive : Boolean ): Unit = {
1021+     val isLocalMode = conf.get("spark.master").startsWith("local")
1021 1022     val uri = new URI(path)
1022 1023     val schemeCorrectedPath = uri.getScheme match {
1023 1024       case null | "local" => "file:" + uri.getPath
@@ -1032,18 +1033,20 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
1032 1033       throw new SparkException(s"Added file $hadoopPath does not exist.")
1033 1034     }
1034 1035     val isDir = fs.isDirectory(hadoopPath)
1035-     if (scheme == "file" && isDir) {
1036-       throw new SparkException(s"addFile does not support adding local directories.")
1036+     if (!isLocalMode && scheme == "file" && isDir) {
1037+       throw new SparkException(s"addFile does not support local directories when not running " +
1038+         "local mode.")
10371039 }
1038 1040     if (!recursive && isDir) {
1039 1041       throw new SparkException(s"Added file $hadoopPath is a directory and recursive is not " +
1040 1042         "turned on.")
10411043 }
10421044 }
10431045
1044-     val key = scheme match {
1045-       case "file" => env.httpFileServer.addFile(new File(uri.getPath))
1046-       case _ => path
1046+     val key = if (!isLocalMode && scheme == "file") {
1047+       env.httpFileServer.addFile(new File(uri.getPath))
1048+     } else {
1049+       schemeCorrectedPath
1047 1050     }
10481051 val timestamp = System .currentTimeMillis
10491052 addedFiles(key) = timestamp
Original file line number Diff line number Diff line change @@ -113,7 +113,18 @@ class SparkContextSuite extends FunSuite with LocalSparkContext {
113113 }
114114 }
115115
116-   test("addFile recursive can't add the same directory twice") {
116+   test("addFile recursive can't add directories by default") {
117+     val dir = new File("dir")
117 118
119+     try {
120+       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
121+       sc.addFile(dir.getAbsolutePath)
122+       assert(false, "should have thrown exception")
123+     } catch {
124+       case _: SparkException =>
125+     } finally {
126+       sc.stop()
127+       dir.delete()
128+     }
118129 }
119130}
You can’t perform that action at this time.
0 commit comments