@@ -19,7 +19,7 @@ package org.apache.spark
 
 import java.io._
 import java.lang.reflect.Constructor
-import java.net.{URI}
+import java.net.{URI, URL}
 import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID}
 import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
 import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference}
@@ -35,7 +35,7 @@ import scala.util.control.NonFatal
 import com.google.common.collect.MapMaker
 import org.apache.commons.lang3.SerializationUtils
 import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.{FileSystem, Path}
+import org.apache.hadoop.fs.{FileSystem, FsUrlStreamHandlerFactory, Path}
 import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
 import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat}
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob}
@@ -2373,6 +2373,7 @@ class SparkContext(config: SparkConf) extends Logging {
  * various Spark features.
  */
 object SparkContext extends Logging {
+  URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory())
   private val VALID_LOG_LEVELS =
     Set("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN")
 
0 commit comments