Skip to content

Commit a91b08f

Browse files
author
yuling
committed
Resolve failure to use a UDF whose jar file is located in HDFS.
1 parent 2bc1c95 commit a91b08f

File tree

1 file changed

+3
-2
lines changed

1 file changed

+3
-2
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark
1919

2020
import java.io._
2121
import java.lang.reflect.Constructor
22-
import java.net.{URI}
22+
import java.net.{URI, URL}
2323
import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID}
2424
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
2525
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference}
@@ -35,7 +35,7 @@ import scala.util.control.NonFatal
3535
import com.google.common.collect.MapMaker
3636
import org.apache.commons.lang3.SerializationUtils
3737
import org.apache.hadoop.conf.Configuration
38-
import org.apache.hadoop.fs.{FileSystem, Path}
38+
import org.apache.hadoop.fs.{FileSystem, FsUrlStreamHandlerFactory, Path}
3939
import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
4040
import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat}
4141
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob}
@@ -2373,6 +2373,7 @@ class SparkContext(config: SparkConf) extends Logging {
23732373
* various Spark features.
23742374
*/
23752375
object SparkContext extends Logging {
2376+
URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory())
23762377
private val VALID_LOG_LEVELS =
23772378
Set("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN")
23782379

0 commit comments

Comments
 (0)