@@ -986,15 +986,15 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
     union(Seq(first) ++ rest)

   /** Get an RDD that has no partitions or elements. */
-  def emptyRDD[T: ClassTag] = new EmptyRDD[T](this)
+  def emptyRDD[T: ClassTag]: EmptyRDD[T] = new EmptyRDD[T](this)

   // Methods for creating shared variables

   /**
    * Create an [[org.apache.spark.Accumulator]] variable of a given type, which tasks can "add"
    * values to using the `+=` method. Only the driver can access the accumulator's `value`.
    */
-  def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]) =
+  def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T] =
   {
     val acc = new Accumulator(initialValue, param)
     cleaner.foreach(_.registerAccumulatorForCleanup(acc))
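
With the annotation in place, `emptyRDD` now advertises `EmptyRDD[T]` instead of leaving the result to type inference. A minimal caller-side sketch, assuming `sc` is a live SparkContext:

    val empty = sc.emptyRDD[Int]       // statically an EmptyRDD[Int]
    assert(empty.partitions.isEmpty)   // no partitions at all
    assert(empty.count() == 0L)        // hence no elements
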
@@ -1006,7 +1006,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
    * in the Spark UI. Tasks can "add" values to the accumulator using the `+=` method. Only the
    * driver can access the accumulator's `value`.
    */
-  def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]) = {
+  def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T])
+    : Accumulator[T] = {
     val acc = new Accumulator(initialValue, param, Some(name))
     cleaner.foreach(_.registerAccumulatorForCleanup(acc))
     acc
@@ -1018,7 +1019,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
    * @tparam R accumulator result type
    * @tparam T type that can be added to the accumulator
    */
-  def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T]) = {
+  def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T])
+    : Accumulable[R, T] = {
     val acc = new Accumulable(initialValue, param)
     cleaner.foreach(_.registerAccumulatorForCleanup(acc))
     acc
@@ -1031,7 +1033,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
    * @tparam R accumulator result type
    * @tparam T type that can be added to the accumulator
    */
-  def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T]) = {
+  def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T])
+    : Accumulable[R, T] = {
     val acc = new Accumulable(initialValue, param, Some(name))
     cleaner.foreach(_.registerAccumulatorForCleanup(acc))
     acc
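
All four factory methods above now state their `Accumulator[T]` / `Accumulable[R, T]` result types explicitly. A usage sketch under the Spark 1.x API shown here (named accumulators also appear in the web UI):

    val errors = sc.accumulator(0, "error count")   // Accumulator[Int]
    sc.parallelize(1 to 100).foreach { i =>
      if (i % 10 == 0) errors += 1                  // tasks can only add
    }
    println(errors.value)                           // driver-only read: 10
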
@@ -1209,7 +1212,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   override def killExecutor(executorId: String): Boolean = super.killExecutor(executorId)

   /** The version of Spark on which this application is running. */
-  def version = SPARK_VERSION
+  def version: String = SPARK_VERSION

   /**
    * Return a map from the slave to the max memory available for caching and the remaining
@@ -1659,7 +1662,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
     }
   }

-  def getCheckpointDir = checkpointDir
+  def getCheckpointDir: Option[String] = checkpointDir

   /** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
   def defaultParallelism: Int = {
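
Both getters keep their behavior; only the declared types become visible. `getCheckpointDir` is an `Option[String]` because no checkpoint directory exists until one is set, e.g.:

    println(sc.version)                    // e.g. "1.3.0"
    assert(sc.getCheckpointDir.isEmpty)    // None before setCheckpointDir is called
    sc.setCheckpointDir("/tmp/checkpoints")
    sc.getCheckpointDir.foreach(println)   // a generated directory under /tmp/checkpoints
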
@@ -1900,28 +1903,28 @@ object SparkContext extends Logging {
     "backward compatibility.", "1.3.0")
   object DoubleAccumulatorParam extends AccumulatorParam[Double] {
     def addInPlace(t1: Double, t2: Double): Double = t1 + t2
-    def zero(initialValue: Double) = 0.0
+    def zero(initialValue: Double): Double = 0.0
   }

   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
     "backward compatibility.", "1.3.0")
   object IntAccumulatorParam extends AccumulatorParam[Int] {
     def addInPlace(t1: Int, t2: Int): Int = t1 + t2
-    def zero(initialValue: Int) = 0
+    def zero(initialValue: Int): Int = 0
   }

   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
     "backward compatibility.", "1.3.0")
   object LongAccumulatorParam extends AccumulatorParam[Long] {
-    def addInPlace(t1: Long, t2: Long) = t1 + t2
-    def zero(initialValue: Long) = 0L
+    def addInPlace(t1: Long, t2: Long): Long = t1 + t2
+    def zero(initialValue: Long): Long = 0L
   }

   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
     "backward compatibility.", "1.3.0")
   object FloatAccumulatorParam extends AccumulatorParam[Float] {
-    def addInPlace(t1: Float, t2: Float) = t1 + t2
-    def zero(initialValue: Float) = 0f
+    def addInPlace(t1: Float, t2: Float): Float = t1 + t2
+    def zero(initialValue: Float): Float = 0f
   }

   // The following deprecated functions have already been moved to `object RDD` to
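
The deprecated objects above still illustrate the `AccumulatorParam` contract: `addInPlace` merges two accumulated values and `zero` supplies a neutral element shaped like the initial value. A hypothetical custom param written against that contract (the `VectorAccumulatorParam` name is illustrative, not part of Spark):

    object VectorAccumulatorParam extends AccumulatorParam[Array[Double]] {
      def addInPlace(t1: Array[Double], t2: Array[Double]): Array[Double] = {
        var i = 0
        while (i < t1.length) { t1(i) += t2(i); i += 1 }   // merge in place, no extra allocation
        t1
      }
      def zero(initialValue: Array[Double]): Array[Double] =
        new Array[Double](initialValue.length)             // all-zero vector of the same length
    }
    val vecAcc = sc.accumulator(Array(0.0, 0.0, 0.0))(VectorAccumulatorParam)
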
@@ -1931,18 +1934,18 @@ object SparkContext extends Logging {
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
     "kept here only for backward compatibility.", "1.3.0")
   def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
-      (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null) = {
+      (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): PairRDDFunctions[K, V] =
     RDD.rddToPairRDDFunctions(rdd)
-  }

   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
     "kept here only for backward compatibility.", "1.3.0")
-  def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]) = RDD.rddToAsyncRDDActions(rdd)
+  def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]): AsyncRDDActions[T] =
+    RDD.rddToAsyncRDDActions(rdd)

   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
     "kept here only for backward compatibility.", "1.3.0")
   def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
-      rdd: RDD[(K, V)]) = {
+      rdd: RDD[(K, V)]): SequenceFileRDDFunctions[K, V] = {
     val kf = implicitly[K => Writable]
     val vf = implicitly[V => Writable]
     // Set the Writable class to null and `SequenceFileRDDFunctions` will use Reflection to get it
@@ -1954,16 +1957,17 @@ object SparkContext extends Logging {
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
     "kept here only for backward compatibility.", "1.3.0")
   def rddToOrderedRDDFunctions[K: Ordering: ClassTag, V: ClassTag](
-      rdd: RDD[(K, V)]) =
+      rdd: RDD[(K, V)]): OrderedRDDFunctions[K, V, (K, V)] =
     RDD.rddToOrderedRDDFunctions(rdd)

   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
     "kept here only for backward compatibility.", "1.3.0")
-  def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]) = RDD.doubleRDDToDoubleRDDFunctions(rdd)
+  def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]): DoubleRDDFunctions =
+    RDD.doubleRDDToDoubleRDDFunctions(rdd)

   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
     "kept here only for backward compatibility.", "1.3.0")
-  def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T]) =
+  def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T]): DoubleRDDFunctions =
     RDD.numericRDDToDoubleRDDFunctions(rdd)

   // The following deprecated functions have already been moved to `object WritableFactory` to
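
Spelling out the result types matters most for these forwarders because they are implicit conversions: the declared type is exactly what enrichment callers resolve against. A sketch of the conversion at work (these forwarders are deprecated; the RDD companion object supplies the same implicits automatically):

    import org.apache.spark.SparkContext._          // older import style still works
    val pairs = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
    val counts = pairs.reduceByKey(_ + _)           // via the PairRDDFunctions conversion
    counts.collect().foreach(println)               // (a,3), (b,3)
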
@@ -2134,7 +2138,7 @@ object SparkContext extends Logging {
       (backend, scheduler)

     case LOCAL_N_REGEX(threads) =>
-      def localCpuCount = Runtime.getRuntime.availableProcessors()
+      def localCpuCount: Int = Runtime.getRuntime.availableProcessors()
       // local[*] estimates the number of cores on the machine; local[N] uses exactly N threads.
       val threadCount = if (threads == "*") localCpuCount else threads.toInt
       if (threadCount <= 0) {
@@ -2146,7 +2150,7 @@ object SparkContext extends Logging {
       (backend, scheduler)

     case LOCAL_N_FAILURES_REGEX(threads, maxFailures) =>
-      def localCpuCount = Runtime.getRuntime.availableProcessors()
+      def localCpuCount: Int = Runtime.getRuntime.availableProcessors()
       // local[*, M] means the number of cores on the computer with M failures
       // local[N, M] means exactly N threads with M failures
       val threadCount = if (threads == "*") localCpuCount else threads.toInt
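
Both branches parse the `local[...]` master strings, and `localCpuCount` is only consulted when the thread count is `*`. For reference, the forms these two regexes accept:

    new SparkConf().setMaster("local[*]")     // one worker thread per available core
    new SparkConf().setMaster("local[4]")     // exactly 4 worker threads
    new SparkConf().setMaster("local[4, 2]")  // 4 threads, up to 2 task failures tolerated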