Skip to content

Commit ed92b47

Browse files
committed
[SPARK-4397] Move object RDD to the front of RDD.scala.
I ran into multiple cases that SBT/Scala compiler was confused by the implicits in continuous compilation mode. Adding explicit return types fixes the problem. Author: Reynold Xin <[email protected]> Closes apache#3580 from rxin/rdd-implicit and squashes the following commits: ee32fcd [Reynold Xin] Move object RDD to the end of the file. b8562c9 [Reynold Xin] Merge branch 'master' of github.com:apache/spark into rdd-implicit d4e9f85 [Reynold Xin] Code review. a836a37 [Reynold Xin] Move object RDD to the front of RDD.scala.
1 parent ab8177d commit ed92b47

File tree

2 files changed

+26
-11
lines changed

2 files changed

+26
-11
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1758,7 +1758,7 @@ object SparkContext extends Logging {
17581758

17591759
@deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
17601760
"backward compatibility.", "1.3.0")
1761-
def writableWritableConverter[T <: Writable]() =
1761+
def writableWritableConverter[T <: Writable](): WritableConverter[T] =
17621762
WritableConverter.writableWritableConverter()
17631763

17641764
/**
@@ -2017,15 +2017,15 @@ object WritableConverter {
20172017
simpleWritableConverter[Boolean, BooleanWritable](_.get)
20182018

20192019
implicit def bytesWritableConverter(): WritableConverter[Array[Byte]] = {
2020-
simpleWritableConverter[Array[Byte], BytesWritable](bw =>
2020+
simpleWritableConverter[Array[Byte], BytesWritable] { bw =>
20212021
// getBytes method returns array which is longer then data to be returned
20222022
Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
2023-
)
2023+
}
20242024
}
20252025

20262026
implicit def stringWritableConverter(): WritableConverter[String] =
20272027
simpleWritableConverter[String, Text](_.toString)
20282028

2029-
implicit def writableWritableConverter[T <: Writable]() =
2029+
implicit def writableWritableConverter[T <: Writable](): WritableConverter[T] =
20302030
new WritableConverter[T](_.runtimeClass.asInstanceOf[Class[T]], _.asInstanceOf[T])
20312031
}

core/src/main/scala/org/apache/spark/rdd/RDD.scala

Lines changed: 22 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -1398,6 +1398,13 @@ abstract class RDD[T: ClassTag](
13981398
}
13991399
}
14001400

1401+
1402+
/**
1403+
* Defines implicit functions that provide extra functionalities on RDDs of specific types.
1404+
*
1405+
* For example, [[RDD.rddToPairRDDFunctions]] converts an RDD into a [[PairRDDFunctions]] for
1406+
* key-value-pair RDDs, and enabling extra functionalities such as [[PairRDDFunctions.reduceByKey]].
1407+
*/
14011408
object RDD {
14021409

14031410
// The following implicit functions were in SparkContext before 1.2 and users had to
@@ -1406,22 +1413,30 @@ object RDD {
14061413
// compatibility and forward to the following functions directly.
14071414

14081415
implicit def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
1409-
(implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null) = {
1416+
(implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): PairRDDFunctions[K, V] = {
14101417
new PairRDDFunctions(rdd)
14111418
}
14121419

1413-
implicit def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]) = new AsyncRDDActions(rdd)
1420+
implicit def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]): AsyncRDDActions[T] = {
1421+
new AsyncRDDActions(rdd)
1422+
}
14141423

14151424
implicit def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
1416-
rdd: RDD[(K, V)]) =
1425+
rdd: RDD[(K, V)]): SequenceFileRDDFunctions[K, V] = {
14171426
new SequenceFileRDDFunctions(rdd)
1427+
}
14181428

1419-
implicit def rddToOrderedRDDFunctions[K : Ordering : ClassTag, V: ClassTag](
1420-
rdd: RDD[(K, V)]) =
1429+
implicit def rddToOrderedRDDFunctions[K : Ordering : ClassTag, V: ClassTag](rdd: RDD[(K, V)])
1430+
: OrderedRDDFunctions[K, V, (K, V)] = {
14211431
new OrderedRDDFunctions[K, V, (K, V)](rdd)
1432+
}
14221433

1423-
implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]) = new DoubleRDDFunctions(rdd)
1434+
implicit def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]): DoubleRDDFunctions = {
1435+
new DoubleRDDFunctions(rdd)
1436+
}
14241437

1425-
implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T]) =
1438+
implicit def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T])
1439+
: DoubleRDDFunctions = {
14261440
new DoubleRDDFunctions(rdd.map(x => num.toDouble(x)))
1441+
}
14271442
}

0 commit comments

Comments (0)