
Commit fd3f46f

Remove usage of Traversable

1 parent 0969d7a commit fd3f46f

File tree

13 files changed: +28 -19 lines changed


core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 9 additions & 0 deletions

@@ -168,6 +168,15 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
   }
 
   /** Set multiple parameters together */
+  def setAll(settings: Iterable[(String, String)]): SparkConf = {
+    settings.foreach { case (k, v) => set(k, v) }
+    this
+  }
+
+  /**
+   * Set multiple parameters together
+   */
+  @deprecated("Use setAll(Iterable) instead", "3.0.0")
   def setAll(settings: Traversable[(String, String)]): SparkConf = {
     settings.foreach { case (k, v) => set(k, v) }
     this
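
For context, a minimal usage sketch (not part of the commit; property names chosen arbitrarily): a Map[String, String] is an Iterable[(String, String)], so existing callers can pass a map straight to the new overload, while the Traversable overload stays available but deprecated.

import org.apache.spark.SparkConf

// Map[String, String] <: Iterable[(String, String)], so this resolves to the new overload.
val conf = new SparkConf(loadDefaults = false)
  .setAll(Map(
    "spark.app.name" -> "demo",
    "spark.master" -> "local[2]"))

assert(conf.get("spark.app.name") == "demo")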

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion

@@ -2556,7 +2556,7 @@ object SparkContext extends Logging {
   private[spark] val DRIVER_IDENTIFIER = "driver"
 
 
-  private implicit def arrayToArrayWritable[T <: Writable : ClassTag](arr: Traversable[T])
+  private implicit def arrayToArrayWritable[T <: Writable : ClassTag](arr: Iterable[T])
     : ArrayWritable = {
     def anyToWritable[U <: Writable](u: U): Writable = u
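
A rough standalone sketch of what the converted signature accepts (hypothetical helper name, not the private Spark implicit itself): any Iterable of Writables, e.g. a Seq, can be packed into a Hadoop ArrayWritable.

import scala.reflect.{classTag, ClassTag}
import org.apache.hadoop.io.{ArrayWritable, IntWritable, Writable}

// Hypothetical helper mirroring the signature above: packs an Iterable of Writables
// into an ArrayWritable keyed by the element class.
def toArrayWritable[T <: Writable : ClassTag](arr: Iterable[T]): ArrayWritable = {
  def anyToWritable[U <: Writable](u: U): Writable = u
  new ArrayWritable(
    classTag[T].runtimeClass.asInstanceOf[Class[Writable]],
    arr.map(x => anyToWritable(x)).toArray)
}

val aw = toArrayWritable(Seq(new IntWritable(1), new IntWritable(2)))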

core/src/main/scala/org/apache/spark/util/Distribution.scala

Lines changed: 4 additions & 4 deletions

@@ -31,7 +31,7 @@ import scala.collection.immutable.IndexedSeq
  */
 private[spark] class Distribution(val data: Array[Double], val startIdx: Int, val endIdx: Int) {
   require(startIdx < endIdx)
-  def this(data: Traversable[Double]) = this(data.toArray, 0, data.size)
+  def this(data: Iterable[Double]) = this(data.toArray, 0, data.size)
   java.util.Arrays.sort(data, startIdx, endIdx)
   val length = endIdx - startIdx

@@ -42,7 +42,7 @@ private[spark] class Distribution(val data: Array[Double], val startIdx: Int, va
    * given from 0 to 1
    * @param probabilities
    */
-  def getQuantiles(probabilities: Traversable[Double] = defaultProbabilities)
+  def getQuantiles(probabilities: Iterable[Double] = defaultProbabilities)
     : IndexedSeq[Double] = {
     probabilities.toIndexedSeq.map { p: Double => data(closestIndex(p)) }
   }

@@ -75,15 +75,15 @@ private[spark] class Distribution(val data: Array[Double], val startIdx: Int, va
 
 private[spark] object Distribution {
 
-  def apply(data: Traversable[Double]): Option[Distribution] = {
+  def apply(data: Iterable[Double]): Option[Distribution] = {
     if (data.size > 0) {
       Some(new Distribution(data))
     } else {
       None
     }
   }
 
-  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
+  def showQuantiles(out: PrintStream = System.out, quantiles: Iterable[Double]) {
     // scalastyle:off println
     out.println("min\t25%\t50%\t75%\tmax")
     quantiles.foreach{q => out.print(q + "\t")}
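
A minimal sketch of how the class is used after the change (values are arbitrary; Distribution is private[spark], so this only compiles from code inside Spark's own packages): any Iterable[Double], such as a Seq, now satisfies both the constructor and getQuantiles.

import org.apache.spark.util.Distribution

// Seq[Double] is an Iterable[Double], so both calls resolve against the new signatures.
val dist = new Distribution(Seq(1.0, 2.0, 3.0, 4.0, 5.0))
val quartiles = dist.getQuantiles(Seq(0.25, 0.5, 0.75))       // IndexedSeq[Double]
Distribution.showQuantiles(System.out, dist.getQuantiles())   // prints min/25%/50%/75%/max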

core/src/main/scala/org/apache/spark/util/JsonProtocol.scala

Lines changed: 1 addition & 1 deletion

@@ -309,7 +309,7 @@ private[spark] object JsonProtocol {
 
   private lazy val accumulableBlacklist = Set("internal.metrics.updatedBlockStatuses")
 
-  def accumulablesToJson(accumulables: Traversable[AccumulableInfo]): JArray = {
+  def accumulablesToJson(accumulables: Iterable[AccumulableInfo]): JArray = {
     JArray(accumulables
       .filterNot(_.name.exists(accumulableBlacklist.contains))
       .toList.map(accumulableInfoToJson))
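
A simplified stand-in sketch (hypothetical data; not the real AccumulableInfo, which is Spark-private): the filterNot/toList/map pipeline inside the method behaves the same whether the parameter is typed Traversable or Iterable.

import org.json4s.JsonAST.{JArray, JString}

// Stand-in for accumulable names; the blacklist filtering mirrors the method above.
val accumulableBlacklist = Set("internal.metrics.updatedBlockStatuses")
val names: Iterable[Option[String]] =
  Seq(Some("peakExecutionMemory"), Some("internal.metrics.updatedBlockStatuses"), None)

val json = JArray(names
  .filterNot(_.exists(accumulableBlacklist.contains))
  .toList
  .map(n => JString(n.getOrElse("unnamed"))))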

core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala

Lines changed: 1 addition & 1 deletion

@@ -535,7 +535,7 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
   /**
    * Assert that the given list of numbers has an average that is greater than zero.
    */
-  private def checkNonZeroAvg(m: Traversable[Long], msg: String) {
+  private def checkNonZeroAvg(m: Iterable[Long], msg: String) {
     assert(m.sum / m.size.toDouble > 0.0, msg)
   }
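
Outside the test suite, the same helper can be exercised standalone (a trivial sketch, values arbitrary):

// Same computation as the test helper above, callable with any Iterable[Long].
def checkNonZeroAvg(m: Iterable[Long], msg: String): Unit =
  assert(m.sum / m.size.toDouble > 0.0, msg)

checkNonZeroAvg(Seq(12L, 0L, 30L), "expected a positive average")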

external/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaDelegationTokenTest.scala

Lines changed: 1 addition & 1 deletion

@@ -83,7 +83,7 @@ trait KafkaDelegationTokenTest extends BeforeAndAfterEach {
     UserGroupInformation.getCurrentUser.addCredentials(creds)
   }
 
-  protected def setSparkEnv(settings: Traversable[(String, String)]): Unit = {
+  protected def setSparkEnv(settings: Iterable[(String, String)]): Unit = {
     val conf = new SparkConf().setAll(settings)
     val env = mock(classOf[SparkEnv])
     doReturn(conf).when(env).conf

graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala

Lines changed: 1 addition & 1 deletion

@@ -55,7 +55,7 @@ object LabelPropagation {
         val count1Val = count1.getOrElse(i, 0L)
         val count2Val = count2.getOrElse(i, 0L)
         i -> (count1Val + count2Val)
-      }(collection.breakOut) // more efficient alternative to [[collection.Traversable.toMap]]
+      }(collection.breakOut)
     }
     def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]): VertexId = {
       if (message.isEmpty) attr else message.maxBy(_._2)._1
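
The dropped comment referred to collection.breakOut, which on Scala 2.12 supplies a CanBuildFrom that builds the target Map directly instead of materializing an intermediate collection and then calling .toMap. A small self-contained sketch of the two forms, assuming Scala 2.12:

val keys = Set(1L, 2L, 3L)

// Goes through an intermediate Set[(Long, Long)] before conversion:
val viaToMap: Map[Long, Long] = keys.map(k => k -> k * 2).toMap

// Builds the Map in one pass, with no intermediate collection:
val viaBreakOut: Map[Long, Long] = keys.map(k => k -> k * 2)(collection.breakOut)

assert(viaToMap == viaBreakOut)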

graphx/src/main/scala/org/apache/spark/graphx/lib/ShortestPaths.scala

Lines changed: 1 addition & 1 deletion

@@ -36,7 +36,7 @@ object ShortestPaths extends Serializable {
   private def addMaps(spmap1: SPMap, spmap2: SPMap): SPMap = {
     (spmap1.keySet ++ spmap2.keySet).map {
       k => k -> math.min(spmap1.getOrElse(k, Int.MaxValue), spmap2.getOrElse(k, Int.MaxValue))
-    }(collection.breakOut) // more efficient alternative to [[collection.Traversable.toMap]]
+    }(collection.breakOut)
   }
 
   /**

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/AttributeSet.scala

Lines changed: 2 additions & 2 deletions

@@ -63,7 +63,7 @@ object AttributeSet {
  * when the transformation was a no-op).
  */
 class AttributeSet private (val baseSet: Set[AttributeEquals])
-  extends Traversable[Attribute] with Serializable {
+  extends Iterable[Attribute] with Serializable {
 
   override def hashCode: Int = baseSet.hashCode()
 
@@ -99,7 +99,7 @@ class AttributeSet private (val baseSet: Set[AttributeEquals])
  * Returns a new [[AttributeSet]] that does not contain any of the [[Attribute Attributes]] found
  * in `other`.
  */
-  def --(other: Traversable[NamedExpression]): AttributeSet = {
+  def --(other: Iterable[NamedExpression]): AttributeSet = {
     other match {
       case otherSet: AttributeSet =>
         new AttributeSet(baseSet -- otherSet.baseSet)
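
One practical difference when a custom collection switches its parent from Traversable to Iterable: Traversable's single abstract method is foreach, while Iterable's is iterator. A minimal sketch of the pattern (hypothetical wrapper type, unrelated to Catalyst's internals):

// A tiny wrapper that, like AttributeSet after this change, extends Iterable
// and therefore must provide an iterator (Traversable only required foreach).
class NameSet private (private val base: Set[String]) extends Iterable[String] {
  override def iterator: Iterator[String] = base.iterator
  def --(other: Iterable[String]): NameSet = new NameSet(base -- other)
}

object NameSet {
  def apply(names: String*): NameSet = new NameSet(names.toSet)
}

val remaining = NameSet("a", "b", "c") -- Seq("b")
assert(remaining.toSet == Set("a", "c"))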

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 1 deletion

@@ -253,7 +253,7 @@ abstract class Expression extends TreeNode[Expression] {
   def prettyName: String = nodeName.toLowerCase(Locale.ROOT)
 
   protected def flatArguments: Iterator[Any] = productIterator.flatMap {
-    case t: Traversable[_] => t
+    case t: Iterable[_] => t
     case single => single :: Nil
   }
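
The match arm above flattens any collection-typed constructor argument of the case class. A self-contained sketch of the same productIterator pattern on a hypothetical case class (Node and the free-standing flatArguments here are illustrative only):

// Hypothetical node with one collection-typed field and one scalar field.
case class Node(children: Seq[String], name: String)

// Same flattening as flatArguments: collection fields are spliced in,
// everything else is wrapped as a single element.
def flatArguments(p: Product): Iterator[Any] = p.productIterator.flatMap {
  case t: Iterable[_] => t
  case single => single :: Nil
}

assert(flatArguments(Node(Seq("a", "b"), "n")).toList == List("a", "b", "n"))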
