-
Notifications
You must be signed in to change notification settings - Fork 28.9k
[SPARK-29292][SPARK-30010][CORE] Let core compile for Scala 2.13 #28971
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
08493d4
11f3198
c261d4b
b0a6be9
ab62b32
89d19c6
8f5af5f
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -205,7 +205,7 @@ private object FaultToleranceTest extends App with Logging { | |||||
|
|
||||||
| private def addWorkers(num: Int): Unit = { | ||||||
| logInfo(s">>>>> ADD WORKERS $num <<<<<") | ||||||
| val masterUrls = getMasterUrls(masters) | ||||||
| val masterUrls = getMasterUrls(masters.toSeq) | ||||||
| (1 to num).foreach { _ => workers += SparkDocker.startWorker(dockerMountDir, masterUrls) } | ||||||
| } | ||||||
|
|
||||||
|
|
@@ -216,7 +216,7 @@ private object FaultToleranceTest extends App with Logging { | |||||
| // Counter-hack: Because of a hack in SparkEnv#create() that changes this | ||||||
| // property, we need to reset it. | ||||||
| System.setProperty(config.DRIVER_PORT.key, "0") | ||||||
| sc = new SparkContext(getMasterUrls(masters), "fault-tolerance", containerSparkHome) | ||||||
| sc = new SparkContext(getMasterUrls(masters.toSeq), "fault-tolerance", containerSparkHome) | ||||||
| } | ||||||
|
|
||||||
| private def getMasterUrls(masters: Seq[TestMasterInfo]): String = { | ||||||
|
|
@@ -279,7 +279,7 @@ private object FaultToleranceTest extends App with Logging { | |||||
| var liveWorkerIPs: Seq[String] = List() | ||||||
|
|
||||||
| def stateValid(): Boolean = { | ||||||
| (workers.map(_.ip) -- liveWorkerIPs).isEmpty && | ||||||
| workers.map(_.ip).forall(liveWorkerIPs.contains) && | ||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Nit: What about using `diff` here instead?
Suggested change
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. diff would work too, I think. It has multiset semantics, and I thought it was not necessary here. I went for what I thought was simpler, but I am not 100% sure. |
||||||
| numAlive == 1 && numStandby == masters.size - 1 && numLiveApps >= 1 | ||||||
| } | ||||||
|
|
||||||
|
|
||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -133,12 +133,11 @@ private object ParallelCollectionRDD { | |
| // If the range is inclusive, use inclusive range for the last slice | ||
| if (r.isInclusive && index == numSlices - 1) { | ||
| new Range.Inclusive(r.start + start * r.step, r.end, r.step) | ||
| } | ||
| else { | ||
| new Range(r.start + start * r.step, r.start + end * r.step, r.step) | ||
| } else { | ||
| new Range.Inclusive(r.start + start * r.step, r.start + (end - 1) * r.step, r.step) | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. For previous reviewers: I fixed a bug from my initial change here. The inclusive end is not 1 less than the exclusive end, but one step less. |
||
| } | ||
| }.toSeq.asInstanceOf[Seq[Seq[T]]] | ||
| case nr: NumericRange[_] => | ||
| case nr: NumericRange[T] => | ||
| // For ranges of Long, Double, BigInteger, etc | ||
| val slices = new ArrayBuffer[Seq[T]](numSlices) | ||
| var r = nr | ||
|
|
@@ -147,7 +146,7 @@ private object ParallelCollectionRDD { | |
| slices += r.take(sliceSize).asInstanceOf[Seq[T]] | ||
| r = r.drop(sliceSize) | ||
| } | ||
| slices | ||
| slices.toSeq | ||
| case _ => | ||
| val array = seq.toArray // To prevent O(n^2) operations for List etc | ||
| positions(array.length, numSlices).map { case (start, end) => | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@srowen, BTW, it might be best to file a JIRA as a reminder to keep this API back if we can't make Scala 2.13 in Spark 3.1.
I believe it is legitimate and inevitable to remove this because of Scala 2.13 but it might be problematic if we can't make it in Spark 3.1, and have a release out only with Scala 2.12.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yeah if the whole thing doesn't make it for 3.1, I'd leave this method in 3.1.