Commit 6002b92

merge with master

2 parents: e0cf36f + d7e124e

File tree

109 files changed: +821 / -756 lines

core/pom.xml (0 additions & 1 deletion)

@@ -192,7 +192,6 @@
     <dependency>
       <groupId>org.json4s</groupId>
       <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-      <version>3.2.10</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
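With the pinned <version>3.2.10</version> line gone, the json4s-jackson artifact presumably resolves its version from dependency management in a parent or shared POM rather than being fixed per module; that is the usual motivation for dropping a per-dependency version in Maven.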

core/src/main/scala/org/apache/spark/ContextCleaner.scala (5 additions & 5 deletions)

@@ -278,9 +278,9 @@ private object ContextCleaner {
  * Listener class used for testing when any item has been cleaned by the Cleaner class.
  */
 private[spark] trait CleanerListener {
-  def rddCleaned(rddId: Int)
-  def shuffleCleaned(shuffleId: Int)
-  def broadcastCleaned(broadcastId: Long)
-  def accumCleaned(accId: Long)
-  def checkpointCleaned(rddId: Long)
+  def rddCleaned(rddId: Int): Unit
+  def shuffleCleaned(shuffleId: Int): Unit
+  def broadcastCleaned(broadcastId: Long): Unit
+  def accumCleaned(accId: Long): Unit
+  def checkpointCleaned(rddId: Long): Unit
 }
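This hunk shows the pattern most of the commit applies, which recurs below in FutureAction, AppClientListener, LeaderElectionAgent, PersistenceEngine, and DriverRunner: Scala's procedure syntax, where a def omits its result type and implicitly returns Unit, is replaced with an explicit ": Unit" annotation (procedure syntax was later deprecated and is removed in Scala 3). A minimal sketch of the before/after shape, using a hypothetical trait rather than Spark's own:

// Hypothetical listener trait, not from the Spark codebase.
trait CleanupListener {
  // Before: procedure syntax; the Unit result type is implied and easy to misread
  // as an abstract member with a forgotten return type.
  // def itemCleaned(id: Int)

  // After: the side-effecting, no-result contract is spelled out.
  def itemCleaned(id: Int): Unit
}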

core/src/main/scala/org/apache/spark/FutureAction.scala (2 additions & 2 deletions)

@@ -41,7 +41,7 @@ trait FutureAction[T] extends Future[T] {
   /**
    * Cancels the execution of this action.
    */
-  def cancel()
+  def cancel(): Unit

   /**
    * Blocks until this action completes.
@@ -65,7 +65,7 @@ trait FutureAction[T] extends Future[T] {
    * When this action is completed, either through an exception, or a value, applies the provided
    * function.
    */
-  def onComplete[U](func: (Try[T]) => U)(implicit executor: ExecutionContext)
+  def onComplete[U](func: (Try[T]) => U)(implicit executor: ExecutionContext): Unit

   /**
    * Returns whether the action has already been completed with a value or an exception.

core/src/main/scala/org/apache/spark/InternalAccumulator.scala (1 addition & 1 deletion)

@@ -187,7 +187,7 @@ private[spark] object InternalAccumulator {
   * add to the same set of accumulators. We do this to report the distribution of accumulator
   * values across all tasks within each stage.
   */
-  def create(sc: SparkContext): Seq[Accumulator[_]] = {
+  def createAll(sc: SparkContext): Seq[Accumulator[_]] = {
     val accums = createAll()
     accums.foreach { accum =>
       Accumulators.register(accum)
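The rename from create to createAll presumably aligns the name with the zero-argument createAll() the method delegates to on the next line, making it clearer that the full set of internal accumulators is created and registered rather than a single one.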

core/src/main/scala/org/apache/spark/SparkContext.scala (2 additions & 2 deletions)

@@ -723,7 +723,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
         (safeEnd - safeStart) / step + 1
       }
     }
-    parallelize(0 until numSlices, numSlices).mapPartitionsWithIndex((i, _) => {
+    parallelize(0 until numSlices, numSlices).mapPartitionsWithIndex { (i, _) =>
       val partitionStart = (i * numElements) / numSlices * step + start
       val partitionEnd = (((i + 1) * numElements) / numSlices) * step + start
       def getSafeMargin(bi: BigInt): Long =
@@ -762,7 +762,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
           ret
         }
       }
-    })
+    }
   }

   /** Distribute a local Scala collection to form an RDD.
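This change, like the Master.scala one further down, swaps a parenthesized lambda wrapping a block for Scala's brace syntax when the last argument is a multi-line closure. A small self-contained sketch of the two forms (the collection and body are illustrative only, not Spark code):

val xs = Seq(1, 2, 3)

// Before: a lambda in parentheses whose body is a block; legal but noisy,
// and it leaves a stray ")" dangling after the closing "}".
xs.foreach( n => {
  println(n)
})

// After: the idiomatic brace form this commit converts to.
xs.foreach { n =>
  println(n)
}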

core/src/main/scala/org/apache/spark/deploy/client/AppClientListener.scala (2 additions & 1 deletion)

@@ -33,7 +33,8 @@ private[spark] trait AppClientListener {
   /** An application death is an unrecoverable failure condition. */
   def dead(reason: String): Unit

-  def executorAdded(fullId: String, workerId: String, hostPort: String, cores: Int, memory: Int)
+  def executorAdded(
+      fullId: String, workerId: String, hostPort: String, cores: Int, memory: Int): Unit

   def executorRemoved(fullId: String, message: String, exitStatus: Option[Int]): Unit
 }

core/src/main/scala/org/apache/spark/deploy/master/LeaderElectionAgent.scala (2 additions & 2 deletions)

@@ -32,8 +32,8 @@ trait LeaderElectionAgent {

 @DeveloperApi
 trait LeaderElectable {
-  def electedLeader()
-  def revokedLeadership()
+  def electedLeader(): Unit
+  def revokedLeadership(): Unit
 }

 /** Single-node implementation of LeaderElectionAgent -- we're initially and always the leader. */

core/src/main/scala/org/apache/spark/deploy/master/Master.scala (2 additions & 2 deletions)

@@ -843,10 +843,10 @@ private[deploy] class Master(
     addressToApp -= app.driver.address
     if (completedApps.size >= RETAINED_APPLICATIONS) {
       val toRemove = math.max(RETAINED_APPLICATIONS / 10, 1)
-      completedApps.take(toRemove).foreach( a => {
+      completedApps.take(toRemove).foreach { a =>
         Option(appIdToUI.remove(a.id)).foreach { ui => webUi.detachSparkUI(ui) }
         applicationMetricsSystem.removeSource(a.appSource)
-      })
+      }
       completedApps.trimStart(toRemove)
     }
     completedApps += app // Remember it in our history

core/src/main/scala/org/apache/spark/deploy/master/PersistenceEngine.scala (2 additions & 2 deletions)

@@ -40,12 +40,12 @@ abstract class PersistenceEngine {
    * Defines how the object is serialized and persisted. Implementation will
    * depend on the store used.
    */
-  def persist(name: String, obj: Object)
+  def persist(name: String, obj: Object): Unit

   /**
    * Defines how the object referred by its name is removed from the store.
    */
-  def unpersist(name: String)
+  def unpersist(name: String): Unit

   /**
    * Gives all objects, matching a prefix. This defines how objects are

core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala (1 addition & 1 deletion)

@@ -218,7 +218,7 @@ private[deploy] class DriverRunner(
 }

 private[deploy] trait Sleeper {
-  def sleep(seconds: Int)
+  def sleep(seconds: Int): Unit
 }

 // Needed because ProcessBuilder is a final class and cannot be mocked
