
Commit 2250c7a

techaddict authored and rxin committed
Fix Scala Style
Any comments are welcome

Author: Sandeep <[email protected]>

Closes #531 from techaddict/stylefix-1 and squashes the following commits:

7492730 [Sandeep] Pass 4
98b2428 [Sandeep] fix rxin suggestions
b5e2e6f [Sandeep] Pass 3
05932d7 [Sandeep] fix if else styling 2
08690e5 [Sandeep] fix if else styling

(cherry picked from commit a03ac22)
Signed-off-by: Reynold Xin <[email protected]>
1 parent 5ca01f6 commit 2250c7a

20 files changed, +109 -83 lines changed
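Every hunk below applies the same Spark Scala style convention: each branch of a multi-line if/else gets braces, and "} else {" sits on a single line. As a minimal standalone sketch of the before/after shape (the clamp function is hypothetical, modeled on the LogPage startByte change, not taken from the diff):

object BraceStyle {
  // Before (the style this commit removes):
  //   if (x < 0) 0L
  //   else if (x > max) max
  //   else x
  // After (preferred): every branch braced, "} else {" joined on one line.
  def clamp(x: Long, max: Long): Long = {
    if (x < 0) {
      0L
    } else if (x > max) {
      max
    } else {
      x
    }
  }

  def main(args: Array[String]): Unit = {
    println(clamp(-5L, 10L)) // 0
    println(clamp(42L, 10L)) // 10
  }
}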

core/src/main/scala/org/apache/spark/Accumulators.scala

Lines changed: 5 additions & 2 deletions
@@ -104,8 +104,11 @@ class Accumulable[R, T] (
    * Set the accumulator's value; only allowed on master.
    */
   def value_= (newValue: R) {
-    if (!deserialized) value_ = newValue
-    else throw new UnsupportedOperationException("Can't assign accumulator value in task")
+    if (!deserialized) {
+      value_ = newValue
+    } else {
+      throw new UnsupportedOperationException("Can't assign accumulator value in task")
+    }
   }
 
   /**

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 1 addition & 2 deletions
@@ -66,8 +66,7 @@ private[spark] class SparkSubmitArguments(args: Array[String]) {
       if (k.startsWith("spark")) {
         defaultProperties(k) = v
         if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
-      }
-      else {
+      } else {
         SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
       }
     }

core/src/main/scala/org/apache/spark/deploy/master/Master.scala

Lines changed: 1 addition & 2 deletions
@@ -237,8 +237,7 @@ private[spark] class Master(
       if (waitingDrivers.contains(d)) {
         waitingDrivers -= d
         self ! DriverStateChanged(driverId, DriverState.KILLED, None)
-      }
-      else {
+      } else {
         // We just notify the worker to kill the driver here. The final bookkeeping occurs
         // on the return path when the worker submits a state change back to the master
         // to notify it that the driver was successfully killed.

core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala

Lines changed: 5 additions & 3 deletions
@@ -91,9 +91,11 @@ private[spark] class DriverRunner(
     }
 
     val state =
-      if (killed) { DriverState.KILLED }
-      else if (finalException.isDefined) { DriverState.ERROR }
-      else {
+      if (killed) {
+        DriverState.KILLED
+      } else if (finalException.isDefined) {
+        DriverState.ERROR
+      } else {
         finalExitCode match {
           case Some(0) => DriverState.FINISHED
           case _ => DriverState.FAILED

core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala

Lines changed: 9 additions & 7 deletions
@@ -89,8 +89,7 @@ private[spark] class LogPage(parent: WorkerWebUI) extends WebUIPage("logPage") {
             Previous {Utils.bytesToString(math.min(byteLength, startByte))}
           </button>
         </a>
-      }
-      else {
+      } else {
         <button type="button" class="btn btn-default" disabled="disabled">
           Previous 0 B
         </button>

@@ -104,8 +103,7 @@ private[spark] class LogPage(parent: WorkerWebUI) extends WebUIPage("logPage") {
             Next {Utils.bytesToString(math.min(byteLength, logLength - endByte))}
           </button>
         </a>
-      }
-      else {
+      } else {
         <button type="button" class="btn btn-default" disabled="disabled">
           Next 0 B
         </button>

@@ -137,9 +135,13 @@ private[spark] class LogPage(parent: WorkerWebUI) extends WebUIPage("logPage") {
     val logLength = file.length()
     val getOffset = offset.getOrElse(logLength - defaultBytes)
     val startByte =
-      if (getOffset < 0) 0L
-      else if (getOffset > logLength) logLength
-      else getOffset
+      if (getOffset < 0) {
+        0L
+      } else if (getOffset > logLength) {
+        logLength
+      } else {
+        getOffset
+      }
     val logPageLength = math.min(byteLength, maxBytes)
     val endByte = math.min(startByte + logPageLength, logLength)
     (startByte, endByte)

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 5 additions & 3 deletions
@@ -281,7 +281,9 @@ private[spark] class BlockManager(
       val onDiskSize = status.diskSize
       master.updateBlockInfo(
         blockManagerId, blockId, storageLevel, inMemSize, onDiskSize, inTachyonSize)
-    } else true
+    } else {
+      true
+    }
   }
 
   /**

@@ -676,7 +678,7 @@ private[spark] class BlockManager(
           tachyonStore.putValues(blockId, iterator, level, false)
         case ArrayBufferValues(array) =>
           tachyonStore.putValues(blockId, array, level, false)
-        case ByteBufferValues(bytes) =>
+        case ByteBufferValues(bytes) =>
           bytes.rewind()
           tachyonStore.putBytes(blockId, bytes, level)
       }

@@ -695,7 +697,7 @@ private[spark] class BlockManager(
           diskStore.putValues(blockId, iterator, level, askForBytes)
         case ArrayBufferValues(array) =>
           diskStore.putValues(blockId, array, level, askForBytes)
-        case ByteBufferValues(bytes) =>
+        case ByteBufferValues(bytes) =>
           bytes.rewind()
           diskStore.putBytes(blockId, bytes, level)
       }

(In the second and third hunks the removed and added "case ByteBufferValues(bytes) =>" lines are otherwise identical; the change appears to be trailing whitespace removal.)
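The "} else true" fix above works because if/else is an expression in Scala: the branch's value becomes the value of the whole expression, braced or not, so adding braces here is purely cosmetic. A self-contained sketch of the pattern (names hypothetical, not from BlockManager):

object IfElseExpression {
  private def doUpdate(): Boolean = {
    println("updating...")
    true
  }

  // The if/else is the method's final expression, so its Boolean value is
  // returned whether or not the branches are braced.
  def updateOrSkip(shouldUpdate: Boolean): Boolean = {
    if (shouldUpdate) {
      doUpdate()
    } else {
      true // nothing to do counts as success
    }
  }

  def main(args: Array[String]): Unit = {
    println(updateOrSkip(shouldUpdate = false)) // prints: true
  }
}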

core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala

Lines changed: 8 additions & 4 deletions
@@ -43,8 +43,11 @@ private[spark] class BoundedPriorityQueue[A](maxSize: Int)(implicit ord: Ordering[A])
   }
 
   override def +=(elem: A): this.type = {
-    if (size < maxSize) underlying.offer(elem)
-    else maybeReplaceLowest(elem)
+    if (size < maxSize) {
+      underlying.offer(elem)
+    } else {
+      maybeReplaceLowest(elem)
+    }
     this
   }
 

@@ -59,7 +62,8 @@ private[spark] class BoundedPriorityQueue[A](maxSize: Int)(implicit ord: Ordering[A])
     if (head != null && ord.gt(a, head)) {
       underlying.poll()
       underlying.offer(a)
-    } else false
+    } else {
+      false
+    }
   }
 }
-

core/src/main/scala/org/apache/spark/util/FileLogger.scala

Lines changed: 3 additions & 1 deletion
@@ -113,7 +113,9 @@ private[spark] class FileLogger(
    * @param withTime Whether to prepend message with a timestamp
    */
   def log(msg: String, withTime: Boolean = false) {
-    val writeInfo = if (!withTime) msg else {
+    val writeInfo = if (!withTime) {
+      msg
+    } else {
       val date = new Date(System.currentTimeMillis())
       dateFormat.get.format(date) + ": " + msg
     }

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 1 addition & 2 deletions
@@ -811,8 +811,7 @@ private[spark] object Utils extends Logging {
           } else {
             el.getMethodName
           }
-        }
-        else {
+        } else {
           firstUserLine = el.getLineNumber
           firstUserFile = el.getFileName
           firstUserClass = el.getClassName

core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -381,8 +381,8 @@ class RDDSuite extends FunSuite with SharedSparkContext {
       val prng42 = new Random(42)
       val prng43 = new Random(43)
       Array(1, 2, 3, 4, 5, 6).filter{i =>
-        if (i < 4) 0 == prng42.nextInt(3)
-        else 0 == prng43.nextInt(3)}
+        if (i < 4) 0 == prng42.nextInt(3) else 0 == prng43.nextInt(3)
+      }
     }
     assert(sample.size === checkSample.size)
     for (i <- 0 until sample.size) assert(sample(i) === checkSample(i))
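This last hunk goes the opposite way from the rest: when both branches are short expressions, the whole if/else is collapsed onto one line rather than split across two. A hypothetical sketch of the same collapse:

object OneLineIfElse {
  def main(args: Array[String]): Unit = {
    val useFirst = true
    val (a, b) = (1, 2)
    // Split form (discouraged when the expression fits on one line):
    //   val pick = if (useFirst) a
    //   else b
    // Collapsed form:
    val pick = if (useFirst) a else b
    println(pick) // 1
  }
}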
