
Commit 08690e5

fix if else styling

1 parent 39f85e0

10 files changed: +49 −25 lines

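All ten files apply the same Scala style rule: an if/else whose branches span multiple lines gets braces around both branches, while a short if/else that fits on one line may stay unbraced (the final SparkExprTyper hunk joins one such case back onto a single line). A minimal sketch of the convention; the sign* methods below are made up for illustration:

  // Before: multi-line if/else without braces (the style being fixed)
  def signBare(n: Int): String =
    if (n < 0) "negative"
    else "non-negative"

  // After: braces on both branches
  def signBraced(n: Int): String = {
    if (n < 0) {
      "negative"
    } else {
      "non-negative"
    }
  }

  // Still fine: a short if/else that fits on one line
  def signShort(n: Int): String = if (n < 0) "negative" else "non-negative"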

core/src/main/scala/org/apache/spark/Accumulators.scala

Lines changed: 5 additions & 2 deletions

@@ -104,8 +104,11 @@ class Accumulable[R, T] (
    * Set the accumulator's value; only allowed on master.
    */
   def value_= (newValue: R) {
-    if (!deserialized) value_ = newValue
-    else throw new UnsupportedOperationException("Can't assign accumulator value in task")
+    if (!deserialized) {
+      value_ = newValue
+    } else {
+      throw new UnsupportedOperationException("Can't assign accumulator value in task")
+    }
   }

   /**

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 1 addition & 2 deletions

@@ -66,8 +66,7 @@ private[spark] class SparkSubmitArguments(args: Array[String]) {
       if (k.startsWith("spark")) {
         defaultProperties(k) = v
         if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
-      }
-      else {
+      } else {
         SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
       }
     }

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 5 additions & 3 deletions

@@ -281,7 +281,9 @@ private[spark] class BlockManager(
       val onDiskSize = status.diskSize
       master.updateBlockInfo(
         blockManagerId, blockId, storageLevel, inMemSize, onDiskSize, inTachyonSize)
-    } else true
+    } else {
+      true
+    }
   }

   /**
@@ -676,7 +678,7 @@ private[spark] class BlockManager(
           tachyonStore.putValues(blockId, iterator, level, false)
         case ArrayBufferValues(array) =>
           tachyonStore.putValues(blockId, array, level, false)
-        case ByteBufferValues(bytes) =>
+        case ByteBufferValues(bytes) =>
           bytes.rewind()
           tachyonStore.putBytes(blockId, bytes, level)
       }
@@ -695,7 +697,7 @@ private[spark] class BlockManager(
           diskStore.putValues(blockId, iterator, level, askForBytes)
         case ArrayBufferValues(array) =>
           diskStore.putValues(blockId, array, level, askForBytes)
-        case ByteBufferValues(bytes) =>
+        case ByteBufferValues(bytes) =>
           bytes.rewind()
           diskStore.putBytes(blockId, bytes, level)
       }

(In the two ByteBufferValues hunks the visible code is identical before and after; the change is whitespace-only.)

core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala

Lines changed: 8 additions & 4 deletions

@@ -43,8 +43,11 @@ private[spark] class BoundedPriorityQueue[A](maxSize: Int)(implicit ord: Ordering[A])
   }

   override def +=(elem: A): this.type = {
-    if (size < maxSize) underlying.offer(elem)
-    else maybeReplaceLowest(elem)
+    if (size < maxSize) {
+      underlying.offer(elem)
+    } else {
+      maybeReplaceLowest(elem)
+    }
     this
   }

@@ -59,7 +62,8 @@ private[spark] class BoundedPriorityQueue[A](maxSize: Int)(implicit ord: Ordering[A])
     if (head != null && ord.gt(a, head)) {
       underlying.poll()
       underlying.offer(a)
-    } else false
+    } else {
+      false
+    }
   }
 }
-
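For context on the two hunks above: BoundedPriorityQueue keeps the maxSize largest elements under the implicit Ordering, offering new elements while below capacity and otherwise replacing the current minimum via maybeReplaceLowest. A rough usage sketch, assuming the class were visible outside the spark package (it is private[spark] in the real tree):

  val queue = new BoundedPriorityQueue[Int](3)
  Seq(5, 1, 9, 7, 3).foreach(queue += _)
  // Only the three largest elements survive
  assert(queue.toSeq.sorted == Seq(5, 7, 9))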

core/src/main/scala/org/apache/spark/util/FileLogger.scala

Lines changed: 3 additions & 1 deletion

@@ -110,7 +110,9 @@ private[spark] class FileLogger(
    * @param withTime Whether to prepend message with a timestamp
    */
   def log(msg: String, withTime: Boolean = false) {
-    val writeInfo = if (!withTime) msg else {
+    val writeInfo = if (!withTime) {
+      msg
+    } else {
       val date = new Date(System.currentTimeMillis())
       dateFormat.get.format(date) + ": " + msg
     }
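The hunk above also shows that Scala's if/else is an expression: both braced branches yield a String that feeds the writeInfo val. A self-contained sketch of the same timestamp-prefix pattern, with an arbitrary date pattern (the real FileLogger keeps its SimpleDateFormat in a ThreadLocal, hence the dateFormat.get in the diff; this simplified version does not):

  import java.text.SimpleDateFormat
  import java.util.Date

  val dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss")

  def formatLine(msg: String, withTime: Boolean = false): String = {
    val writeInfo = if (!withTime) {
      msg
    } else {
      val date = new Date(System.currentTimeMillis())
      dateFormat.format(date) + ": " + msg
    }
    writeInfo
  }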

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 1 addition & 2 deletions

@@ -811,8 +811,7 @@ private[spark] object Utils extends Logging {
         } else {
           el.getMethodName
         }
-      }
-      else {
+      } else {
        firstUserLine = el.getLineNumber
        firstUserFile = el.getFileName
        firstUserClass = el.getClassName

core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala

Lines changed: 6 additions & 2 deletions

@@ -381,8 +381,12 @@ class RDDSuite extends FunSuite with SharedSparkContext {
       val prng42 = new Random(42)
       val prng43 = new Random(43)
       Array(1, 2, 3, 4, 5, 6).filter{i =>
-        if (i < 4) 0 == prng42.nextInt(3)
-        else 0 == prng43.nextInt(3)}
+        if (i < 4){
+          0 == prng42.nextInt(3)
+        } else {
+          0 == prng43.nextInt(3)
+        }
+      }
     }
     assert(sample.size === checkSample.size)
     for (i <- 0 until sample.size) assert(sample(i) === checkSample(i))

examples/src/main/scala/org/apache/spark/examples/LogQuery.scala

Lines changed: 5 additions & 2 deletions

@@ -49,8 +49,11 @@ object LogQuery {
       System.getenv("SPARK_HOME"), SparkContext.jarOfClass(this.getClass).toSeq)

     val dataSet =
-      if (args.length == 2) sc.textFile(args(1))
-      else sc.parallelize(exampleApacheLogs)
+      if (args.length == 2) {
+        sc.textFile(args(1))
+      } else {
+        sc.parallelize(exampleApacheLogs)
+      }
     // scalastyle:off
     val apacheLogRegex =
       """^([\d.]+) (\S+) (\S+) \[([\w\d:/]+\s[+\-]\d{4})\] "(.+?)" (\d{3}) ([\d\-]+) "([^"]+)" "([^"]+)".*""".r

graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala

Lines changed: 5 additions & 2 deletions

@@ -165,8 +165,11 @@ class GraphOpsSuite extends FunSuite with LocalSparkContext {
       // not have any edges in the specified direction.
       assert(edges.count === 50)
       edges.collect.foreach {
-        case (vid, edges) => if (vid > 0 && vid < 49) assert(edges.size == 2)
-        else assert(edges.size == 1)
+        case (vid, edges) => if (vid > 0 && vid < 49) {
+          assert(edges.size == 2)
+        } else {
+          assert(edges.size == 1)
+        }
       }
       edges.collect.foreach {
         case (vid, edges) =>

repl/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala

Lines changed: 10 additions & 5 deletions

@@ -47,9 +47,15 @@ trait SparkExprTyper extends Logging {
     var isIncomplete = false
     reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
       val trees = codeParser.stmts(line)
-      if (reporter.hasErrors) Some(Nil)
-      else if (isIncomplete) None
-      else Some(trees)
+      if (reporter.hasErrors) {
+        Some(Nil)
+      }
+      else if (isIncomplete) {
+        None
+      }
+      else {
+        Some(trees)
+      }
     }
   }
   // def parsesAsExpr(line: String) = {
@@ -70,8 +76,7 @@ trait SparkExprTyper extends Logging {
        val sym0 = symbolOfTerm(name)
        // drop NullaryMethodType
        val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
-       if (sym.info.typeSymbol eq UnitClass) NoSymbol
-       else sym
+       if (sym.info.typeSymbol eq UnitClass) NoSymbol else sym
      case _ => NoSymbol
     }
   }
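The second SparkExprTyper hunk goes the opposite direction: a two-line unbraced if/else is short enough to be joined onto a single line, which the style rule permits. In miniature, with a hypothetical helper:

  // A short if/else expression may stay on one line
  def nonEmptyOrNull(s: String): String = if (s.isEmpty) null else s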
