
Commit 5ad4735

kiszk authored and HyukjinKwon committed
[SPARK-24529][BUILD][TEST-MAVEN] Add spotbugs into maven build process
## What changes were proposed in this pull request?

This PR enables the Java bytecode check tool [spotbugs](https://spotbugs.github.io/) in the Maven build to avoid possible integer overflow at multiplication. When a violation is detected, the build process is stopped. Due to a limitation of the tool, some other checks are enabled as well: with this PR, [these patterns](http://spotbugs-in-kengo-toda.readthedocs.io/en/lqc-list-detectors/detectors.html#findpuzzlers) in `FindPuzzlers` can be detected. The check is bound to the `compile` phase, so `mvn compile` or `mvn package` launches it.

## How was this patch tested?

Existing UTs

Author: Kazuaki Ishizaki <[email protected]>

Closes #21542 from kiszk/SPARK-24529.
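For illustration, the class of bug the new check targets looks roughly like the snippet below. The code and its names are made up for this note and are not part of the commit: an `Int * Int` multiplication is evaluated in 32-bit arithmetic and overflows before the result is widened to `Long`.

```scala
// Illustrative sketch of the target bug class, not code from this commit.
object OverflowSketch {
  def main(args: Array[String]): Unit = {
    val numRecords: Int = 100000
    val bytesPerRecord: Int = 50000
    val wrong: Long = numRecords * bytesPerRecord          // multiplied as Int, overflows first
    val right: Long = numRecords.toLong * bytesPerRecord   // widen one operand before multiplying
    println(wrong)   // 705032704 -- garbage after Int overflow
    println(right)   // 5000000000
  }
}
```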
1 parent 3ab48f9 commit 5ad4735

File tree

8 files changed, +44 -9 lines changed


core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala

Lines changed: 2 additions & 2 deletions
@@ -368,8 +368,8 @@ private[spark] class ExternalSorter[K, V, C](
     val bufferedIters = iterators.filter(_.hasNext).map(_.buffered)
     type Iter = BufferedIterator[Product2[K, C]]
     val heap = new mutable.PriorityQueue[Iter]()(new Ordering[Iter] {
-      // Use the reverse of comparator.compare because PriorityQueue dequeues the max
-      override def compare(x: Iter, y: Iter): Int = -comparator.compare(x.head._1, y.head._1)
+      // Use the reverse order because PriorityQueue dequeues the max
+      override def compare(x: Iter, y: Iter): Int = comparator.compare(y.head._1, x.head._1)
     })
     heap.enqueue(bufferedIters: _*) // Will contain only the iterators with hasNext = true
     new Iterator[Product2[K, C]] {
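The same rewrite from negating a comparison result to swapping its arguments appears again in `ArraySortLike` (collectionOperations.scala) below. As a rough illustration of the hazard being avoided (this snippet is not part of the commit; the pattern is presumably what SpotBugs reports as negating the result of compare/compareTo):

```scala
// Minimal sketch of the hazard: a comparator may legally return Int.MinValue,
// and -Int.MinValue overflows back to Int.MinValue, so negation does not flip the sign.
object ReverseCompareSketch {
  def main(args: Array[String]): Unit = {
    val r = Int.MinValue                 // a valid result of compare(x, y) meaning "x < y"
    println(-r)                          // -2147483648: still negative, the order is not reversed
    // Swapping the arguments reverses the order without any negation:
    println(Ordering.Int.compare(2, 1))  // positive
    println(Ordering.Int.compare(1, 2))  // negative
  }
}
```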

mllib/src/main/scala/org/apache/spark/ml/image/HadoopUtils.scala

Lines changed: 5 additions & 3 deletions
@@ -42,9 +42,11 @@ private object RecursiveFlag {
     val old = Option(hadoopConf.get(flagName))
     hadoopConf.set(flagName, value.toString)
     try f finally {
-      old match {
-        case Some(v) => hadoopConf.set(flagName, v)
-        case None => hadoopConf.unset(flagName)
+      // avoid false positive of DLS_DEAD_LOCAL_STORE_IN_RETURN by SpotBugs
+      if (old.isDefined) {
+        hadoopConf.set(flagName, old.get)
+      } else {
+        hadoopConf.unset(flagName)
       }
     }
   }
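As a standalone sketch of what this block does (the helper name `withFlag` and the flag key are hypothetical; the real code lives in `RecursiveFlag` and uses its own flag name and value handling), the if/else restore behaves exactly like the original match: the previous value is put back if there was one, otherwise the key is unset again.

```scala
import org.apache.hadoop.conf.Configuration

// Sketch only: set a configuration key around a block, then restore its previous state.
object RestoreFlagSketch {
  def withFlag[T](conf: Configuration, flagName: String, value: String)(f: => T): T = {
    val old = Option(conf.get(flagName))
    conf.set(flagName, value)
    try f finally {
      if (old.isDefined) conf.set(flagName, old.get) else conf.unset(flagName)
    }
  }

  def main(args: Array[String]): Unit = {
    val conf = new Configuration(false)
    withFlag(conf, "example.flag", "true") {
      println(conf.get("example.flag"))   // "true" while the block runs
    }
    println(conf.get("example.flag"))     // null afterwards: the key was unset again
  }
}
```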

pom.xml

Lines changed: 22 additions & 0 deletions
@@ -2606,6 +2606,28 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>com.github.spotbugs</groupId>
+        <artifactId>spotbugs-maven-plugin</artifactId>
+        <version>3.1.3</version>
+        <configuration>
+          <classFilesDirectory>${basedir}/target/scala-${scala.binary.version}/classes</classFilesDirectory>
+          <testClassFilesDirectory>${basedir}/target/scala-${scala.binary.version}/test-classes</testClassFilesDirectory>
+          <effort>Max</effort>
+          <threshold>Low</threshold>
+          <xmlOutput>true</xmlOutput>
+          <visitors>FindPuzzlers</visitors>
+          <fork>false</fork>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>check</goal>
+            </goals>
+            <phase>compile</phase>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>

resource-managers/kubernetes/core/pom.xml

Lines changed: 6 additions & 0 deletions
@@ -47,6 +47,12 @@
       <scope>test</scope>
     </dependency>

+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <type>test-jar</type>
+    </dependency>
+
     <dependency>
       <groupId>io.fabric8</groupId>
       <artifactId>kubernetes-client</artifactId>

resource-managers/kubernetes/integration-tests/pom.xml

Lines changed: 5 additions & 0 deletions
@@ -63,6 +63,11 @@
       <artifactId>kubernetes-client</artifactId>
       <version>${kubernetes-client.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <type>test-jar</type>
+    </dependency>
   </dependencies>

   <build>

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala

Lines changed: 1 addition & 1 deletion
@@ -993,7 +993,7 @@ trait ArraySortLike extends ExpectsInputTypes {
         } else if (o2 == null) {
           nullOrder
         } else {
-          -ordering.compare(o1, o2)
+          ordering.compare(o2, o1)
         }
       }
     }

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala

Lines changed: 2 additions & 2 deletions
@@ -294,7 +294,7 @@ object CaseWhen {
       case cond :: value :: Nil => Some((cond, value))
       case value :: Nil => None
     }.toArray.toSeq // force materialization to make the seq serializable
-    val elseValue = if (branches.size % 2 == 1) Some(branches.last) else None
+    val elseValue = if (branches.size % 2 != 0) Some(branches.last) else None
     CaseWhen(cases, elseValue)
   }
 }
@@ -309,7 +309,7 @@ object CaseKeyWhen {
       case Seq(cond, value) => Some((EqualTo(key, cond), value))
       case Seq(value) => None
     }.toArray.toSeq // force materialization to make the seq serializable
-    val elseValue = if (branches.size % 2 == 1) Some(branches.last) else None
+    val elseValue = if (branches.size % 2 != 0) Some(branches.last) else None
     CaseWhen(cases, elseValue)
   }
 }
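The same `% 2 == 1` to `% 2 != 0` rewrite appears again in `AstBuilder` below; it presumably corresponds to the bad-check-for-odd pattern among the `FindPuzzlers` checks. A rough sketch of the general pitfall, not taken from the commit (`branches.size` can never be negative here, so only the form changes, not the behavior):

```scala
// Minimal sketch: for negative operands the JVM's % yields a negative remainder,
// so comparing with 1 is false even though the number is odd; != 0 is sign-safe.
object OddCheckSketch {
  def main(args: Array[String]): Unit = {
    val n = -3
    println(n % 2 == 1)   // false, even though -3 is odd (-3 % 2 == -1 on the JVM)
    println(n % 2 != 0)   // true: sign-safe oddness test
  }
}
```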

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 1 addition & 1 deletion
@@ -1507,7 +1507,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
       case "TIMESTAMP" =>
         Literal(Timestamp.valueOf(value))
       case "X" =>
-        val padding = if (value.length % 2 == 1) "0" else ""
+        val padding = if (value.length % 2 != 0) "0" else ""
         Literal(DatatypeConverter.parseHexBinary(padding + value))
       case other =>
         throw new ParseException(s"Literals of type '$other' are currently not supported.", ctx)

0 commit comments