@@ -106,7 +106,6 @@ private[spark] class SecurityMessage() extends Logging {
    * @return BufferMessage
    */
   def toBufferMessage: BufferMessage = {
-    val startTime = System.currentTimeMillis
     val buffers = new ArrayBuffer[ByteBuffer]()

     // 4 bytes for the length of the connectionId
@@ -61,7 +61,6 @@ private[spark] class GroupedMeanEvaluator[T](totalOutputs: Int, confidence: Double)
     } else if (outputsMerged == 0) {
       new HashMap[T, BoundedDouble]
     } else {
-      val p = outputsMerged.toDouble / totalOutputs
       val studentTCacher = new StudentTCacher(confidence)
       val result = new JHashMap[T, BoundedDouble](sums.size)
       val iter = sums.entrySet.iterator()
@@ -118,11 +118,9 @@ object BlockFetcherIterator {
       })
       bytesInFlight += req.size
       val sizeMap = req.blocks.toMap // so we can look up the size of each blockID
-      val fetchStart = System.currentTimeMillis()
       val future = connectionManager.sendMessageReliably(cmId, blockMessageArray.toBufferMessage)
       future.onSuccess {
         case Some(message) => {
-          val fetchDone = System.currentTimeMillis()
           val bufferMessage = message.asInstanceOf[BufferMessage]
           val blockMessageArray = BlockMessageArray.fromBufferMessage(bufferMessage)
           for (blockMessage <- blockMessageArray) {
@@ -91,7 +91,7 @@ private[spark] object XORShiftRandom {
     val xorRand = new XORShiftRandom(seed)

     // this is just to warm up the JIT - we're not timing anything
-    timeIt(1e6.toInt) {
+    timeIt(million) {
       javaRand.nextInt()
       xorRand.nextInt()
     }
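Aside: the warm-up comment in this hunk reflects a common benchmarking pattern. A minimal standalone Java sketch (illustrative only, not code from this PR) of running the workload once untimed so the JIT compiles the hot path before the measured run:

import java.util.Random;

public class WarmupDemo {
    public static void main(String[] args) {
        Random rand = new Random(42L);
        long sink = 0;

        // Warm-up: run the same work untimed so the JIT compiles the loop body.
        for (int i = 0; i < 1_000_000; i++) {
            sink += rand.nextInt();
        }

        // Timed run: the loop body is now already compiled.
        long start = System.currentTimeMillis();
        for (int i = 0; i < 1_000_000; i++) {
            sink += rand.nextInt();
        }
        long elapsedMs = System.currentTimeMillis() - start;

        // Print sink so the JIT cannot eliminate the loops as dead code.
        System.out.println(elapsedMs + " ms (sink=" + sink + ")");
    }
}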
@@ -75,7 +75,6 @@ public String toString() {

   public static Tuple3<String, String, String> extractKey(String line) {
     Matcher m = apacheLogRegex.matcher(line);
-    List<String> key = Collections.emptyList();
     if (m.find()) {
       String ip = m.group(1);
       String user = m.group(3);
@@ -85,7 +85,7 @@ public Tuple2<Integer, Integer> call(Tuple2<Integer, Integer> e) {
       }
     });

-    long oldCount = 0;
+    long oldCount;
     long nextCount = tc.count();
     do {
       oldCount = nextCount;

Review thread on this change:

Contributor: Isn't it better to initialize this to 0 rather than rely on an implicit initialization?

Contributor (Author): Either way, it is immediately assigned the value of nextCount, so I think the initializer is redundant.

Member: Yeah, it's super minor, but my IDE also flags things like this: the initial value is overwritten straight away in the loop, so the explicit initializer is flagged as a trivial mistake. Unlike with instance variables, Java would not allow omitting the initialization of a local if it were possible to read its value before it was ever written, so it's not a question of relying on a default value here.
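To illustrate the Member's point, here is a minimal standalone Java sketch (hypothetical class, not from this PR) of Java's definite-assignment rule: a local variable needs no initializer as long as the compiler can prove it is written before any read.

public class DefiniteAssignmentDemo {
    public static void main(String[] args) {
        long oldCount;        // no initializer: legal, because every read below
        long nextCount = 3;   // is preceded by an assignment

        do {
            oldCount = nextCount;     // always executes before oldCount is read
            nextCount = oldCount - 1;
        } while (nextCount > 0);

        System.out.println(oldCount); // compiles: oldCount is definitely assigned

        // long neverSet;
        // System.out.println(neverSet); // would NOT compile:
        // "variable neverSet might not have been initialized"
    }
}

So the removed "= 0" was never observable: unlike a field, which silently defaults to 0, an unassigned local is a compile-time error at any read, which is why the initializer here was pure noise.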
@@ -23,7 +23,6 @@
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
-import org.apache.spark.api.java.function.VoidFunction;

 import org.apache.spark.sql.api.java.JavaSQLContext;
 import org.apache.spark.sql.api.java.JavaSchemaRDD;