Skip to content

Commit d12b95f

Browse files
committed
Remove unused imports (minor)
1 parent a4c387b commit d12b95f

File tree

2 files changed

+3
-5
lines changed

2 files changed

+3
-5
lines changed

core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,11 +17,12 @@
 
 package org.apache.spark.scheduler
 
+import scala.language.existentials
+
 import java.io._
 import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 
 import scala.collection.mutable.HashMap
-import scala.language.existentials
 
 import org.apache.spark._
 import org.apache.spark.rdd.{RDD, RDDCheckpointData}

core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,7 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 import scala.collection.mutable.HashMap
 
 import org.apache.spark._
-import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.rdd.{RDD, RDDCheckpointData}
-import org.apache.spark.serializer.Serializer
-import org.apache.spark.storage._
 import org.apache.spark.shuffle.ShuffleWriter
 
 private[spark] object ShuffleMapTask {
@@ -150,7 +147,7 @@ private[spark] class ShuffleMapTask(
       for (elem <- rdd.iterator(split, context)) {
         writer.write(elem.asInstanceOf[Product2[Any, Any]])
       }
-      return writer.stop(success = true).get
+      writer.stop(success = true).get
     } catch {
       case e: Exception =>
         if (writer != null) {

0 commit comments

Comments
 (0)