Skip to content

Commit f089800

Browse files
committed
[SPARK-6754] Remove unnecessary TaskContextHelper
The TaskContextHelper was originally necessary because TaskContext was written in Java, which has no way to express Scala-style package-scoped visibility (such as private[spark]), so TaskContextHelper existed to work around this. Now that TaskContext has been re-written in Scala, this class is no longer necessary.
1 parent 1232215 commit f089800

File tree

3 files changed

+5
-34
lines changed

3 files changed

+5
-34
lines changed

core/src/main/scala/org/apache/spark/TaskContextHelper.scala

Lines changed: 0 additions & 29 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -641,13 +641,13 @@ class DAGScheduler(
641641
val split = rdd.partitions(job.partitions(0))
642642
val taskContext = new TaskContextImpl(job.finalStage.id, job.partitions(0), taskAttemptId = 0,
643643
attemptNumber = 0, runningLocally = true)
644-
TaskContextHelper.setTaskContext(taskContext)
644+
TaskContext.setTaskContext(taskContext)
645645
try {
646646
val result = job.func(taskContext, rdd.iterator(split, taskContext))
647647
job.listener.taskSucceeded(0, result)
648648
} finally {
649649
taskContext.markTaskCompleted()
650-
TaskContextHelper.unset()
650+
TaskContext.unset()
651651
}
652652
} catch {
653653
case e: Exception =>

core/src/main/scala/org/apache/spark/scheduler/Task.scala

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
2222

2323
import scala.collection.mutable.HashMap
2424

25-
import org.apache.spark.{TaskContextHelper, TaskContextImpl, TaskContext}
25+
import org.apache.spark.{TaskContextImpl, TaskContext}
2626
import org.apache.spark.executor.TaskMetrics
2727
import org.apache.spark.serializer.SerializerInstance
2828
import org.apache.spark.util.ByteBufferInputStream
@@ -54,7 +54,7 @@ private[spark] abstract class Task[T](val stageId: Int, var partitionId: Int) ex
5454
final def run(taskAttemptId: Long, attemptNumber: Int): T = {
5555
context = new TaskContextImpl(stageId = stageId, partitionId = partitionId,
5656
taskAttemptId = taskAttemptId, attemptNumber = attemptNumber, runningLocally = false)
57-
TaskContextHelper.setTaskContext(context)
57+
TaskContext.setTaskContext(context)
5858
context.taskMetrics.setHostname(Utils.localHostName())
5959
taskThread = Thread.currentThread()
6060
if (_killed) {
@@ -64,7 +64,7 @@ private[spark] abstract class Task[T](val stageId: Int, var partitionId: Int) ex
6464
runTask(context)
6565
} finally {
6666
context.markTaskCompleted()
67-
TaskContextHelper.unset()
67+
TaskContext.unset()
6868
}
6969
}
7070

0 commit comments

Comments (0)