
Commit 5cb1e0a

MetadataFetchFailedException extends FetchFailedException.
1 parent: 8861ee2

4 files changed: +19 -28 lines


core/src/main/scala/org/apache/spark/TaskEndReason.scala

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ case object Resubmitted extends TaskEndReason // Task was finished earlier but w
 
 @DeveloperApi
 case class FetchFailed(
-    bmAddress: BlockManagerId,
+    bmAddress: BlockManagerId, // Note that bmAddress can be null
     shuffleId: Int,
    shuffleId: Int,
    mapId: Int,
    reduceId: Int)
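
The new comment on bmAddress is the behavioral contract this commit introduces: a FetchFailed reason may now arrive with a null bmAddress when the failure came from the MapOutputTracker rather than a remote block manager. A minimal, hypothetical sketch of a consumer guarding against that (FetchFailureHandling and its printlns are illustrative, not Spark code):

import org.apache.spark.{FetchFailed, TaskEndReason}

// Hypothetical consumer of a TaskEndReason; not part of Spark.
object FetchFailureHandling {
  def handleTaskEnd(reason: TaskEndReason): Unit = reason match {
    case FetchFailed(bmAddress, shuffleId, mapId, reduceId) =>
      // After this commit bmAddress can be null (metadata fetch failures have no
      // block manager to point at), so guard before dereferencing it.
      val where = Option(bmAddress).map(_.host).getOrElse("<no block manager>")
      println(s"Fetch failed: shuffle=$shuffleId map=$mapId reduce=$reduceId host=$where")
    case other =>
      println(s"Task ended with: $other")
  }
}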

core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala

Lines changed: 4 additions & 0 deletions
@@ -21,6 +21,10 @@ import java.nio.ByteBuffer
 
 import org.apache.spark.util.SerializableBuffer
 
+/**
+ * Description of a task that gets passed onto executors to be executed, usually created by
+ * [[TaskSetManager.resourceOffer]].
+ */
 private[spark] class TaskDescription(
     val taskId: Long,
     val executorId: String,

core/src/main/scala/org/apache/spark/shuffle/FetchFailedException.scala

Lines changed: 14 additions & 0 deletions
@@ -23,6 +23,8 @@ import org.apache.spark.{FetchFailed, TaskEndReason}
 /**
  * Failed to fetch a shuffle block. The executor catches this exception and propagates it
  * back to DAGScheduler (through TaskEndReason) so we'd resubmit the previous stage.
+ *
+ * Note that bmAddress can be null.
  */
 private[spark] class FetchFailedException(
     bmAddress: BlockManagerId,
@@ -36,3 +38,15 @@ private[spark] class FetchFailedException(
 
   def toTaskEndReason: TaskEndReason = FetchFailed(bmAddress, shuffleId, mapId, reduceId)
 }
+
+/**
+ * Failed to get shuffle metadata from [[org.apache.spark.MapOutputTracker]].
+ */
+private[spark] class MetadataFetchFailedException(
+    shuffleId: Int,
+    reduceId: Int,
+    message: String)
+  extends FetchFailedException(null, shuffleId, -1, reduceId) {
+
+  override def getMessage: String = message
+}
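
Because the new class is-a FetchFailedException, anything that already catches the block-level failure handles the metadata-level one for free, and the inherited toTaskEndReason yields a FetchFailed with bmAddress = null and mapId = -1. A rough usage sketch, assuming it lives under the org.apache.spark package since both classes are private[spark]; lookupStatuses is an illustrative stand-in, not the real MapOutputTracker code:

// Placed under org.apache.spark.shuffle so the private[spark] classes are visible.
package org.apache.spark.shuffle

object MetadataFetchSketch {
  // Illustrative stand-in for a MapOutputTracker-style metadata lookup.
  def lookupStatuses(statuses: Map[Int, Seq[String]], shuffleId: Int, reduceId: Int): Seq[String] =
    statuses.getOrElse(shuffleId,
      throw new MetadataFetchFailedException(
        shuffleId, reduceId, s"Missing all output locations for shuffle $shuffleId"))

  def main(args: Array[String]): Unit = {
    try {
      lookupStatuses(Map.empty, shuffleId = 0, reduceId = 3)
    } catch {
      // One catch clause now covers both block-fetch and metadata-fetch failures.
      case e: FetchFailedException =>
        println(e.toTaskEndReason) // prints FetchFailed(null,0,-1,3)
    }
  }
}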

core/src/main/scala/org/apache/spark/shuffle/MetadataFetchFailedException.scala

Lines changed: 0 additions & 27 deletions
This file was deleted.
