Skip to content

Commit 4831000

Browse files
committed
SPARK-5613: Catch the ApplicationNotFoundException exception to prevent the
monitoring thread from being killed on YARN restart.
1 parent f318af0 commit 4831000

File tree

1 file changed

+9
-2
lines changed

1 file changed

+9
-2
lines changed

yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala

Lines changed: 9 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -20,6 +20,7 @@ package org.apache.spark.scheduler.cluster
2020
import scala.collection.mutable.ArrayBuffer
2121

2222
import org.apache.hadoop.yarn.api.records.{ApplicationId, YarnApplicationState}
23+
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException
2324

2425
import org.apache.spark.{SparkException, Logging, SparkContext}
2526
import org.apache.spark.deploy.yarn.{Client, ClientArguments}
@@ -132,8 +133,14 @@ private[spark] class YarnClientSchedulerBackend(
132133
val t = new Thread {
133134
override def run() {
134135
while (!stopping) {
135-
val report = client.getApplicationReport(appId)
136-
val state = report.getYarnApplicationState()
136+
var state : YarnApplicationState = null
137+
try {
138+
val report = client.getApplicationReport(appId)
139+
state = report.getYarnApplicationState()
140+
} catch {
141+
case e : ApplicationNotFoundException =>
142+
state = YarnApplicationState.KILLED
143+
}
137144
if (state == YarnApplicationState.FINISHED ||
138145
state == YarnApplicationState.KILLED ||
139146
state == YarnApplicationState.FAILED) {

0 commit comments

Comments (0)