Skip to content

Commit 773fbfe

Browse files
petermaxlee authored and zsxwing committed
[SPARK-16304] LinkageError should not crash Spark executor
## What changes were proposed in this pull request?

This patch updates the failure handling logic so the Spark executor does not crash when seeing a LinkageError.

## How was this patch tested?

Added an end-to-end test in FailureSuite.

Author: petermaxlee <[email protected]>

Closes #13982 from petermaxlee/SPARK-16304.
1 parent b4a7b65 commit 773fbfe

File tree

2 files changed

+14
-1
lines changed

2 files changed

+14
-1
lines changed

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1977,7 +1977,11 @@ private[spark] object Utils extends Logging {
19771977
/** Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal. */
19781978
def isFatalError(e: Throwable): Boolean = {
19791979
e match {
1980-
case NonFatal(_) | _: InterruptedException | _: NotImplementedError | _: ControlThrowable =>
1980+
case NonFatal(_) |
1981+
_: InterruptedException |
1982+
_: NotImplementedError |
1983+
_: ControlThrowable |
1984+
_: LinkageError =>
19811985
false
19821986
case _ =>
19831987
true

core/src/test/scala/org/apache/spark/FailureSuite.scala

Lines changed: 9 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -253,6 +253,15 @@ class FailureSuite extends SparkFunSuite with LocalSparkContext {
253253
rdd.count()
254254
}
255255

256+
test("SPARK-16304: Link error should not crash executor") {
257+
sc = new SparkContext("local[1,2]", "test")
258+
intercept[SparkException] {
259+
sc.parallelize(1 to 2).foreach { i =>
260+
throw new LinkageError()
261+
}
262+
}
263+
}
264+
256265
// TODO: Need to add tests with shuffle fetch failures.
257266
}
258267

0 commit comments

Comments
 (0)