Skip to content

Commit 45f5003

Browse files
committed
[SPARK-16304] LinkageError should not crash Spark executor
1 parent d8a87a3 commit 45f5003

File tree

3 files changed

+16
-1
lines changed

3 files changed

+16
-1
lines changed

core/src/main/scala/org/apache/spark/executor/Executor.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -406,7 +406,7 @@ private[spark] class Executor(
406406

407407
// Don't forcibly exit unless the exception was inherently fatal, to avoid
408408
// stopping other tasks unnecessarily.
409-
if (Utils.isFatalError(t)) {
409+
if (Utils.isFatalError(t) && !Utils.isLinkageError(t)) {
410410
SparkUncaughtExceptionHandler.uncaughtException(t)
411411
}
412412

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1872,6 +1872,12 @@ private[spark] object Utils extends Logging {
18721872
}
18731873
}
18741874

1875+
/**
 * Whether the given throwable is a [[java.lang.LinkageError]] — i.e. a class-loading /
 * linkage failure (this covers subclasses such as `NoClassDefFoundError` and
 * `UnsatisfiedLinkError`).
 *
 * @param e the throwable to classify
 * @return true iff `e` is a `LinkageError` (or a subclass thereof)
 */
def isLinkageError(e: Throwable): Boolean =
  e.isInstanceOf[java.lang.LinkageError]
1880+
18751881
/**
18761882
* Return a well-formed URI for the file described by a user input string.
18771883
*

core/src/test/scala/org/apache/spark/FailureSuite.scala

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -253,6 +253,15 @@ class FailureSuite extends SparkFunSuite with LocalSparkContext {
253253
rdd.count()
254254
}
255255

256+
test("SPARK-16304: Link error should not crash executor") {
  // local[1,2]: one core, two task attempts — the task fails once, is retried on the
  // same (still-alive) executor, and only then surfaces as a SparkException.
  sc = new SparkContext("local[1,2]", "test")
  intercept[SparkException] {
    // Every task deliberately throws a LinkageError; the job must fail with a
    // SparkException rather than killing the executor JVM.
    sc.parallelize(1 to 2).foreach { _ => throw new LinkageError() }
  }
}
264+
256265
// TODO: Need to add tests with shuffle fetch failures.
257266
}
258267

0 commit comments

Comments (0)