Commit 81b9d5b

sryza authored and mateiz committed

SPARK-3052. Misleading and spurious FileSystem closed errors whenever a job fails while reading from Hadoop

Author: Sandy Ryza <[email protected]>

Closes apache#1956 from sryza/sandy-spark-3052 and squashes the following commits:

815813a [Sandy Ryza] SPARK-3052. Misleading and spurious FileSystem closed errors whenever a job fails while reading from Hadoop
1 parent 066f31a commit 81b9d5b

File tree

2 files changed, +13 -3 lines changed


core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala

Lines changed: 7 additions & 2 deletions

@@ -42,7 +42,8 @@ import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.executor.{DataReadMethod, InputMetrics}
 import org.apache.spark.rdd.HadoopRDD.HadoopMapPartitionsWithSplitRDD
-import org.apache.spark.util.NextIterator
+import org.apache.spark.util.{NextIterator, Utils}
+

 /**
  * A Spark split class that wraps around a Hadoop InputSplit.
@@ -228,7 +229,11 @@ class HadoopRDD[K, V](
         try {
           reader.close()
         } catch {
-          case e: Exception => logWarning("Exception in RecordReader.close()", e)
+          case e: Exception => {
+            if (!Utils.inShutdown()) {
+              logWarning("Exception in RecordReader.close()", e)
+            }
+          }
         }
       }
     }

core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala

Lines changed: 6 additions & 1 deletion

@@ -35,6 +35,7 @@ import org.apache.spark.SerializableWritable
 import org.apache.spark.{SparkContext, TaskContext}
 import org.apache.spark.executor.{DataReadMethod, InputMetrics}
 import org.apache.spark.rdd.NewHadoopRDD.NewHadoopMapPartitionsWithSplitRDD
+import org.apache.spark.util.Utils

 private[spark] class NewHadoopPartition(
     rddId: Int,
@@ -153,7 +154,11 @@ class NewHadoopRDD[K, V](
       try {
         reader.close()
       } catch {
-        case e: Exception => logWarning("Exception in RecordReader.close()", e)
+        case e: Exception => {
+          if (!Utils.inShutdown()) {
+            logWarning("Exception in RecordReader.close()", e)
+          }
+        }
       }
     }
   }
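
For context, the guarded-close pattern applied in both files can be reproduced outside Spark. The sketch below is a minimal, self-contained approximation, not Spark's actual implementation: ShutdownProbe and closeAndMaybeWarn are hypothetical names, and the shutdown check relies only on the standard JVM behavior that Runtime.addShutdownHook throws IllegalStateException once shutdown has begun, which is in the spirit of the Utils.inShutdown() call used in the diff.

import java.io.Closeable

object ShutdownProbe {
  // Best-effort check for whether the JVM has begun shutting down: hook
  // registration is rejected with IllegalStateException during shutdown.
  def inShutdown(): Boolean = {
    try {
      val hook = new Thread() { override def run(): Unit = () }
      Runtime.getRuntime.addShutdownHook(hook)
      Runtime.getRuntime.removeShutdownHook(hook)
      false
    } catch {
      case _: IllegalStateException => true
    }
  }
}

object QuietClose {
  // Close a resource, but only warn when the JVM is not shutting down.
  // During shutdown, a "FileSystem closed" IOException from close() is
  // expected noise, typically because Hadoop's own shutdown hook has
  // already closed its cached FileSystem instances.
  def closeAndMaybeWarn(resource: Closeable, name: String): Unit = {
    try {
      resource.close()
    } catch {
      case e: Exception =>
        if (!ShutdownProbe.inShutdown()) {
          System.err.println(s"Exception in $name.close(): $e")
        }
    }
  }
}

A call site would then look like QuietClose.closeAndMaybeWarn(someReader, "RecordReader") for any Closeable resource, which mirrors the behavior the commit gives the two RDD implementations.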
