Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -602,8 +602,9 @@ private[spark] class BlockManager(
numFetchFailures += 1
if (numFetchFailures == locations.size) {
// Fetching this block failed from every known location; give up,
// log a warning with the most recent cause, and return None instead of throwing
throw new BlockFetchException(s"Failed to fetch block from" +
logWarning(s"Failed to fetch block from" +
s" ${locations.size} locations. Most recent failure cause:", e)
return None
} else {
// This location failed, so we retry fetch from a different one by returning null here
logWarning(s"Failed to fetch remote block $blockId " +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -509,10 +509,9 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
assert(list1Get.isDefined, "list1Get expected to be fetched")
store3.stop()
store3 = null
// exception throw because there is no locations
intercept[BlockFetchException] {
list1Get = store.getRemoteBytes("list1")
}
// Fetch should fail because there are no locations, but no exception should be thrown
list1Get = store.getRemoteBytes("list1")
assert(list1Get.isEmpty, "list1Get expected to fail")
} finally {
origTimeoutOpt match {
case Some(t) => conf.set("spark.network.timeout", t)
Expand Down