Commit 41fa50d

Add a legacy constructor for StorageStatus
This just makes it easier to create one with a source of blocks.
1 parent 53af15d commit 41fa50d

File tree

2 files changed: +17 -8 lines changed

core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
core/src/main/scala/org/apache/spark/storage/StorageUtils.scala

core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala

Lines changed: 2 additions & 4 deletions
@@ -264,10 +264,8 @@ class BlockManagerMasterActor(val isLocal: Boolean, conf: SparkConf, listenerBus
   }

   private def storageStatus: Array[StorageStatus] = {
-    blockManagerInfo.map { case(blockManagerId, info) =>
-      val storageStatus = new StorageStatus(blockManagerId, info.maxMem)
-      info.blocks.foreach { case (id, status) => storageStatus.addBlock(id, status) }
-      storageStatus
+    blockManagerInfo.map { case (blockManagerId, info) =>
+      new StorageStatus(blockManagerId, info.maxMem, info.blocks)
     }.toArray
   }

core/src/main/scala/org/apache/spark/storage/StorageUtils.scala

Lines changed: 15 additions & 4 deletions
@@ -25,13 +25,23 @@ import org.apache.spark.annotation.DeveloperApi

 /**
  * :: DeveloperApi ::
- * Storage information for each BlockManager.
+ * Storage information for each BlockManager. This class assumes BlockId and BlockStatus are
+ * immutable, such that the consumers of this class will not mutate the source of the information.
  */
 @DeveloperApi
 class StorageStatus(val blockManagerId: BlockManagerId, val maxMem: Long) {
   private val _blocks = new mutable.HashMap[BlockId, BlockStatus]
   private val _rddIds = new mutable.HashSet[Int]

+  /**
+   * Instantiate a StorageStatus with the given initial blocks. This essentially makes a copy of
+   * the original blocks map such that the fate of this storage status is not tied to the source.
+   */
+  def this(bmid: BlockManagerId, maxMem: Long, initialBlocks: Map[BlockId, BlockStatus]) {
+    this(bmid, maxMem)
+    initialBlocks.foreach { case (blockId, blockStatus) => addBlock(blockId, blockStatus) }
+  }
+
   /** Return the blocks stored in this block manager as a mapping from ID to status. */
   def blocks: Map[BlockId, BlockStatus] = _blocks

@@ -174,9 +184,10 @@ private[spark] object StorageUtils {
     storageStatuses
       .filter(_.rddIds.contains(rddId))
       .map { status =>
-        val newStatus = new StorageStatus(status.blockManagerId, status.maxMem)
-        status.rddBlocks(rddId).foreach { case (bid, bstatus) => newStatus.addBlock(bid, bstatus) }
-        newStatus
+        new StorageStatus(
+          status.blockManagerId,
+          status.maxMem,
+          status.rddBlocks(rddId).asInstanceOf[Map[BlockId, BlockStatus]])
       }.toArray
   }
 }
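
The pattern introduced by this commit is small: an auxiliary constructor that seeds the class's private mutable block map from a caller-supplied immutable map, so the new StorageStatus holds its own copy rather than sharing state with its source. Below is a minimal, self-contained Scala sketch of that pattern; the Status class, its String/Long block representation, and the example values are simplified stand-ins for illustration, not Spark's actual StorageStatus API.

import scala.collection.mutable

// Hypothetical stand-in for StorageStatus: `id` plays the role of blockManagerId and
// block sizes stand in for BlockStatus. Only the constructor pattern mirrors the commit.
class Status(val id: String, val maxMem: Long) {
  private val _blocks = new mutable.HashMap[String, Long]

  /** Auxiliary constructor that seeds this instance with the given initial blocks. */
  def this(id: String, maxMem: Long, initialBlocks: Map[String, Long]) = {
    this(id, maxMem)
    // Copy each entry so this instance is not tied to the caller's map.
    initialBlocks.foreach { case (blockId, size) => addBlock(blockId, size) }
  }

  /** Record a block, mirroring StorageStatus.addBlock in spirit. */
  def addBlock(blockId: String, size: Long): Unit = {
    _blocks(blockId) = size
  }

  /** Immutable view of the blocks currently recorded. */
  def blocks: Map[String, Long] = _blocks.toMap
}

object StatusExample extends App {
  val source = Map("rdd_0_0" -> 100L, "rdd_0_1" -> 200L)
  val status = new Status("exec-1", 1024L, source)
  println(status.blocks) // Map(rdd_0_0 -> 100, rdd_0_1 -> 200)
}

This mirrors the two call sites in the diff above: both previously built an empty StorageStatus and then looped over addBlock, whereas with the three-argument constructor the copy happens in a single expression.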
