Commit 81f9fd2

committed
put the filter inside
1 parent 658814c commit 81f9fd2

2 files changed: +11 -1 lines changed

core/src/main/scala/org/apache/spark/ui/UIUtils.scala

Lines changed: 1 addition & 1 deletion
@@ -362,7 +362,7 @@ private[spark] object UIUtils extends Logging {
         { g.incomingEdges.map { e => <div class="incoming-edge">{e.fromId},{e.toId}</div> } }
         { g.outgoingEdges.map { e => <div class="outgoing-edge">{e.fromId},{e.toId}</div> } }
         {
-          g.rootCluster.getAllNodes.filter(_.cached).map { n =>
+          g.rootCluster.getCachedNode.map { n =>
             <div class="cached-rdd">{n.id}</div>
           }
         }

core/src/main/scala/org/apache/spark/ui/scope/RDDOperationGraph.scala

Lines changed: 10 additions & 0 deletions
@@ -70,6 +70,16 @@ private[ui] class RDDOperationCluster(val id: String, private var _name: String)
   def getAllNodes: Seq[RDDOperationNode] = {
     _childNodes ++ _childClusters.flatMap(_.childNodes)
   }
+
+  /** Return all the nodes which are cached. */
+  def getCachedNode: Seq[RDDOperationNode] = {
+    val cachedNodes = new ListBuffer[RDDOperationNode]
+    cachedNodes ++= _childNodes.filter(_.cached)
+    for (cluster <- _childClusters) {
+      cachedNodes ++= cluster._childNodes.filter(_.cached)
+    }
+    cachedNodes
+  }
 }
 
 private[ui] object RDDOperationGraph extends Logging {
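
Side note (not part of this commit): the same one-level collection could be expressed without a mutable ListBuffer. A minimal sketch, assuming the same private _childNodes and _childClusters fields and the cached flag on RDDOperationNode shown in the diff above:

    // Illustrative alternative: cached nodes of this cluster plus those of its
    // direct child clusters, built with filter/flatMap instead of a ListBuffer.
    def getCachedNode: Seq[RDDOperationNode] = {
      _childNodes.filter(_.cached) ++
        _childClusters.flatMap(cluster => cluster._childNodes.filter(_.cached))
    }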
