diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 65162f4fdcd6..789803951920 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -362,7 +362,7 @@ private[spark] object UIUtils extends Logging {
       { g.incomingEdges.map { e => <div class="incoming-edge">{e.fromId},{e.toId}</div> } }
       { g.outgoingEdges.map { e => <div class="outgoing-edge">{e.fromId},{e.toId}</div> } }
       {
-        g.rootCluster.getAllNodes.filter(_.cached).map { n =>
+        g.rootCluster.getCachedNodes.map { n =>
           <div class="cached-rdd">{n.id}</div>
         }
       }
diff --git a/core/src/main/scala/org/apache/spark/ui/scope/RDDOperationGraph.scala b/core/src/main/scala/org/apache/spark/ui/scope/RDDOperationGraph.scala
index d6a5085db1ef..ffea9817c0b0 100644
--- a/core/src/main/scala/org/apache/spark/ui/scope/RDDOperationGraph.scala
+++ b/core/src/main/scala/org/apache/spark/ui/scope/RDDOperationGraph.scala
@@ -66,9 +66,9 @@ private[ui] class RDDOperationCluster(val id: String, private var _name: String)
     _childClusters += childCluster
   }
 
-  /** Return all the nodes container in this cluster, including ones nested in other clusters. */
-  def getAllNodes: Seq[RDDOperationNode] = {
-    _childNodes ++ _childClusters.flatMap(_.childNodes)
+  /** Return all the nodes which are cached. */
+  def getCachedNodes: Seq[RDDOperationNode] = {
+    _childNodes.filter(_.cached) ++ _childClusters.flatMap(_.getCachedNodes)
   }
 }
 