From 3ccad539410615156dea2ee83ad7d7841f520a46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cattilapiros=E2=80=9D?= Date: Mon, 12 Feb 2018 20:17:32 +0100 Subject: [PATCH 1/5] initial version --- .../org/apache/spark/ui/storage/RDDPage.scala | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala index 02cee7f8c5b33..4eb4dc6a35a3b 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala @@ -23,7 +23,7 @@ import javax.servlet.http.HttpServletRequest import scala.xml.{Node, Unparsed} import org.apache.spark.status.AppStatusStore -import org.apache.spark.status.api.v1.{RDDDataDistribution, RDDPartitionInfo} +import org.apache.spark.status.api.v1.{ExecutorSummary, RDDDataDistribution, RDDPartitionInfo} import org.apache.spark.ui._ import org.apache.spark.util.Utils @@ -76,7 +76,8 @@ private[ui] class RDDPage(parent: SparkUITab, store: AppStatusStore) extends Web rddStorageInfo.partitions.get, blockPageSize, blockSortColumn, - blockSortDesc) + blockSortDesc, + store.executorList(false)) _blockTable.table(page) } catch { case e @ (_ : IllegalArgumentException | _ : IndexOutOfBoundsException) => @@ -182,7 +183,8 @@ private[ui] class BlockDataSource( rddPartitions: Seq[RDDPartitionInfo], pageSize: Int, sortColumn: String, - desc: Boolean) extends PagedDataSource[BlockTableRowData](pageSize) { + desc: Boolean, + executorIdToAddress: Map[String, String]) extends PagedDataSource[BlockTableRowData](pageSize) { private val data = rddPartitions.map(blockRow).sorted(ordering(sortColumn, desc)) @@ -198,7 +200,7 @@ private[ui] class BlockDataSource( rddPartition.storageLevel, rddPartition.memoryUsed, rddPartition.diskUsed, - rddPartition.executors.mkString(" ")) + rddPartition.executors.map(id => executorIdToAddress.get(id).getOrElse(id)).mkString(" ")) } /** @@ -226,7 +228,8 @@ private[ui] class BlockPagedTable( rddPartitions: Seq[RDDPartitionInfo], pageSize: Int, sortColumn: String, - desc: Boolean) extends PagedTable[BlockTableRowData] { + desc: Boolean, + executorSummaries: Seq[ExecutorSummary]) extends PagedTable[BlockTableRowData] { override def tableId: String = "rdd-storage-by-block-table" @@ -243,7 +246,8 @@ private[ui] class BlockPagedTable( rddPartitions, pageSize, sortColumn, - desc) + desc, + executorSummaries.map(ex => (ex.id, ex.hostPort)).toMap) override def pageLink(page: Int): String = { val encodedSortColumn = URLEncoder.encode(sortColumn, "UTF-8") From ec160aaaad3c3baef07deb93f9c733c88be35fe0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cattilapiros=E2=80=9D?= Date: Tue, 13 Feb 2018 10:33:55 +0100 Subject: [PATCH 2/5] sorting and checkstyle --- .../main/scala/org/apache/spark/ui/storage/RDDPage.scala | 7 +++++-- .../org/apache/spark/graphx/lib/LabelPropagation.scala | 3 ++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala index 4eb4dc6a35a3b..652efd193a89f 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala @@ -200,7 +200,10 @@ private[ui] class BlockDataSource( rddPartition.storageLevel, rddPartition.memoryUsed, rddPartition.diskUsed, - rddPartition.executors.map(id => 
executorIdToAddress.get(id).getOrElse(id)).mkString(" ")) + rddPartition.executors + .sorted + .map { id => executorIdToAddress.get(id).getOrElse(id) } + .mkString(" ")) } /** @@ -247,7 +250,7 @@ private[ui] class BlockPagedTable( pageSize, sortColumn, desc, - executorSummaries.map(ex => (ex.id, ex.hostPort)).toMap) + executorSummaries.map { ex => (ex.id, ex.hostPort) }.toMap) override def pageLink(page: Int): String = { val encodedSortColumn = URLEncoder.encode(sortColumn, "UTF-8") diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala index cb3025f8bef54..0be2f45af8bea 100644 --- a/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala +++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala @@ -55,7 +55,8 @@ object LabelPropagation { val count1Val = count1.getOrElse(i, 0L) val count2Val = count2.getOrElse(i, 0L) i -> (count1Val + count2Val) - }(collection.breakOut) // more efficient alternative to [[collection.Traversable.toMap]] + }(collection.breakOut) // more efficient alternative to [[collection.Traversable + // .toMap]] } def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]): VertexId = { if (message.isEmpty) attr else message.maxBy(_._2)._1 From eee987ad4344f22696abf9338a36bb27bdf8db66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cattilapiros=E2=80=9D?= Date: Tue, 13 Feb 2018 10:40:16 +0100 Subject: [PATCH 3/5] revert --- .../scala/org/apache/spark/graphx/lib/LabelPropagation.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala index 0be2f45af8bea..cb3025f8bef54 100644 --- a/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala +++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/LabelPropagation.scala @@ -55,8 +55,7 @@ object LabelPropagation { val count1Val = count1.getOrElse(i, 0L) val count2Val = count2.getOrElse(i, 0L) i -> (count1Val + count2Val) - }(collection.breakOut) // more efficient alternative to [[collection.Traversable - // .toMap]] + }(collection.breakOut) // more efficient alternative to [[collection.Traversable.toMap]] } def vertexProgram(vid: VertexId, attr: Long, message: Map[VertexId, Long]): VertexId = { if (message.isEmpty) attr else message.maxBy(_._2)._1 From ef69450ff382d6d6a8d75a74a2473f6973bc6d71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cattilapiros=E2=80=9D?= Date: Tue, 13 Feb 2018 15:45:41 +0100 Subject: [PATCH 4/5] loading only live executors --- core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala index 652efd193a89f..1d3d012345898 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala @@ -77,7 +77,7 @@ private[ui] class RDDPage(parent: SparkUITab, store: AppStatusStore) extends Web blockPageSize, blockSortColumn, blockSortDesc, - store.executorList(false)) + store.executorList(true)) _blockTable.table(page) } catch { case e @ (_ : IllegalArgumentException | _ : IndexOutOfBoundsException) => From 75bb8c5120fa58608560406bc714739f2caa8c87 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=E2=80=9Cattilapiros=E2=80=9D?= Date: Wed, 14 Feb 2018 12:04:59 +0100 Subject: [PATCH 5/5] Sorting by address --- core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala index 1d3d012345898..2674b9291203a 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala @@ -201,8 +201,8 @@ private[ui] class BlockDataSource( rddPartition.memoryUsed, rddPartition.diskUsed, rddPartition.executors - .sorted .map { id => executorIdToAddress.get(id).getOrElse(id) } + .sorted .mkString(" ")) }
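
Note (not part of the patches): the net effect of this series is that the RDD storage page's block table shows each block's executor host:port address instead of the raw executor ID, looking the address up from the live executor list (store.executorList(true)), falling back to the ID when the executor is unknown, and sorting the rendered addresses (PATCH 5/5 sorts after the lookup, i.e. by address). The standalone Scala sketch below mirrors only that lookup-and-sort logic; ExecutorInfo and the sample data are hypothetical stand-ins for Spark's ExecutorSummary and RDDPartitionInfo, not Spark code.

object ExecutorAddressSketch {
  // Minimal stand-in for the two ExecutorSummary fields the patch uses (id, hostPort).
  final case class ExecutorInfo(id: String, hostPort: String)

  def executorAddresses(
      blockExecutorIds: Seq[String],
      liveExecutors: Seq[ExecutorInfo]): String = {
    // Same shape as executorSummaries.map { ex => (ex.id, ex.hostPort) }.toMap in BlockPagedTable.
    val executorIdToAddress: Map[String, String] =
      liveExecutors.map { ex => (ex.id, ex.hostPort) }.toMap
    blockExecutorIds
      .map { id => executorIdToAddress.get(id).getOrElse(id) } // fall back to the raw ID
      .sorted                                                  // final ordering: by address (PATCH 5/5)
      .mkString(" ")
  }

  def main(args: Array[String]): Unit = {
    val live = Seq(ExecutorInfo("0", "host-a:35777"), ExecutorInfo("1", "host-b:41231"))
    // Executor "2" is not in the live list, so its ID is kept as-is.
    // Prints: 2 host-a:35777 host-b:41231
    println(executorAddresses(Seq("1", "0", "2"), live))
  }
}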