Commit b02c82a (1 parent: 292a4bd)

More code style cleanup:
- memCol -> sizeCol
- 4 spaces for parameter indentation
- move `case` statements
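The three changes, combined in one illustrative sketch (hedged: the `UITableBuilder[T]` API is the one defined in the UITables.scala diff below; the `Rcvr` row type and the `demo` helpers are invented for this example):

    // Sketch only; assumes the builder API from this commit's UITables.scala.
    case class Rcvr(id: Int, memoryMb: Long)  // hypothetical row type

    def demo(t: UITableBuilder[Rcvr]): Unit = {
      // 1. memCol is renamed sizeCol; it formats a megabyte count as e.g. "4.0 MB".
      t.sizeCol("Memory") { _.memoryMb }
      // 2. `case` now sits on the opening line of the function literal.
      t.col("Receiver") { case Rcvr(id, _) => id.toString }
    }

    // 3. Multi-line parameter lists are indented by 4 spaces.
    def demoCol(
        name: String,
        sortable: Boolean = true): Unit = ()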

File tree

7 files changed: +37 −47 lines


core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@ private[spark] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app
       <a href={executor.worker.webUiAddress}>{executor.worker.id}</a>
     }
     t.intCol("Cores") { _.cores }
-    t.memCol("Memory") { _.memory }
+    t.sizeCol("Memory") { _.memory }
     t.col("State") { _.state.toString }
     t.customCol("Logs") { executor =>
       <a href={"%s/logPage?appId=%s&executorId=%s&logType=stdout"

core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala

Lines changed: 2 additions & 2 deletions
@@ -64,7 +64,7 @@ private[spark] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
     }
     t.col("Name") { _.id }
     t.intCol("Cores") { _.coresGranted }
-    t.memCol("Memory per Node") { _.desc.memoryPerSlave }
+    t.sizeCol("Memory per Node") { _.desc.memoryPerSlave }
     t.dateCol("Submitted Time") { _.submitDate }
     t.col("User") { _.desc.user }
     t.col("State") { _.state.toString }
@@ -81,7 +81,7 @@ private[spark] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
     }
     t.col("State") { _.state.toString }
     t.intCol("Cores") { _.desc.cores }
-    t.memCol("Memory") { _.desc.mem.toLong }
+    t.sizeCol("Memory") { _.desc.mem.toLong }
     t.col("Main Class") { _.desc.command.arguments(1) }
     t.build()
   }

core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerPage.scala

Lines changed: 2 additions & 2 deletions
@@ -47,7 +47,7 @@ private[spark] class WorkerPage(parent: WorkerWebUI) extends WebUIPage("") {
     t.intCol("Executor ID") { _.execId }
     t.intCol("Cores") { _.cores }
     t.col("State") { _.state.toString }
-    t.memCol("Memory") { _.memory }
+    t.sizeCol("Memory") { _.memory }
     t.customCol("Job Details") { executor =>
       <ul class="unstyled">
         <li><strong>ID:</strong> {executor.appId}</li>
@@ -70,7 +70,7 @@ private[spark] class WorkerPage(parent: WorkerWebUI) extends WebUIPage("") {
     t.col("Main Class") { _.driverDesc.command.arguments(1) }
     t.col("State") { _.finalState.getOrElse(DriverState.RUNNING).toString }
     t.intCol("Cores") { _.driverDesc.cores }
-    t.memCol("Memory") { _.driverDesc.mem }
+    t.sizeCol("Memory") { _.driverDesc.mem }
     t.customCol("Logs") { driver =>
       <a href={s"logPage?driverId=${driver.driverId}&logType=stdout"}>stdout</a>
       <a href={s"logPage?driverId=${driver.driverId}&logType=stderr"}>stderr</a>

core/src/main/scala/org/apache/spark/ui/UITables.scala

Lines changed: 17 additions & 27 deletions
@@ -20,7 +20,7 @@ package org.apache.spark.ui
 import java.util.Date

 import scala.collection.mutable
-import scala.xml.{Text, Node}
+import scala.xml.{Node, Text}

 import org.apache.spark.util.Utils

@@ -142,17 +142,7 @@ private[spark] class UITable[T] (cols: Seq[UITableColumn[T, _]], fixedWidth: Boo
  * Columns have additional options, such as controlling their sort keys; see the individual
  * methods' documentation for more details.
  *
- * - Call `build` to construct an immutable object which can be used to render tables.
- *
- * To remove some of the boilerplate here, you can statically import the `col` methods; for example:
- *
- *   val myTable: UITable[MyRowDataType] = {
- *     val builder = new UITableBuilder[MyRowDataType]()
- *     import builder._
- *     col("Name") { _.name }
- *     [...]
- *     build
- *   }
+ * - Call `build()` to construct an immutable object which can be used to render tables.
  *
  * There are many other features, including support for arbitrary markup in custom column types;
  * see the actual uses in the web UI code for more details.
@@ -168,9 +158,9 @@ private[spark] class UITableBuilder[T](fixedWidth: Boolean = false) {
    * render the contents of the TD tag, not the TD tag itself.
    */
   def customCol[V](
-    name: String,
-    sortable: Boolean = true,
-    sortKey: Option[T => String] = None)(renderer: T => Seq[Node]): UITableBuilder[T] = {
+      name: String,
+      sortable: Boolean = true,
+      sortKey: Option[T => String] = None)(renderer: T => Seq[Node]): UITableBuilder[T] = {
     val customColumn = new UITableColumn[T, T](name, null, sortable, sortKey, identity) {
       override def renderCellContents(row: T) = renderer(row)
     }
@@ -179,32 +169,32 @@ private[spark] class UITableBuilder[T](fixedWidth: Boolean = false) {
   }

   def col[V](
-    name: String,
-    formatter: V => String,
-    sortable: Boolean = true,
-    sortKey: Option[V => String] = None)(fieldExtractor: T => V): UITableBuilder[T] = {
+      name: String,
+      formatter: V => String,
+      sortable: Boolean = true,
+      sortKey: Option[V => String] = None)(fieldExtractor: T => V): UITableBuilder[T] = {
     cols.append(UITableColumn(name, formatter, sortable, sortKey, fieldExtractor))
     this
   }

   def col(
-    name: String,
-    sortable: Boolean = true,
-    sortKey: Option[String => String] = None)(fieldExtractor: T => String): UITableBuilder[T] = {
+      name: String,
+      sortable: Boolean = true,
+      sortKey: Option[String => String] = None)(fieldExtractor: T => String): UITableBuilder[T] = {
     col[String](name, {x: String => x}, sortable, sortKey)(fieldExtractor)
   }

   def intCol(
-    name: String,
-    formatter: Int => String = { x: Int => x.toString },
-    sortable: Boolean = true)(fieldExtractor: T => Int): UITableBuilder[T] = {
+      name: String,
+      formatter: Int => String = { x: Int => x.toString },
+      sortable: Boolean = true)(fieldExtractor: T => Int): UITableBuilder[T] = {
     col[Int](name, formatter, sortable = sortable)(fieldExtractor)
   }

   /**
-   * Display a column of memory sizes, in megabytes, as human-readable strings, such as "4.0 MB".
+   * Display a column of sizes, in megabytes, as human-readable strings, such as "4.0 MB".
    */
-  def memCol(name: String)(fieldExtractor: T => Long): UITableBuilder[T] = {
+  def sizeCol(name: String)(fieldExtractor: T => Long): UITableBuilder[T] = {
     col[Long](
       name,
       formatter = Utils.megabytesToString,
core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala

Lines changed: 4 additions & 4 deletions
@@ -42,16 +42,16 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
       val remaining = Utils.bytesToString(status.memRemaining)
       Text(s"$used ($remaining Remaining)")
     }
-    t.memCol("Disk Usage") { case (rddId, status) => status.diskUsedByRdd(rddId) }
+    t.sizeCol("Disk Usage") { case (rddId, status) => status.diskUsedByRdd(rddId) }
     t.build()
   }

-  val blockTable: UITable[(BlockId, BlockStatus, Seq[String])] = {
+  private val blockTable: UITable[(BlockId, BlockStatus, Seq[String])] = {
     val t = new UITableBuilder[(BlockId, BlockStatus, Seq[String])]()
     t.col("Block Name") { case (id, block, locations) => id.toString }
     t.col("Storage Level") { case (id, block, locations) => block.storageLevel.description }
-    t. memCol("Size in Memory") { case (id, block, locations) => block.memSize }
-    t.memCol("Size on Disk") { case (id, block, locations) => block.diskSize }
+    t. sizeCol("Size in Memory") { case (id, block, locations) => block.memSize }
+    t.sizeCol("Size on Disk") { case (id, block, locations) => block.diskSize }
     t.customCol("Executors") { case (id, block, locations) =>
       locations.map(l => <span>{l}<br/></span>)
     }

core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala

Lines changed: 3 additions & 3 deletions
@@ -40,9 +40,9 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
     t. col("Fraction Cached") { rdd =>
       "%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)
     }
-    t.memCol("Size in Memory") { _.memSize }
-    t.memCol("Size in Tachyon") { _.tachyonSize }
-    t.memCol("Size on Disk") { _.diskSize }
+    t.sizeCol("Size in Memory") { _.memSize }
+    t.sizeCol("Size in Tachyon") { _.tachyonSize }
+    t.sizeCol("Size on Disk") { _.diskSize }
     t.build()
   }

streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingPage.scala

Lines changed: 8 additions & 8 deletions
@@ -100,23 +100,23 @@ private[ui] class StreamingPage(parent: StreamingTab)
     t.col("Records in last batch\n[" + formatDate(Calendar.getInstance().getTime()) + "]") {
       case (receiverId, _) => formatNumber(lastBatchReceivedRecord(receiverId))
     }
-    t.col("Minimum rate\n[records/sec]") {
-      case (receiverId, _) => receivedRecordDistributions(receiverId).map {
+    t.col("Minimum rate\n[records/sec]") { case (receiverId, _) =>
+      receivedRecordDistributions(receiverId).map {
         _.getQuantiles(Seq(0.0)).map(formatNumber).head
       }.getOrElse(empty)
     }
-    t.col("Median rate\n[records/sec]") {
-      case (receiverId, _) => receivedRecordDistributions(receiverId).map {
+    t.col("Median rate\n[records/sec]") { case (receiverId, _) =>
+      receivedRecordDistributions(receiverId).map {
         _.getQuantiles(Seq(0.5)).map(formatNumber).head
       }.getOrElse(empty)
     }
-    t.col("Maximum rate\n[records/sec]") {
-      case (receiverId, _) => receivedRecordDistributions(receiverId).map {
+    t.col("Maximum rate\n[records/sec]") { case (receiverId, _) =>
+      receivedRecordDistributions(receiverId).map {
         _.getQuantiles(Seq(1.0)).map(formatNumber).head
       }.getOrElse(empty)
     }
-    t.col("Last Error") {
-      case (_, receiverInfo) => receiverInfo.map { info =>
+    t.col("Last Error") { case (_, receiverInfo) =>
+      receiverInfo.map { info =>
         val msg = s"${info.lastErrorMessage} - ${info.lastError}"
         if (msg.size > 100) msg.take(97) + "..." else msg
       }.getOrElse(empty)
