Skip to content

Commit 7ae328f

Browse files
committed
Modified code style
1 parent d5f794a commit 7ae328f

File tree

60 files changed

+570
-361
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

60 files changed

+570
-361
lines changed

core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala

Lines changed: 58 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -226,7 +226,9 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
226226
val accumulableHeaders: Seq[String] = Seq("Accumulable", "Value")
227227
def accumulableRow(acc: AccumulableInfo): Elem =
228228
<tr><td>{acc.name}</td><td>{acc.value}</td></tr>
229-
val accumulableTable = UIUtils.listingTable(accumulableHeaders, accumulableRow,
229+
val accumulableTable = UIUtils.listingTable(
230+
accumulableHeaders,
231+
accumulableRow,
230232
accumulables.values.toSeq)
231233

232234
val taskHeadersAndCssClasses: Seq[(String, String)] =
@@ -265,9 +267,14 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
265267
val currentTime = System.currentTimeMillis()
266268
val taskTable = UIUtils.listingTable(
267269
unzipped._1,
268-
taskRow(hasAccumulators, stageData.hasInput, stageData.hasOutput,
269-
stageData.hasShuffleRead, stageData.hasShuffleWrite,
270-
stageData.hasBytesSpilled, currentTime),
270+
taskRow(
271+
hasAccumulators,
272+
stageData.hasInput,
273+
stageData.hasOutput,
274+
stageData.hasShuffleRead,
275+
stageData.hasShuffleWrite,
276+
stageData.hasBytesSpilled,
277+
currentTime),
271278
tasks,
272279
headerClasses = unzipped._2)
273280
// Excludes tasks which failed and have incomplete metrics
@@ -515,49 +522,42 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
515522
val host = taskInfo.host
516523
executorsSet += ((executorId, host))
517524

518-
val isSucceeded = taskInfo.successful
519-
val isFailed = taskInfo.failed
520-
val isRunning = taskInfo.running
521525
val classNameByStatus = {
522-
if (isSucceeded) {
526+
if (taskInfo.successful) {
523527
"succeeded"
524-
} else if (isFailed) {
528+
} else if (taskInfo.failed) {
525529
"failed"
526-
} else if (isRunning) {
530+
} else if (taskInfo.running) {
527531
"running"
528532
}
529533
}
530534

531535
val launchTime = taskInfo.launchTime
532-
val finishTime = if (!isRunning) taskInfo.finishTime else currentTime
536+
val finishTime = if (!taskInfo.running) taskInfo.finishTime else currentTime
533537
val totalExecutionTime = finishTime - launchTime
534538
minLaunchTime = launchTime.min(minLaunchTime)
535539
maxFinishTime = launchTime.max(maxFinishTime)
536540
numEffectiveTasks += 1
537541

542+
def toProportion(time: Long) = (time.toDouble / totalExecutionTime * 100).toLong
543+
538544
val metricsOpt = taskUIData.taskMetrics
539545
val shuffleReadTime =
540546
metricsOpt.flatMap(_.shuffleReadMetrics.map(_.fetchWaitTime)).getOrElse(0L)
541-
val shuffleReadTimeProportion =
542-
(shuffleReadTime.toDouble / totalExecutionTime * 100).toLong
547+
val shuffleReadTimeProportion = toProportion(shuffleReadTime)
543548
val shuffleWriteTime =
544549
(metricsOpt.flatMap(_.shuffleWriteMetrics
545550
.map(_.shuffleWriteTime)).getOrElse(0L) / 1e6).toLong
546-
val shuffleWriteTimeProportion =
547-
(shuffleWriteTime.toDouble / totalExecutionTime * 100).toLong
551+
val shuffleWriteTimeProportion = toProportion(shuffleWriteTime)
548552
val executorComputingTime = metricsOpt.map(_.executorRunTime).getOrElse(0L) -
549553
shuffleReadTime - shuffleWriteTime
550-
val executorComputingTimeProportion =
551-
(executorComputingTime.toDouble / totalExecutionTime * 100).toLong
554+
val executorComputingTimeProportion = toProportion(executorComputingTime)
552555
val serializationTime = metricsOpt.map(_.resultSerializationTime).getOrElse(0L)
553-
val serializationTimeProportion =
554-
(serializationTime.toDouble / totalExecutionTime * 100).toLong
556+
val serializationTimeProportion = toProportion(serializationTime)
555557
val deserializationTime = metricsOpt.map(_.executorDeserializeTime).getOrElse(0L)
556-
val deserializationTimeProportion =
557-
(deserializationTime.toDouble / totalExecutionTime * 100).toLong
558+
val deserializationTimeProportion = toProportion(deserializationTime)
558559
val gettingResultTime = getGettingResultTime(taskUIData.taskInfo)
559-
val gettingResultTimeProportion =
560-
(gettingResultTime.toDouble / totalExecutionTime * 100).toLong
560+
val gettingResultTimeProportion = toProportion(gettingResultTime)
561561
val schedulerDelay = totalExecutionTime -
562562
(executorComputingTime + shuffleReadTime + shuffleWriteTime +
563563
serializationTime + deserializationTime + gettingResultTime)
@@ -585,22 +585,22 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
585585
val timelineObject =
586586
s"""
587587
{
588-
'className': 'task task-assignment-timeline-object ${classNameByStatus}',
589-
'group': '${executorId}',
588+
'className': 'task task-assignment-timeline-object $classNameByStatus',
589+
'group': '$executorId',
590590
'content': '<div class="task-assignment-timeline-content"' +
591591
'data-toggle="tooltip" data-placement="top"' +
592592
'data-html="true" data-container="body"' +
593593
'data-title="${s"Task " + index + " (attempt " + attempt + ")"}<br>' +
594594
'Status: ${taskInfo.status}<br>' +
595595
'Launch Time: ${UIUtils.formatDate(new Date(launchTime))}' +
596596
'${
597-
if (!isRunning) {
597+
if (!taskInfo.running) {
598598
s"""<br>Finish Time: ${UIUtils.formatDate(new Date(finishTime))}"""
599599
} else {
600600
""
601601
}
602602
}' +
603-
'<br>Scheduler Delay: ${schedulerDelay} ms' +
603+
'<br>Scheduler Delay: $schedulerDelay ms' +
604604
'<br>Task Deserialization Time: ${UIUtils.formatDuration(deserializationTime)}' +
605605
'<br>Shuffle Read Time: ${UIUtils.formatDuration(shuffleReadTime)}' +
606606
'<br>Executor Computing Time: ${UIUtils.formatDuration(executorComputingTime)}' +
@@ -609,28 +609,28 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
609609
'<br>Getting Result Time: ${UIUtils.formatDuration(gettingResultTime)}">' +
610610
'<svg class="task-assignment-timeline-duration-bar">' +
611611
'<rect class="scheduler-delay-proportion" ' +
612-
'x="${schedulerDelayProportionPos}%" y="0px" height="26px"' +
613-
'width="${schedulerDelayProportion}%""></rect>' +
612+
'x="$schedulerDelayProportionPos%" y="0px" height="26px"' +
613+
'width="$schedulerDelayProportion%""></rect>' +
614614
'<rect class="deserialization-time-proportion" '+
615-
'x="${deserializationTimeProportionPos}%" y="0px" height="26px"' +
616-
'width="${deserializationTimeProportion}%"></rect>' +
615+
'x="$deserializationTimeProportionPos%" y="0px" height="26px"' +
616+
'width="$deserializationTimeProportion%"></rect>' +
617617
'<rect class="shuffle-read-time-proportion" ' +
618-
'x="${shuffleReadTimeProportionPos}%" y="0px" height="26px"' +
619-
'width="${shuffleReadTimeProportion}%"></rect>' +
618+
'x="$shuffleReadTimeProportionPos%" y="0px" height="26px"' +
619+
'width="$shuffleReadTimeProportion%"></rect>' +
620620
'<rect class="executor-runtime-proportion" ' +
621-
'x="${executorRuntimeProportionPos}%" y="0px" height="26px"' +
622-
'width="${executorComputingTimeProportion}%"></rect>' +
621+
'x="$executorRuntimeProportionPos%" y="0px" height="26px"' +
622+
'width="$executorComputingTimeProportion%"></rect>' +
623623
'<rect class="shuffle-write-time-proportion" ' +
624-
'x="${shuffleWriteTimeProportionPos}%" y="0px" height="26px"' +
625-
'width="${shuffleWriteTimeProportion}%"></rect>' +
624+
'x="$shuffleWriteTimeProportionPos%" y="0px" height="26px"' +
625+
'width="$shuffleWriteTimeProportion%"></rect>' +
626626
'<rect class="serialization-time-proportion" ' +
627-
'x="${serializationTimeProportionPos}%" y="0px" height="26px"' +
628-
'width="${serializationTimeProportion}%"></rect>' +
627+
'x="$serializationTimeProportionPos%" y="0px" height="26px"' +
628+
'width="$serializationTimeProportion%"></rect>' +
629629
'<rect class="getting-result-time-proportion" ' +
630-
'x="${gettingResultTimeProportionPos}%" y="0px" height="26px"' +
631-
'width="${gettingResultTimeProportion}%"></rect></svg>',
632-
'start': new Date(${launchTime}),
633-
'end': new Date(${finishTime})
630+
'x="$gettingResultTimeProportionPos%" y="0px" height="26px"' +
631+
'width="$gettingResultTimeProportion%"></rect></svg>',
632+
'start': new Date($launchTime),
633+
'end': new Date($finishTime)
634634
}
635635
"""
636636
timelineObject
@@ -640,23 +640,29 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
640640
case (executorId, host) =>
641641
s"""
642642
{
643-
'id': '${executorId}',
644-
'content': '${executorId} / ${host}',
643+
'id': '$executorId',
644+
'content': '$executorId / $host',
645645
}
646646
"""
647647
}.mkString("[", ",", "]")
648648

649649
val maxZoom = maxFinishTime - minLaunchTime
650650
<span class="expand-task-assignment-timeline">
651651
<span class="expand-task-assignment-timeline-arrow arrow-closed"></span>
652-
<a>Event Timeline {
653-
if (MAX_TIMELINE_TASKS <= numEffectiveTasks) {
654-
s"(Most recent ${MAX_TIMELINE_TASKS})"
655-
}
656-
}
657-
</a>
652+
<a>Event Timeline</a>
658653
</span> ++
659654
<div id="task-assignment-timeline" class="collapsed">
655+
{
656+
if (MAX_TIMELINE_TASKS < numEffectiveTasks) {
657+
<strong>
658+
This stage has more than the maximum number of tasks that can be shown in the
659+
visualization! Only the first {MAX_TIMELINE_TASKS} tasks
660+
(of {numEffectiveTasks} total) are shown.
661+
</strong>
662+
} else {
663+
Seq.empty
664+
}
665+
}
660666
<div class="control-panel">
661667
<div id="task-assignment-timeline-zoom-lock">
662668
<input type="checkbox"></input>
@@ -667,7 +673,7 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
667673
</div> ++
668674
<script type="text/javascript">
669675
{Unparsed(s"drawTaskAssignmentTimeline(" +
670-
s"${groupArrayStr}, ${executorsArrayStr}, ${minLaunchTime}, ${maxZoom})")}
676+
s"$groupArrayStr, $executorsArrayStr, $minLaunchTime, $maxZoom)")}
671677
</script>
672678
}
673679

dev/create-release/create-release.sh

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -118,14 +118,14 @@ if [[ ! "$@" =~ --skip-publish ]]; then
118118

119119
rm -rf $SPARK_REPO
120120

121-
build/mvn -DskipTests -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
122-
-Pyarn -Phive -Phive-thriftserver -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
121+
build/mvn -DskipTests -Pyarn -Phive \
122+
-Phive-thriftserver -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
123123
clean install
124124

125125
./dev/change-version-to-2.11.sh
126126

127-
build/mvn -DskipTests -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
128-
-Dscala-2.11 -Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
127+
build/mvn -DskipTests -Pyarn -Phive \
128+
-Dscala-2.11 -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
129129
clean install
130130

131131
./dev/change-version-to-2.10.sh
@@ -228,9 +228,9 @@ if [[ ! "$@" =~ --skip-package ]]; then
228228

229229
# We increment the Zinc port each time to avoid OOM's and other craziness if multiple builds
230230
# share the same Zinc server.
231-
make_binary_release "hadoop1" "-Phive -Phive-thriftserver -Dhadoop.version=1.0.4" "3030" &
232-
make_binary_release "hadoop1-scala2.11" "-Phive -Dscala-2.11" "3031" &
233-
make_binary_release "cdh4" "-Phive -Phive-thriftserver -Dhadoop.version=2.0.0-mr1-cdh4.2.0" "3032" &
231+
make_binary_release "hadoop1" "-Phadoop-1 -Phive -Phive-thriftserver" "3030" &
232+
make_binary_release "hadoop1-scala2.11" "-Phadoop-1 -Phive -Dscala-2.11" "3031" &
233+
make_binary_release "cdh4" "-Phadoop-1 -Phive -Phive-thriftserver -Dhadoop.version=2.0.0-mr1-cdh4.2.0" "3032" &
234234
make_binary_release "hadoop2.3" "-Phadoop-2.3 -Phive -Phive-thriftserver -Pyarn" "3033" &
235235
make_binary_release "hadoop2.4" "-Phadoop-2.4 -Phive -Phive-thriftserver -Pyarn" "3034" &
236236
make_binary_release "mapr3" "-Pmapr3 -Phive -Phive-thriftserver" "3035" &

dev/run-tests

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -40,11 +40,11 @@ function handle_error () {
4040
{
4141
if [ -n "$AMPLAB_JENKINS_BUILD_PROFILE" ]; then
4242
if [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop1.0" ]; then
43-
export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=1.0.4"
43+
export SBT_MAVEN_PROFILES_ARGS="-Phadoop-1 -Dhadoop.version=1.0.4"
4444
elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.0" ]; then
45-
export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=2.0.0-mr1-cdh4.1.1"
45+
export SBT_MAVEN_PROFILES_ARGS="-Phadoop-1 -Dhadoop.version=2.0.0-mr1-cdh4.1.1"
4646
elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.2" ]; then
47-
export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0"
47+
export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.2"
4848
elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.3" ]; then
4949
export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0"
5050
fi

dev/scalastyle

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@
2020
echo -e "q\n" | build/sbt -Phive -Phive-thriftserver scalastyle > scalastyle.txt
2121
echo -e "q\n" | build/sbt -Phive -Phive-thriftserver test:scalastyle >> scalastyle.txt
2222
# Check style with YARN built too
23-
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 scalastyle >> scalastyle.txt
24-
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 test:scalastyle >> scalastyle.txt
23+
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 scalastyle >> scalastyle.txt
24+
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 test:scalastyle >> scalastyle.txt
2525

2626
ERRORS=$(cat scalastyle.txt | awk '{if($1~/error/)print}')
2727
rm scalastyle.txt

docs/building-spark.md

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -59,14 +59,14 @@ You can fix this by setting the `MAVEN_OPTS` variable as discussed before.
5959

6060
# Specifying the Hadoop Version
6161

62-
Because HDFS is not protocol-compatible across versions, if you want to read from HDFS, you'll need to build Spark against the specific HDFS version in your environment. You can do this through the "hadoop.version" property. If unset, Spark will build against Hadoop 1.0.4 by default. Note that certain build profiles are required for particular Hadoop versions:
62+
Because HDFS is not protocol-compatible across versions, if you want to read from HDFS, you'll need to build Spark against the specific HDFS version in your environment. You can do this through the "hadoop.version" property. If unset, Spark will build against Hadoop 2.2.0 by default. Note that certain build profiles are required for particular Hadoop versions:
6363

6464
<table class="table">
6565
<thead>
6666
<tr><th>Hadoop version</th><th>Profile required</th></tr>
6767
</thead>
6868
<tbody>
69-
<tr><td>1.x to 2.1.x</td><td>(none)</td></tr>
69+
<tr><td>1.x to 2.1.x</td><td>hadoop-1</td></tr>
7070
<tr><td>2.2.x</td><td>hadoop-2.2</td></tr>
7171
<tr><td>2.3.x</td><td>hadoop-2.3</td></tr>
7272
<tr><td>2.4.x</td><td>hadoop-2.4</td></tr>
@@ -77,19 +77,20 @@ For Apache Hadoop versions 1.x, Cloudera CDH "mr1" distributions, and other Hado
7777

7878
{% highlight bash %}
7979
# Apache Hadoop 1.2.1
80-
mvn -Dhadoop.version=1.2.1 -DskipTests clean package
80+
mvn -Dhadoop.version=1.2.1 -Phadoop-1 -DskipTests clean package
8181

8282
# Cloudera CDH 4.2.0 with MapReduce v1
83-
mvn -Dhadoop.version=2.0.0-mr1-cdh4.2.0 -DskipTests clean package
83+
mvn -Dhadoop.version=2.0.0-mr1-cdh4.2.0 -Phadoop-1 -DskipTests clean package
8484
{% endhighlight %}
8585

8686
You can enable the "yarn" profile and optionally set the "yarn.version" property if it is different from "hadoop.version". Spark only supports YARN versions 2.2.0 and later.
8787

8888
Examples:
8989

9090
{% highlight bash %}
91+
9192
# Apache Hadoop 2.2.X
92-
mvn -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 -DskipTests clean package
93+
mvn -Pyarn -Phadoop-2.2 -DskipTests clean package
9394

9495
# Apache Hadoop 2.3.X
9596
mvn -Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0 -DskipTests clean package

docs/hadoop-third-party-distributions.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ property. For certain versions, you will need to specify additional profiles. Fo
1414
see the guide on [building with maven](building-spark.html#specifying-the-hadoop-version):
1515

1616
mvn -Dhadoop.version=1.0.4 -DskipTests clean package
17-
mvn -Phadoop-2.2 -Dhadoop.version=2.2.0 -DskipTests clean package
17+
mvn -Phadoop-2.3 -Dhadoop.version=2.3.0 -DskipTests clean package
1818

1919
The table below lists the corresponding `hadoop.version` code for each CDH/HDP release. Note that
2020
some Hadoop releases are binary compatible across client versions. This means the pre-built Spark

0 commit comments

Comments (0)