
Commit 1bf8528

Remove the code that checks the deprecated config spark.sql.unsafe.enabled.
1 parent c3d08e2 commit 1bf8528
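
In short: StagePage and TaskPagedTable used to gate the "Peak Execution Memory" UI elements on spark.sql.unsafe.enabled, which defaulted to true, so the guard was effectively always on; this commit deletes the guard and renders the metric unconditionally. Below is a minimal, self-contained sketch of the removed pattern, with a plain Map standing in for SparkConf (the object and helper names here are illustrative, not Spark's):

object DeprecatedFlagSketch {
  // Stand-in for SparkConf.getBoolean: read a flag, falling back to a default.
  def getBoolean(conf: Map[String, String], key: String, default: Boolean): Boolean =
    conf.get(key).map(_.toBoolean).getOrElse(default)

  def main(args: Array[String]): Unit = {
    val conf = Map.empty[String, String] // nothing set, as in most deployments

    // Before: the peak-memory cell was gated on a deprecated flag whose
    // default was true, so the guard was almost always a no-op.
    val displayPeakExecutionMemory =
      getBoolean(conf, "spark.sql.unsafe.enabled", default = true)
    val before = if (displayPeakExecutionMemory) Seq("<td>peak memory</td>") else Nil

    // After this commit: the cell is rendered unconditionally.
    val after = Seq("<td>peak memory</td>")

    assert(before == after) // identical output in the default configuration
    println(after.mkString)
  }
}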

File tree

2 files changed: +18 −47 lines


core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala

Lines changed: 15 additions & 34 deletions
@@ -70,8 +70,6 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
   // if we find that it's okay.
   private val MAX_TIMELINE_TASKS = parent.conf.getInt("spark.ui.timeline.tasks.maximum", 1000)
 
-  private val displayPeakExecutionMemory = parent.conf.getBoolean("spark.sql.unsafe.enabled", true)
-
   private def getLocalitySummaryString(stageData: StageUIData): String = {
     val localities = stageData.taskData.values.map(_.taskInfo.taskLocality)
     val localityCounts = localities.groupBy(identity).mapValues(_.size)
@@ -252,15 +250,13 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
                 <span class="additional-metric-title">Getting Result Time</span>
               </span>
             </li>
-            {if (displayPeakExecutionMemory) {
-              <li>
-                <span data-toggle="tooltip"
-                      title={ToolTips.PEAK_EXECUTION_MEMORY} data-placement="right">
-                  <input type="checkbox" name={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}/>
-                  <span class="additional-metric-title">Peak Execution Memory</span>
-                </span>
-              </li>
-            }}
+            <li>
+              <span data-toggle="tooltip"
+                    title={ToolTips.PEAK_EXECUTION_MEMORY} data-placement="right">
+                <input type="checkbox" name={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}/>
+                <span class="additional-metric-title">Peak Execution Memory</span>
+              </span>
+            </li>
           </ul>
         </div>
       </div>
@@ -532,13 +528,9 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
             {serializationQuantiles}
           </tr>,
           <tr class={TaskDetailsClassNames.GETTING_RESULT_TIME}>{gettingResultQuantiles}</tr>,
-          if (displayPeakExecutionMemory) {
-            <tr class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
-              {peakExecutionMemoryQuantiles}
-            </tr>
-          } else {
-            Nil
-          },
+          <tr class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
+            {peakExecutionMemoryQuantiles}
+          </tr>,
           if (stageData.hasInput) <tr>{inputQuantiles}</tr> else Nil,
           if (stageData.hasOutput) <tr>{outputQuantiles}</tr> else Nil,
           if (stageData.hasShuffleRead) {
@@ -1166,9 +1158,6 @@ private[ui] class TaskPagedTable(
     desc: Boolean,
     executorsListener: ExecutorsListener) extends PagedTable[TaskTableRowData] {
 
-  // We only track peak memory used for unsafe operators
-  private val displayPeakExecutionMemory = conf.getBoolean("spark.sql.unsafe.enabled", true)
-
   override def tableId: String = "task-table"
 
   override def tableCssClass: String =
@@ -1217,14 +1206,8 @@ private[ui] class TaskPagedTable(
       ("Task Deserialization Time", TaskDetailsClassNames.TASK_DESERIALIZATION_TIME),
       ("GC Time", ""),
       ("Result Serialization Time", TaskDetailsClassNames.RESULT_SERIALIZATION_TIME),
-      ("Getting Result Time", TaskDetailsClassNames.GETTING_RESULT_TIME)) ++
-      {
-        if (displayPeakExecutionMemory) {
-          Seq(("Peak Execution Memory", TaskDetailsClassNames.PEAK_EXECUTION_MEMORY))
-        } else {
-          Nil
-        }
-      } ++
+      ("Getting Result Time", TaskDetailsClassNames.GETTING_RESULT_TIME),
+      ("Peak Execution Memory", TaskDetailsClassNames.PEAK_EXECUTION_MEMORY)) ++
       {if (hasAccumulators) Seq(("Accumulators", "")) else Nil} ++
       {if (hasInput) Seq(("Input Size / Records", "")) else Nil} ++
       {if (hasOutput) Seq(("Output Size / Records", "")) else Nil} ++
@@ -1316,11 +1299,9 @@ private[ui] class TaskPagedTable(
       <td class={TaskDetailsClassNames.GETTING_RESULT_TIME}>
        {UIUtils.formatDuration(task.gettingResultTime)}
       </td>
-      {if (displayPeakExecutionMemory) {
-        <td class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
-          {Utils.bytesToString(task.peakExecutionMemoryUsed)}
-        </td>
-      }}
+      <td class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
+        {Utils.bytesToString(task.peakExecutionMemoryUsed)}
+      </td>
       {if (task.accumulators.nonEmpty) {
         <td>{Unparsed(task.accumulators.get)}</td>
       }}
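
For context, TaskPagedTable assembles its header row by concatenating a fixed Seq of (title, cssClass) pairs with conditional segments; after this change, Peak Execution Memory moves into the fixed part. A self-contained sketch of that concatenation pattern, with abbreviated class names and stand-in flags (HeaderSketch and its values are illustrative, not the Spark source):

object HeaderSketch {
  // Fixed columns, plus segments that only appear when the stage has the data.
  def headers(hasInput: Boolean, hasOutput: Boolean): Seq[(String, String)] =
    Seq(
      ("Getting Result Time", "getting_result_time"),
      ("Peak Execution Memory", "peak_execution_memory")) ++ // now unconditional
    {if (hasInput) Seq(("Input Size / Records", "")) else Nil} ++
    {if (hasOutput) Seq(("Output Size / Records", "")) else Nil}

  def main(args: Array[String]): Unit = {
    // Peak Execution Memory appears regardless of any configuration or flag.
    assert(headers(hasInput = false, hasOutput = false)
      .exists(_._1 == "Peak Execution Memory"))
    println(headers(hasInput = true, hasOutput = false).map(_._1).mkString(", "))
  }
}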

core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala

Lines changed: 3 additions & 13 deletions
@@ -35,25 +35,15 @@ class StagePageSuite extends SparkFunSuite with LocalSparkContext {
 
   private val peakExecutionMemory = 10
 
-  test("peak execution memory only displayed if unsafe is enabled") {
-    val unsafeConf = "spark.sql.unsafe.enabled"
-    val conf = new SparkConf(false).set(unsafeConf, "true")
+  test("peak execution memory should be displayed") {
+    val conf = new SparkConf(false)
     val html = renderStagePage(conf).toString().toLowerCase
     val targetString = "peak execution memory"
     assert(html.contains(targetString))
-    // Disable unsafe and make sure it's not there
-    val conf2 = new SparkConf(false).set(unsafeConf, "false")
-    val html2 = renderStagePage(conf2).toString().toLowerCase
-    assert(!html2.contains(targetString))
-    // Avoid setting anything; it should be displayed by default
-    val conf3 = new SparkConf(false)
-    val html3 = renderStagePage(conf3).toString().toLowerCase
-    assert(html3.contains(targetString))
   }
 
   test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
-    val unsafeConf = "spark.sql.unsafe.enabled"
-    val conf = new SparkConf(false).set(unsafeConf, "true")
+    val conf = new SparkConf(false)
     val html = renderStagePage(conf).toString().toLowerCase
     // verify min/25/50/75/max show task value not cumulative values
     assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5))
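
The SPARK-10543 assertion relies on Scala's string repetition: repeating the expected <td> markup five times matches only when the min/25th/50th/75th/max quantile cells all show the same per-task value rather than a running total. A minimal sketch of that idiom, where renderCells is a hypothetical stand-in for the suite's private renderStagePage helper:

object QuantileAssertionSketch {
  val peakExecutionMemory = 10

  // Hypothetical renderer: each of min/25/50/75/max shows the per-task value.
  def renderCells(perTaskBytes: Int): String =
    s"<td>$perTaskBytes.0 b</td>" * 5

  def main(args: Array[String]): Unit = {
    val html = renderCells(peakExecutionMemory).toLowerCase
    // String multiplication builds the expected run of five identical cells,
    // so a single contains() call verifies all quantiles at once.
    assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5))
    println("per-task quantile check passed")
  }
}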
