}}
diff --git a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
index d30b987d6ca31..11482d187aeca 100644
--- a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
@@ -35,25 +35,17 @@ class StagePageSuite extends SparkFunSuite with LocalSparkContext {
   private val peakExecutionMemory = 10
 
- test("peak execution memory only displayed if unsafe is enabled") {
- val unsafeConf = "spark.sql.unsafe.enabled"
- val conf = new SparkConf(false).set(unsafeConf, "true")
+ test("peak execution memory should displayed") {
+ val conf = new SparkConf(false)
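+    // No spark.sql.unsafe.enabled flag is set: the metric should render by default.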
     val html = renderStagePage(conf).toString().toLowerCase
     val targetString = "peak execution memory"
     assert(html.contains(targetString))
-    // Disable unsafe and make sure it's not there
-    val conf2 = new SparkConf(false).set(unsafeConf, "false")
-    val html2 = renderStagePage(conf2).toString().toLowerCase
-    assert(!html2.contains(targetString))
-    // Avoid setting anything; it should be displayed by default
-    val conf3 = new SparkConf(false)
-    val html3 = renderStagePage(conf3).toString().toLowerCase
-    assert(html3.contains(targetString))
   }
 
   test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
-    val unsafeConf = "spark.sql.unsafe.enabled"
-    val conf = new SparkConf(false).set(unsafeConf, "true")
+    val conf = new SparkConf(false)
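+    // Default configuration suffices; the summary quantiles should show per-task values.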
     val html = renderStagePage(conf).toString().toLowerCase
     // verify min/25/50/75/max show task value not cumulative values
assert(html.contains(s"