Skip to content

Commit 2c5dee0

Browse files
committed
Revert "[SPARK-11206] Support SQL UI on the history server"
This reverts commit cc243a0 / PR #9297. I'm reverting this because it broke SQLListenerMemoryLeakSuite in the master Maven builds. See #9991 for a discussion of why this broke the tests.
1 parent f2fbfa4 commit 2c5dee0

File tree

21 files changed

+135
-327
lines changed

21 files changed

+135
-327
lines changed

.rat-excludes

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -82,5 +82,4 @@ INDEX
8282
gen-java.*
8383
.*avpr
8484
org.apache.spark.sql.sources.DataSourceRegister
85-
org.apache.spark.scheduler.SparkHistoryListenerFactory
8685
.*parquet

core/src/main/java/org/apache/spark/JavaSparkListener.java

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -82,7 +82,4 @@ public void onExecutorRemoved(SparkListenerExecutorRemoved executorRemoved) { }
8282
@Override
8383
public void onBlockUpdated(SparkListenerBlockUpdated blockUpdated) { }
8484

85-
@Override
86-
public void onOtherEvent(SparkListenerEvent event) { }
87-
8885
}

core/src/main/java/org/apache/spark/SparkFirehoseListener.java

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -118,8 +118,4 @@ public void onBlockUpdated(SparkListenerBlockUpdated blockUpdated) {
118118
onEvent(blockUpdated);
119119
}
120120

121-
@Override
122-
public void onOtherEvent(SparkListenerEvent event) {
123-
onEvent(event);
124-
}
125121
}

core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -207,10 +207,6 @@ private[spark] class EventLoggingListener(
207207
// No-op because logging every update would be overkill
208208
override def onExecutorMetricsUpdate(event: SparkListenerExecutorMetricsUpdate): Unit = { }
209209

210-
override def onOtherEvent(event: SparkListenerEvent): Unit = {
211-
logEvent(event, flushLogger = true)
212-
}
213-
214210
/**
215211
* Stop logging events. The event log file will be renamed so that it loses the
216212
* ".inprogress" suffix.

core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala

Lines changed: 2 additions & 22 deletions
Original file line number | Diff line number | Diff line change
@@ -22,19 +22,15 @@ import java.util.Properties
2222
import scala.collection.Map
2323
import scala.collection.mutable
2424

25-
import com.fasterxml.jackson.annotation.JsonTypeInfo
26-
27-
import org.apache.spark.{Logging, SparkConf, TaskEndReason}
25+
import org.apache.spark.{Logging, TaskEndReason}
2826
import org.apache.spark.annotation.DeveloperApi
2927
import org.apache.spark.executor.TaskMetrics
3028
import org.apache.spark.scheduler.cluster.ExecutorInfo
3129
import org.apache.spark.storage.{BlockManagerId, BlockUpdatedInfo}
3230
import org.apache.spark.util.{Distribution, Utils}
33-
import org.apache.spark.ui.SparkUI
3431

3532
@DeveloperApi
36-
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "Event")
37-
trait SparkListenerEvent
33+
sealed trait SparkListenerEvent
3834

3935
@DeveloperApi
4036
case class SparkListenerStageSubmitted(stageInfo: StageInfo, properties: Properties = null)
@@ -134,17 +130,6 @@ case class SparkListenerApplicationEnd(time: Long) extends SparkListenerEvent
134130
*/
135131
private[spark] case class SparkListenerLogStart(sparkVersion: String) extends SparkListenerEvent
136132

137-
/**
138-
* Interface for creating history listeners defined in other modules like SQL, which are used to
139-
* rebuild the history UI.
140-
*/
141-
private[spark] trait SparkHistoryListenerFactory {
142-
/**
143-
* Create listeners used to rebuild the history UI.
144-
*/
145-
def createListeners(conf: SparkConf, sparkUI: SparkUI): Seq[SparkListener]
146-
}
147-
148133
/**
149134
* :: DeveloperApi ::
150135
* Interface for listening to events from the Spark scheduler. Note that this is an internal
@@ -238,11 +223,6 @@ trait SparkListener {
238223
* Called when the driver receives a block update info.
239224
*/
240225
def onBlockUpdated(blockUpdated: SparkListenerBlockUpdated) { }
241-
242-
/**
243-
* Called when other events like SQL-specific events are posted.
244-
*/
245-
def onOtherEvent(event: SparkListenerEvent) { }
246226
}
247227

248228
/**

core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -61,7 +61,6 @@ private[spark] trait SparkListenerBus extends ListenerBus[SparkListener, SparkLi
6161
case blockUpdated: SparkListenerBlockUpdated =>
6262
listener.onBlockUpdated(blockUpdated)
6363
case logStart: SparkListenerLogStart => // ignore event log metadata
64-
case _ => listener.onOtherEvent(event)
6564
}
6665
}
6766

core/src/main/scala/org/apache/spark/ui/SparkUI.scala

Lines changed: 2 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -17,13 +17,10 @@
1717

1818
package org.apache.spark.ui
1919

20-
import java.util.{Date, ServiceLoader}
21-
22-
import scala.collection.JavaConverters._
20+
import java.util.Date
2321

2422
import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationAttemptInfo, ApplicationInfo,
2523
UIRoot}
26-
import org.apache.spark.util.Utils
2724
import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkContext}
2825
import org.apache.spark.scheduler._
2926
import org.apache.spark.storage.StorageStatusListener
@@ -157,16 +154,7 @@ private[spark] object SparkUI {
157154
appName: String,
158155
basePath: String,
159156
startTime: Long): SparkUI = {
160-
val sparkUI = create(
161-
None, conf, listenerBus, securityManager, appName, basePath, startTime = startTime)
162-
163-
val listenerFactories = ServiceLoader.load(classOf[SparkHistoryListenerFactory],
164-
Utils.getContextOrSparkClassLoader).asScala
165-
listenerFactories.foreach { listenerFactory =>
166-
val listeners = listenerFactory.createListeners(conf, sparkUI)
167-
listeners.foreach(listenerBus.addListener)
168-
}
169-
sparkUI
157+
create(None, conf, listenerBus, securityManager, appName, basePath, startTime = startTime)
170158
}
171159

172160
/**

core/src/main/scala/org/apache/spark/util/JsonProtocol.scala

Lines changed: 2 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -19,21 +19,19 @@ package org.apache.spark.util
1919

2020
import java.util.{Properties, UUID}
2121

22+
import org.apache.spark.scheduler.cluster.ExecutorInfo
23+
2224
import scala.collection.JavaConverters._
2325
import scala.collection.Map
2426

25-
import com.fasterxml.jackson.databind.ObjectMapper
26-
import com.fasterxml.jackson.module.scala.DefaultScalaModule
2727
import org.json4s.DefaultFormats
2828
import org.json4s.JsonDSL._
2929
import org.json4s.JsonAST._
30-
import org.json4s.jackson.JsonMethods._
3130

3231
import org.apache.spark._
3332
import org.apache.spark.executor._
3433
import org.apache.spark.rdd.RDDOperationScope
3534
import org.apache.spark.scheduler._
36-
import org.apache.spark.scheduler.cluster.ExecutorInfo
3735
import org.apache.spark.storage._
3836

3937
/**
@@ -56,8 +54,6 @@ private[spark] object JsonProtocol {
5654

5755
private implicit val format = DefaultFormats
5856

59-
private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
60-
6157
/** ------------------------------------------------- *
6258
* JSON serialization methods for SparkListenerEvents |
6359
* -------------------------------------------------- */
@@ -100,7 +96,6 @@ private[spark] object JsonProtocol {
10096
executorMetricsUpdateToJson(metricsUpdate)
10197
case blockUpdated: SparkListenerBlockUpdated =>
10298
throw new MatchError(blockUpdated) // TODO(ekl) implement this
103-
case _ => parse(mapper.writeValueAsString(event))
10499
}
105100
}
106101

@@ -516,8 +511,6 @@ private[spark] object JsonProtocol {
516511
case `executorRemoved` => executorRemovedFromJson(json)
517512
case `logStart` => logStartFromJson(json)
518513
case `metricsUpdate` => executorMetricsUpdateFromJson(json)
519-
case other => mapper.readValue(compact(render(json)), Utils.classForName(other))
520-
.asInstanceOf[SparkListenerEvent]
521514
}
522515
}
523516

sql/core/src/main/resources/META-INF/services/org.apache.spark.scheduler.SparkHistoryListenerFactory

Lines changed: 0 additions & 1 deletion
This file was deleted.

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 4 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -1263,8 +1263,6 @@ object SQLContext {
12631263
*/
12641264
@transient private val instantiatedContext = new AtomicReference[SQLContext]()
12651265

1266-
@transient private val sqlListener = new AtomicReference[SQLListener]()
1267-
12681266
/**
12691267
* Get the singleton SQLContext if it exists or create a new one using the given SparkContext.
12701268
*
@@ -1309,10 +1307,6 @@ object SQLContext {
13091307
Option(instantiatedContext.get())
13101308
}
13111309

1312-
private[sql] def clearSqlListener(): Unit = {
1313-
sqlListener.set(null)
1314-
}
1315-
13161310
/**
13171311
* Changes the SQLContext that will be returned in this thread and its children when
13181312
* SQLContext.getOrCreate() is called. This can be used to ensure that a given thread receives
@@ -1361,13 +1355,9 @@ object SQLContext {
13611355
* Create a SQLListener then add it into SparkContext, and create an SQLTab if there is SparkUI.
13621356
*/
13631357
private[sql] def createListenerAndUI(sc: SparkContext): SQLListener = {
1364-
if (sqlListener.get() == null) {
1365-
val listener = new SQLListener(sc.conf)
1366-
if (sqlListener.compareAndSet(null, listener)) {
1367-
sc.addSparkListener(listener)
1368-
sc.ui.foreach(new SQLTab(listener, _))
1369-
}
1370-
}
1371-
sqlListener.get()
1358+
val listener = new SQLListener(sc.conf)
1359+
sc.addSparkListener(listener)
1360+
sc.ui.foreach(new SQLTab(listener, _))
1361+
listener
13721362
}
13731363
}

0 commit comments

Comments (0)