14 changes: 10 additions & 4 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -155,8 +155,14 @@ class SparkContext(
     new MetadataCleaner(MetadataCleanerType.SPARK_CONTEXT, this.cleanup, conf)
 
   // Initialize the Spark UI
-  private[spark] val ui = new SparkUI(this)
-  ui.bind()
+  private[spark] val ui: Option[SparkUI] =
+    if (conf.getBoolean("spark.ui.enabled", true)) {
+      Some(new SparkUI(this))
+    } else {
+      // For tests, do not enable the UI
+      None
+    }
+  ui.foreach(_.bind())
 
   val startTime = System.currentTimeMillis()
 
@@ -202,7 +208,7 @@ class SparkContext(
   @volatile private[spark] var dagScheduler = new DAGScheduler(taskScheduler)
   dagScheduler.start()
 
-  ui.start()
+  ui.foreach(_.start())
 
   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
   val hadoopConfiguration = {
@@ -777,7 +783,7 @@ class SparkContext(
 
   /** Shut down the SparkContext. */
   def stop() {
-    ui.stop()
+    ui.foreach(_.stop())
     // Do this only if not stopped already - best case effort.
     // prevent NPE if stopped more than once.
     val dagSchedulerCopy = dagScheduler
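With this change, sc.ui becomes an Option[SparkUI] gated by the new spark.ui.enabled flag (default true). A minimal sketch of how the flag might be exercised; the master, app name, and assertion are illustrative assumptions, not part of this diff (and since ui is private[spark], the assertion only compiles from within the org.apache.spark package):

import org.apache.spark.{SparkConf, SparkContext}

// Sketch: construct a context with the UI disabled, as a test might.
val conf = new SparkConf()
  .setMaster("local")
  .setAppName("NoUiExample")          // illustrative name
  .set("spark.ui.enabled", "false")   // the flag introduced by this patch
val sc = new SparkContext(conf)
assert(sc.ui.isEmpty)                 // no SparkUI is constructed or bound
sc.stop()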
@@ -44,16 +44,17 @@ private[spark] class SimrSchedulerBackend(
 
     val conf = new Configuration()
     val fs = FileSystem.get(conf)
+    val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
 
     logInfo("Writing to HDFS file: " + driverFilePath)
     logInfo("Writing Akka address: " + driverUrl)
-    logInfo("Writing Spark UI Address: " + sc.ui.appUIAddress)
+    logInfo("Writing Spark UI Address: " + appUIAddress)
 
     // Create temporary file to prevent race condition where executors get empty driverUrl file
     val temp = fs.create(tmpPath, true)
     temp.writeUTF(driverUrl)
     temp.writeInt(maxCores)
-    temp.writeUTF(sc.ui.appUIAddress)
+    temp.writeUTF(appUIAddress)
     temp.close()
 
     // "Atomic" rename
@@ -51,8 +51,9 @@ private[spark] class SparkDeploySchedulerBackend(
     val command = Command(
       "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs)
     val sparkHome = sc.getSparkHome().getOrElse(null)
-    val appDesc = new ApplicationDescription(appName, maxCores, sc.executorMemory, command, sparkHome,
-      "http://" + sc.ui.appUIAddress)
+    val appUIAddress = sc.ui.map { x => "http://" + x.appUIAddress }.getOrElse("")
+    val appDesc = new ApplicationDescription(
+      appName, maxCores, sc.executorMemory, command, sparkHome, appUIAddress)
 
     client = new AppClient(sc.env.actorSystem, masters, appDesc, this, conf)
     client.start()
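Note the fallback here: because the "http://" prefix is applied inside the map, a disabled UI yields an empty string rather than a dangling "http://". A standalone sketch of that behavior, using a plain String in place of SparkUI:

// Sketch: the prefix is only attached when a UI address actually exists.
val uiAddress: Option[String] = None   // stands in for a disabled SparkUI
val appDescAddress = uiAddress.map { x => "http://" + x }.getOrElse("")
assert(appDescAddress == "")           // not "http://"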
4 changes: 4 additions & 0 deletions pom.xml
@@ -636,6 +636,10 @@
         <filereports>${project.build.directory}/SparkTestSuite.txt</filereports>
         <argLine>-Xms64m -Xmx3g</argLine>
         <stderr />
+        <systemProperties>
+          <spark.testing>true</spark.testing>
+          <spark.ui.enabled>false</spark.ui.enabled>
+        </systemProperties>
       </configuration>
       <executions>
         <execution>
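The scalatest Maven plugin passes these entries to the forked test JVM as system properties. A sketch of how test code could observe them; the property names come from this diff, while the reading code is an illustrative assumption:

// Sketch: system properties from the build configuration are visible
// through sys.props inside the forked test JVM.
val uiEnabled = sys.props.getOrElse("spark.ui.enabled", "true").toBoolean
assert(!uiEnabled)   // false under the test configuration above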
1 change: 1 addition & 0 deletions project/SparkBuild.scala
@@ -149,6 +149,7 @@ object SparkBuild extends Build {
     fork := true,
     javaOptions in Test += "-Dspark.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
+    javaOptions in Test += "-Dspark.ui.enabled=false",
     javaOptions += "-Xmx3g",
     // Show full stack trace and duration in test cases.
     testOptions in Test += Tests.Argument("-oDF"),
@@ -227,7 +227,7 @@ class ApplicationMaster(args: ApplicationMasterArguments, conf: Configuration,
       assert(sparkContext != null || count >= numTries)
 
       if (null != sparkContext) {
-        uiAddress = sparkContext.ui.appUIAddress
+        uiAddress = sparkContext.ui.map(_.appUIAddress).getOrElse("")
         this.yarnAllocator = YarnAllocationHandler.newAllocator(
           yarnConf,
           resourceManager,
@@ -199,7 +199,7 @@ class ApplicationMaster(args: ApplicationMasterArguments, conf: Configuration,
       assert(sparkContext != null || numTries >= maxNumTries)
 
       if (sparkContext != null) {
-        uiAddress = sparkContext.ui.appUIAddress
+        uiAddress = sparkContext.ui.map(_.appUIAddress).getOrElse("")
         this.yarnAllocator = YarnAllocationHandler.newAllocator(
           yarnConf,
           amClient,
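Both YARN ApplicationMaster variants use the same fallback as the other call sites: map over the optional UI, otherwise report an empty address. A self-contained sketch of the idiom, with a hypothetical FakeUI standing in for SparkUI:

// Sketch of the recurring pattern; FakeUI is a hypothetical stand-in.
case class FakeUI(appUIAddress: String)
def uiAddressOf(ui: Option[FakeUI]): String =
  ui.map(_.appUIAddress).getOrElse("")
assert(uiAddressOf(Some(FakeUI("host:4040"))) == "host:4040")
assert(uiAddressOf(None) == "")   // a disabled UI degrades to ""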