From 12769f4e7a8ab564c01c0a9a98f48f8fbbf7642c Mon Sep 17 00:00:00 2001
From: pierre-borckmans
Date: Wed, 30 Apr 2014 17:52:12 +0200
Subject: [PATCH 1/5] Remove hardcoded Spark version - use the version from the manifest (SBT) instead

---
 core/src/main/scala/org/apache/spark/SparkContext.scala   | 3 +--
 core/src/main/scala/org/apache/spark/util/Utils.scala     | 5 +++++
 .../main/scala/org/apache/spark/repl/SparkILoopInit.scala | 6 ++++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index e6c9b7000d819..9613fc5f4b6f7 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1216,7 +1216,7 @@ class SparkContext(config: SparkConf) extends Logging {
  */
 object SparkContext extends Logging {
 
-  private[spark] val SPARK_VERSION = "1.0.0"
+  private[spark] val SPARK_VERSION = getClass.getPackage.getImplementationVersion
 
   private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"
 
@@ -1524,4 +1524,3 @@ private[spark] class WritableConverter[T](
     val writableClass: ClassTag[T] => Class[_ <: Writable],
     val convert: Writable => T)
   extends Serializable
-
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 166f48ce7342e..ff512ae12c1e9 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1004,6 +1004,11 @@ private[spark] object Utils extends Logging {
     obj.getClass.getSimpleName.replace("$", "")
   }
 
+  /** Return the current version of Spark */
+  def getSparkVersion(): String = {
+    getClass.getPackage.getImplementationVersion
+  }
+
   /** Return an option that translates JNothing to None */
   def jsonOption(json: JValue): Option[JValue] = {
     json match {
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 910b31d209e13..9eff7f9b82db1 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -14,6 +14,8 @@ import scala.reflect.internal.util.Position
 import scala.util.control.Exception.ignoring
 import scala.tools.nsc.util.stackTraceString
 
+import org.apache.spark.util.Utils
+
 /**
  *  Machinery for the asynchronous initialization of the repl.
  */
@@ -26,9 +28,9 @@ trait SparkILoopInit {
       ____              __
      / __/__  ___ _____/ /__
     _\ \/ _ \/ _ `/ __/ '_/
-   /___/ .__/\_,_/_/ /_/\_\   version 1.0.0-SNAPSHOT
+   /___/ .__/\_,_/_/ /_/\_\   version %s
       /_/
-""")
+""").format(Utils.getSparkVersion)
     import Properties._
     val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
       versionString, javaVmName, javaVersion)

From f7c2298a7769d83fe52b6e9516d2b3ebfc219056 Mon Sep 17 00:00:00 2001
From: pierre-borckmans
Date: Wed, 30 Apr 2014 17:52:58 +0200
Subject: [PATCH 2/5] Add spark version label in ui (master index page)

---
 .../main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 8c1d6c7cce450..57f73e8a36fe1 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -72,6 +72,7 @@ private[spark] class IndexPage(parent: MasterWebUI) {
    +
  • Version: {Utils.getSparkVersion}
  • URL: {state.uri}
  • Workers: {state.workers.size}
               <li><strong>Cores:</strong> {state.workers.map(_.cores).sum} Total,

From 0f88e9565c9b7dec2e116462a0816642f28aa5c8 Mon Sep 17 00:00:00 2001
From: pierre-borckmans
Date: Wed, 30 Apr 2014 18:15:49 +0200
Subject: [PATCH 3/5] Changed SparkContext SPARK_VERSION to use Utils.getSparkVersion

---
 core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 6be403c1baf0e..bcdc8554519ca 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1253,7 +1253,7 @@ class SparkContext(config: SparkConf) extends Logging {
  */
 object SparkContext extends Logging {
 
-  private[spark] val SPARK_VERSION = getClass.getPackage.getImplementationVersion
+  private[spark] val SPARK_VERSION = Utils.getSparkVersion
 
   private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"
 

From 142be891aa75753cdb7d0bb104036049082339d2 Mon Sep 17 00:00:00 2001
From: pierre-borckmans
Date: Wed, 30 Apr 2014 18:19:59 +0200
Subject: [PATCH 4/5] Modified Spark REPL to display Utils.getSparkVersion in Welcome message

---
 repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 9eff7f9b82db1..8fb7d530132ce 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -30,7 +30,7 @@ trait SparkILoopInit {
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version %s
      /_/
-""").format(Utils.getSparkVersion)
+""".format(Utils.getSparkVersion))
     import Properties._
     val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
       versionString, javaVmName, javaVersion)

From da8886ea7f4731269051f38d17b1f58f271528da Mon Sep 17 00:00:00 2001
From: pierre-borckmans
Date: Wed, 30 Apr 2014 19:50:15 +0200
Subject: [PATCH 5/5] Changed getSparkVersion signature to omit the parentheses

---
 core/src/main/scala/org/apache/spark/util/Utils.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 76ee0d12b15d4..4eb99d37338c5 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1038,7 +1038,7 @@ private[spark] object Utils extends Logging {
   }
 
   /** Return the current version of Spark */
-  def getSparkVersion(): String = {
+  def getSparkVersion: String = {
     getClass.getPackage.getImplementationVersion
   }
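
For context on the approach taken by these patches: java.lang.Package.getImplementationVersion reads the Implementation-Version attribute from the jar's META-INF/MANIFEST.MF (the attribute the sbt build is expected to write) and returns null when the class was not loaded from a jar that declares it, for example when running from compiled classes in an IDE. Below is a minimal sketch of that behaviour; the VersionProbe object and the "unknown" fallback are illustrative only and are not part of these patches.

// Sketch only: VersionProbe and the "unknown" fallback are illustrative, not part of Spark.
object VersionProbe {
  // Package.getImplementationVersion returns the manifest's Implementation-Version,
  // or null when the class was not loaded from a jar that declares it; getPackage
  // itself can be null for classes in the default package, hence the nested Option.
  def sparkVersionOrUnknown: String =
    Option(getClass.getPackage)
      .flatMap(pkg => Option(pkg.getImplementationVersion))
      .getOrElse("unknown")

  def main(args: Array[String]): Unit =
    println("Detected version: " + sparkVersionOrUnknown)
}

Run from a jar built with the version in its manifest, this prints that version; run from plain class files, it falls back to "unknown". The same caveat applies to Utils.getSparkVersion above, which only reports a meaningful version when the manifest attribute is present at runtime.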