diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index eb14d87467af7..bcdc8554519ca 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1253,7 +1253,7 @@ class SparkContext(config: SparkConf) extends Logging {
*/
object SparkContext extends Logging {
- private[spark] val SPARK_VERSION = "1.0.0"
+ private[spark] val SPARK_VERSION = Utils.getSparkVersion
private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"
@@ -1565,4 +1565,3 @@ private[spark] class WritableConverter[T](
val writableClass: ClassTag[T] => Class[_ <: Writable],
val convert: Writable => T)
extends Serializable
-
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
index 7ca3b08a28728..c0d94ea4b4740 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
@@ -72,6 +72,7 @@ private[spark] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
+ - Version: {Utils.getSparkVersion}
- URL: {state.uri}
- Workers: {state.workers.size}
- Cores: {state.workers.map(_.cores).sum} Total,
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 79f314c8dd36c..4eb99d37338c5 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1037,6 +1037,11 @@ private[spark] object Utils extends Logging {
obj.getClass.getSimpleName.replace("$", "")
}
+ /** Return the current version of Spark, or "Unknown" if no manifest version is available */
+ def getSparkVersion: String = {
+ Option(getClass.getPackage.getImplementationVersion).getOrElse("Unknown")
+ }
+
/** Return an option that translates JNothing to None */
def jsonOption(json: JValue): Option[JValue] = {
json match {
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 910b31d209e13..8fb7d530132ce 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -14,6 +14,8 @@ import scala.reflect.internal.util.Position
import scala.util.control.Exception.ignoring
import scala.tools.nsc.util.stackTraceString
+import org.apache.spark.util.Utils
+
/**
* Machinery for the asynchronous initialization of the repl.
*/
@@ -26,9 +28,9 @@ trait SparkILoopInit {
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
- /___/ .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
+ /___/ .__/\_,_/_/ /_/\_\ version %s
/_/
-""")
+""".format(Utils.getSparkVersion))
import Properties._
val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
versionString, javaVmName, javaVersion)