Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1900,7 +1900,17 @@ object SparkContext extends Logging {

// Configuration key; presumably controls whether a job's tasks are interrupted when the
// job is cancelled — NOTE(review): confirm against where this key is read.
private[spark] val SPARK_JOB_INTERRUPT_ON_CANCEL = "spark.job.interruptOnCancel"

private[spark] val DRIVER_IDENTIFIER = "<driver>"
/**
 * Executor id used for the driver. In earlier versions of Spark this was `<driver>`, but it
 * was changed to `driver` because the angle brackets caused escaping issues in URLs and XML
 * (see SPARK-6716 for more details).
 */
private[spark] val DRIVER_IDENTIFIER = "driver"

/**
 * Legacy executor id for the driver (`<driver>`), retained for backwards-compatibility
 * with identifiers produced before SPARK-6716 renamed it to `driver`.
 */
private[spark] val LEGACY_DRIVER_IDENTIFIER = "<driver>"

// The following deprecated objects have already been copied to `object AccumulatorParam` to
// make the compiler find them automatically. They are duplicate codes only for backward
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,10 @@ class BlockManagerId private (

/** Port recorded in this id (accessor for the backing field `port_`). */
def port: Int = port_

def isDriver: Boolean = { executorId == SparkContext.DRIVER_IDENTIFIER }
/**
 * Whether this id identifies the driver. Matches both the current driver identifier
 * (`driver`) and the legacy one (`<driver>`), so ids written by older Spark versions
 * are still recognized as the driver.
 */
def isDriver: Boolean = {
  // Stable-identifier patterns: each case compares executorId for equality
  // against the corresponding SparkContext constant.
  executorId match {
    case SparkContext.DRIVER_IDENTIFIER | SparkContext.LEGACY_DRIVER_IDENTIFIER => true
    case _ => false
  }
}

override def writeExternal(out: ObjectOutput): Unit = Utils.tryOrIOException {
out.writeUTF(executorId_)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,12 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach
assert(id2_.eq(id1), "Deserialized id2 is not the same object as original id1")
}

test("BlockManagerId.isDriver() backwards-compatibility with legacy driver ids (SPARK-6716)") {
  // Both the current ("driver") and legacy ("<driver>") identifiers must be treated as driver.
  Seq(SparkContext.DRIVER_IDENTIFIER, SparkContext.LEGACY_DRIVER_IDENTIFIER).foreach { driverId =>
    assert(BlockManagerId(driverId, "XXX", 1).isDriver)
  }
  // Any other executor id is not the driver.
  assert(!BlockManagerId("notADriverIdentifier", "XXX", 1).isDriver)
}

test("master + 1 manager interaction") {
store = makeBlockManager(20000)
val a1 = new Array[Byte](4000)
Expand Down