Skip to content

Commit 44b2ad4

Browse files
committed
Add unit test.
1 parent 1a1f2b8 commit 44b2ad4

File tree

3 files changed: 70 additions, 8 deletions

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -358,9 +358,10 @@ private[spark] class MesosClusterScheduler(
358358
val appJar = CommandInfo.URI.newBuilder()
359359
.setValue(desc.jarUrl.stripPrefix("file:").stripPrefix("local:")).build()
360360
val builder = CommandInfo.newBuilder().addUris(appJar)
361-
val entries =
362-
(conf.getOption("spark.executor.extraLibraryPath").toList ++
363-
desc.command.libraryPathEntries)
361+
val entries = conf.getOption("spark.executor.extraLibraryPath")
362+
.map(path => Seq(path) ++ desc.command.libraryPathEntries)
363+
.getOrElse(desc.command.libraryPathEntries)
364+
364365
val prefixEnv = if (!entries.isEmpty) {
365366
Utils.libraryPathEnvPrefix(entries)
366367
} else {
@@ -549,7 +550,10 @@ private[spark] class MesosClusterScheduler(
549550
tasks.foreach { case (offerId, taskInfos) =>
550551
driver.launchTasks(Collections.singleton(offerId), taskInfos.asJava)
551552
}
552-
currentOffers.asScala.filter(!_.used).foreach(o => driver.declineOffer(o.offerId))
553+
554+
for (o <- currentOffers.asScala if !o.used) {
555+
driver.declineOffer(o.offerId)
556+
}
553557
}
554558

555559
private def copyBuffer(

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ private[mesos] trait MesosSchedulerUtils extends Logging {
189189
val filteredResources =
190190
remainingResources.filter(r => r.getType != Value.Type.SCALAR || r.getScalar.getValue > 0.0)
191191

192-
(filteredResources.toList.asJava, requestedResources.toList.asJava)
192+
(filteredResources.asJava, requestedResources.asJava)
193193
}
194194

195195
/** Helper method to get the key,value-set pair for a Mesos Attribute protobuf */

core/src/test/scala/org/apache/spark/scheduler/mesos/MesosClusterSchedulerSuite.scala

Lines changed: 61 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,16 @@
1717

1818
package org.apache.spark.scheduler.mesos
1919

20-
import java.util.Date
20+
import java.util.{Collection, Collections, Date}
2121

22-
import org.scalatest.mock.MockitoSugar
22+
import scala.collection.JavaConverters._
2323

24+
import org.apache.mesos.Protos.Value.{Scalar, Type}
25+
import org.apache.mesos.Protos._
26+
import org.apache.mesos.SchedulerDriver
27+
import org.mockito.Mockito._
28+
import org.mockito.{ArgumentCaptor, Matchers}
29+
import org.scalatest.mock.MockitoSugar
2430
import org.apache.spark.deploy.Command
2531
import org.apache.spark.deploy.mesos.MesosDriverDescription
2632
import org.apache.spark.scheduler.cluster.mesos._
@@ -29,7 +35,7 @@ import org.apache.spark.{LocalSparkContext, SparkConf, SparkFunSuite}
2935

3036
class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
3137

32-
private val command = new Command("mainClass", Seq("arg"), null, null, null, null)
38+
private val command = new Command("mainClass", Seq("arg"), Map(), Seq(), Seq(), Seq())
3339

3440
test("can queue drivers") {
3541
val conf = new SparkConf()
@@ -72,4 +78,56 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
7278
val state = scheduler.getSchedulerState()
7379
assert(state.queuedDrivers.isEmpty)
7480
}
81+
82+
test("can handle multiple roles") {
83+
val conf = new SparkConf()
84+
conf.setMaster("mesos://localhost:5050")
85+
conf.setAppName("spark mesos")
86+
val scheduler = new MesosClusterScheduler(
87+
new BlackHoleMesosClusterPersistenceEngineFactory, conf) {
88+
override def start(): Unit = { ready = true }
89+
}
90+
scheduler.start()
91+
val driver = mock[SchedulerDriver]
92+
val response = scheduler.submitDriver(
93+
new MesosDriverDescription("d1", "jar", 1500, 1, true,
94+
command,
95+
Map(("spark.mesos.executor.home", "test"), ("spark.app.name", "test")),
96+
"s1",
97+
new Date()))
98+
assert(response.success)
99+
val offer = Offer.newBuilder()
100+
.addResources(
101+
Resource.newBuilder().setRole("*")
102+
.setScalar(Scalar.newBuilder().setValue(1).build()).setName("cpus").setType(Type.SCALAR))
103+
.addResources(
104+
Resource.newBuilder().setRole("*")
105+
.setScalar(Scalar.newBuilder().setValue(1000).build()).setName("mem").setType(Type.SCALAR))
106+
.addResources(
107+
Resource.newBuilder().setRole("role2")
108+
.setScalar(Scalar.newBuilder().setValue(1).build()).setName("cpus").setType(Type.SCALAR))
109+
.addResources(
110+
Resource.newBuilder().setRole("role2")
111+
.setScalar(Scalar.newBuilder().setValue(500).build()).setName("mem").setType(Type.SCALAR))
112+
.setId(OfferID.newBuilder().setValue("o1").build())
113+
.setFrameworkId(FrameworkID.newBuilder().setValue("f1").build())
114+
.setSlaveId(SlaveID.newBuilder().setValue("s1").build())
115+
.setHostname("host1")
116+
.build()
117+
118+
val capture = ArgumentCaptor.forClass(classOf[Collection[TaskInfo]])
119+
120+
when(
121+
driver.launchTasks(
122+
Matchers.eq(Collections.singleton(offer.getId)),
123+
capture.capture())
124+
).thenReturn(Status.valueOf(1))
125+
126+
scheduler.resourceOffers(driver, List(offer).asJava)
127+
128+
verify(driver, times(1)).launchTasks(
129+
Matchers.eq(Collections.singleton(offer.getId)),
130+
capture.capture()
131+
)
132+
}
75133
}

Commit comments: 0