@@ -19,9 +19,11 @@ package org.apache.spark.deploy.yarn
 
 import scala.collection.mutable.{HashMap, HashSet, Set}
 
-import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic
+import org.apache.hadoop.net.DNSToSwitchMapping
 import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest
+import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.mockito.Mockito._
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -49,18 +51,22 @@ class LocalityPlacementStrategySuite extends SparkFunSuite {
   }
 
   private def runTest(): Unit = {
+    val yarnConf = new YarnConfiguration()
+    yarnConf.setClass(
+      CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
+      classOf[MockResolver], classOf[DNSToSwitchMapping])
+
     val resource = Resource.newInstance(8 * 1024, 4)
     val strategy = new LocalityPreferredContainerPlacementStrategy(new SparkConf(),
-      new Configuration(), resource)
+      yarnConf, resource)
 
     val totalTasks = 32 * 1024
     val totalContainers = totalTasks / 16
     val totalHosts = totalContainers / 16
 
+    val mockId = mock(classOf[ContainerId])
     val hosts = (1 to totalHosts).map { i => (s"host_$i", totalTasks % i) }.toMap
-    val containers = (1 to totalContainers).map { i =>
-      ContainerId.fromString(s"container_12345678_0001_01_$i")
-    }
+    val containers = (1 to totalContainers).map { i => mockId }
     val count = containers.size / hosts.size / 2
 
     val hostToContainerMap = new HashMap[String, Set[ContainerId]]()
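
The diff registers MockResolver as the cluster's DNSToSwitchMapping implementation, but the class itself is defined elsewhere in the test sources and is not shown in this hunk. As a rough sketch (not the actual Spark source; names and rack values are illustrative assumptions), a minimal resolver satisfying the interface could look like this:

import java.util.{Arrays, List => JList}

import org.apache.hadoop.net.DNSToSwitchMapping

// Hypothetical stand-in for the MockResolver referenced above: every host
// resolves to the same rack, so the placement strategy never shells out to
// a real topology script during the test.
class MockResolver extends DNSToSwitchMapping {

  override def resolve(names: JList[String]): JList[String] =
    Arrays.asList(Seq.fill(names.size())("/default-rack"): _*)

  override def reloadCachedMappings(): Unit = {}

  def reloadCachedMappings(names: JList[String]): Unit = {}
}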