@@ -29,6 +29,7 @@ import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
+import org.apache.spark.util.CallSite
 
 class BuggyDAGEventProcessActor extends Actor {
   val state = 0
@@ -211,7 +212,7 @@ class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with F
       allowLocal: Boolean = false,
       listener: JobListener = jobListener): Int = {
     val jobId = scheduler.nextJobId.getAndIncrement()
-    runEvent(JobSubmitted(jobId, rdd, func, partitions, allowLocal, null, listener))
+    runEvent(JobSubmitted(jobId, rdd, func, partitions, allowLocal, CallSite("", ""), listener))
     jobId
   }
 
217218
@@ -251,7 +252,7 @@ class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with F
       override def toString = "DAGSchedulerSuite Local RDD"
     }
     val jobId = scheduler.nextJobId.getAndIncrement()
-    runEvent(JobSubmitted(jobId, rdd, jobComputeFunc, Array(0), true, null, jobListener))
+    runEvent(JobSubmitted(jobId, rdd, jobComputeFunc, Array(0), true, CallSite("", ""), jobListener))
     assert(results === Map(0 -> 42))
     assertDataStructuresEmpty
   }
@@ -265,7 +266,7 @@ class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with F
       override def toString = "DAGSchedulerSuite Local RDD"
     }
     val jobId = scheduler.nextJobId.getAndIncrement()
-    runEvent(JobSubmitted(jobId, rdd, jobComputeFunc, Array(0), true, null, jobListener))
+    runEvent(JobSubmitted(jobId, rdd, jobComputeFunc, Array(0), true, CallSite("", ""), jobListener))
    assert(results.size == 0)
    assertDataStructuresEmpty
  }
0 commit comments