@@ -266,40 +266,15 @@ private object ExternalShuffleDriver extends Logging with Matchers {
     val status = new File(args(0))
     var result = "failure"
     try {
-      val data = sc.parallelize(1 to 4, 4).collect().toSet
+      val data = sc.parallelize(1 to 100, 10).map { x => (x % 10) -> x }.reduceByKey { _ + _ }.
+        collect().toSet
       sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
-      data should be (Set(1, 2, 3, 4))
+      data should be ((0 until 10).map { x => x -> (if (x == 0) 550 else x * 10 + 450) }.toSet)
       result = "success"
     } finally {
       sc.stop()
       Files.write(result, status, UTF_8)
     }
-
-    // verify log urls are present
-    val listeners = sc.listenerBus.findListenersByClass[SaveExecutorInfo]
-    assert(listeners.size === 1)
-    val listener = listeners(0)
-    val executorInfos = listener.addedExecutorInfos.values
-    assert(executorInfos.nonEmpty)
-    executorInfos.foreach { info =>
-      assert(info.logUrlMap.nonEmpty)
-    }
-
-    // If we are running in yarn-cluster mode, verify that driver log links are present and are
-    // in the expected format.
-    if (conf.get("spark.master") == "yarn-cluster") {
-      assert(listener.driverLogs.nonEmpty)
-      val driverLogs = listener.driverLogs.get
-      assert(driverLogs.size === 2)
-      assert(driverLogs.containsKey("stderr"))
-      assert(driverLogs.containsKey("stdout"))
-      val urlStr = driverLogs("stderr")
-      // Ensure that this is a valid URL, else this will throw an exception
-      new URL(urlStr)
-      val containerId = YarnSparkHadoopUtil.get.getContainerId
-      val user = Utils.getCurrentUserName()
-      assert(urlStr.endsWith(s"/node/containerlogs/$containerId/$user/stderr?start=-4096"))
-    }
   }
 
 }
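
A note on the change: `parallelize(...).collect()` runs as a single result stage with no shuffle, so the old job never exercised the external shuffle service this test exists for; the `map`/`reduceByKey` pipeline forces a shuffle stage. As a sanity check of the expected set asserted above, here is a minimal standalone Scala sketch (plain collections, no SparkContext; the object name is made up for illustration) that derives the same values two ways:

// Hypothetical standalone check (not part of the patch): grouping 1 to 100
// by x % 10 and summing each group must match the closed-form expectation.
object ExpectedShuffleOutputCheck extends App {
  val byGroupBy = (1 to 100).groupBy(_ % 10).map { case (k, xs) => k -> xs.sum }.toSet
  // Key 0 collects 10, 20, ..., 100 (sum 550); each key k in 1..9 collects
  // k, k + 10, ..., k + 90 (sum 10 * k + 450).
  val byFormula = (0 until 10).map { x => x -> (if (x == 0) 550 else x * 10 + 450) }.toSet
  assert(byGroupBy == byFormula)
  println(byFormula.toSeq.sorted.mkString(", "))
}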