@@ -18,6 +18,8 @@ package org.apache.spark.network.yarn
1818
1919import java .io .{DataOutputStream , FileOutputStream , PrintWriter , File }
2020
21+ import scala .annotation .tailrec
22+
2123import org .apache .commons .io .FileUtils
2224import org .apache .hadoop .yarn .api .records .ApplicationId
2325import org .apache .hadoop .yarn .conf .YarnConfiguration
@@ -43,6 +45,7 @@ class YarnShuffleServiceSuite extends SparkFunSuite with Matchers with BeforeAnd
4345 FileUtils .deleteDirectory(d)
4446 }
4547 FileUtils .forceMkdir(d)
48+ logInfo(s " creating yarn.nodemanager.local-dirs: $d" )
4649 }
4750 }
4851
@@ -87,7 +90,16 @@ class YarnShuffleServiceSuite extends SparkFunSuite with Matchers with BeforeAnd
8790 ShuffleTestAccessor .getExecutorInfo(app2Id, " exec-2" , blockResolver) should
8891 be (Some (shuffleInfo2))
8992
90- execStateFile.exists() should be (true )
93+ if (! execStateFile.exists()) {
// Walk up the path hierarchy from `file` until we hit a path that exists
// on disk; returns null if no ancestor exists (getParentFile ran out).
def findExistingParent(file: File): File = {
  var candidate = file
  while (candidate != null && !candidate.exists()) {
    candidate = candidate.getParentFile()
  }
  candidate
}
99+ val existingParent = findExistingParent(execStateFile)
100+ assert(false , s " $execStateFile does not exist -- closest existing parent is $existingParent" )
101+ }
102+ assert(execStateFile.exists(), s " $execStateFile did not exist " )
91103
92104 // now we pretend the shuffle service goes down, and comes back up
93105 s1.stop()
0 commit comments