@@ -35,6 +35,8 @@ import org.apache.spark.util.{JsonProtocol, ManualClock, Utils}
 
 class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
 
+  import FsHistoryProvider._
+
   private var testDir: File = null
 
   before {
@@ -63,43 +65,39 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
     // Write a new-style application log.
     val newAppComplete = newLogFile("new1", None, inProgress = false)
     writeFile(newAppComplete, true, None,
-      SparkListenerApplicationStart("new-app-complete", None, 1L, "test", None),
+      SparkListenerApplicationStart(newAppComplete.getName(), Some("new-app-complete"), 1L, "test",
+        None),
       SparkListenerApplicationEnd(5L)
       )

     // Write a new-style application log.
     val newAppCompressedComplete = newLogFile("new1compressed", None, inProgress = false,
       Some("lzf"))
     writeFile(newAppCompressedComplete, true, None,
-      SparkListenerApplicationStart("new-app-compressed-complete", None, 1L, "test", None),
+      SparkListenerApplicationStart(newAppCompressedComplete.getName(), Some("new-complete-lzf"),
+        1L, "test", None),
       SparkListenerApplicationEnd(4L))

     // Write an unfinished app, new-style.
     val newAppIncomplete = newLogFile("new2", None, inProgress = true)
     writeFile(newAppIncomplete, true, None,
-      SparkListenerApplicationStart("new-app-incomplete", None, 1L, "test", None)
+      SparkListenerApplicationStart(newAppIncomplete.getName(), Some("new-incomplete"), 1L, "test",
+        None)
       )

     // Write an old-style application log.
-    val oldAppComplete = new File(testDir, "old1")
-    oldAppComplete.mkdir()
-    createEmptyFile(new File(oldAppComplete, provider.SPARK_VERSION_PREFIX + "1.0"))
-    writeFile(new File(oldAppComplete, provider.LOG_PREFIX + "1"), false, None,
-      SparkListenerApplicationStart("old-app-complete", None, 2L, "test", None),
+    val oldAppComplete = writeOldLog("old1", "1.0", None, true,
+      SparkListenerApplicationStart("old1", Some("old-app-complete"), 2L, "test", None),
       SparkListenerApplicationEnd(3L)
       )
-    createEmptyFile(new File(oldAppComplete, provider.APPLICATION_COMPLETE))

     // Check for logs so that we force the older unfinished app to be loaded, to make
     // sure unfinished apps are also sorted correctly.
     provider.checkForLogs()

     // Write an unfinished app, old-style.
-    val oldAppIncomplete = new File(testDir, "old2")
-    oldAppIncomplete.mkdir()
-    createEmptyFile(new File(oldAppIncomplete, provider.SPARK_VERSION_PREFIX + "1.0"))
-    writeFile(new File(oldAppIncomplete, provider.LOG_PREFIX + "1"), false, None,
-      SparkListenerApplicationStart("old-app-incomplete", None, 2L, "test", None)
+    val oldAppIncomplete = writeOldLog("old2", "1.0", None, false,
+      SparkListenerApplicationStart("old2", None, 2L, "test", None)
       )

     // Force a reload of data from the log directory, and check that both logs are loaded.
@@ -120,16 +118,15 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
         List(ApplicationAttemptInfo(None, start, end, lastMod, user, completed)))
     }

-    list(0) should be (makeAppInfo(newAppComplete.getName(), "new-app-complete", 1L, 5L,
+    list(0) should be (makeAppInfo("new-app-complete", newAppComplete.getName(), 1L, 5L,
       newAppComplete.lastModified(), "test", true))
-    list(1) should be (makeAppInfo(newAppCompressedComplete.getName(),
-      "new-app-compressed-complete", 1L, 4L, newAppCompressedComplete.lastModified(), "test",
-      true))
-    list(2) should be (makeAppInfo(oldAppComplete.getName(), "old-app-complete", 2L, 3L,
+    list(1) should be (makeAppInfo("new-complete-lzf", newAppCompressedComplete.getName(),
+      1L, 4L, newAppCompressedComplete.lastModified(), "test", true))
+    list(2) should be (makeAppInfo("old-app-complete", oldAppComplete.getName(), 2L, 3L,
       oldAppComplete.lastModified(), "test", true))
-    list(3) should be (makeAppInfo(oldAppIncomplete.getName(), "old-app-incomplete", 2L, -1L,
-      oldAppIncomplete.lastModified(), "test", false))
-    list(4) should be (makeAppInfo(newAppIncomplete.getName(), "new-app-incomplete", 1L, -1L,
+    list(3) should be (makeAppInfo(oldAppIncomplete.getName(), oldAppIncomplete.getName(), 2L,
+      -1L, oldAppIncomplete.lastModified(), "test", false))
+    list(4) should be (makeAppInfo("new-incomplete", newAppIncomplete.getName(), 1L, -1L,
       newAppIncomplete.lastModified(), "test", false))

     // Make sure the UI can be rendered.
@@ -151,12 +148,12 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
     val codec = if (valid) CompressionCodec.createCodec(new SparkConf(), codecName) else null
     val logDir = new File(testDir, codecName)
     logDir.mkdir()
-    createEmptyFile(new File(logDir, provider.SPARK_VERSION_PREFIX + "1.0"))
-    writeFile(new File(logDir, provider.LOG_PREFIX + "1"), false, Option(codec),
+    createEmptyFile(new File(logDir, SPARK_VERSION_PREFIX + "1.0"))
+    writeFile(new File(logDir, LOG_PREFIX + "1"), false, Option(codec),
       SparkListenerApplicationStart("app2", None, 2L, "test", None),
       SparkListenerApplicationEnd(3L)
       )
-    createEmptyFile(new File(logDir, provider.COMPRESSION_CODEC_PREFIX + codecName))
+    createEmptyFile(new File(logDir, COMPRESSION_CODEC_PREFIX + codecName))

     val logPath = new Path(logDir.getAbsolutePath())
     try {
@@ -176,12 +173,12 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
   test("SPARK-3697: ignore directories that cannot be read.") {
     val logFile1 = newLogFile("new1", None, inProgress = false)
     writeFile(logFile1, true, None,
-      SparkListenerApplicationStart("app1-1", None, 1L, "test", None),
+      SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),
       SparkListenerApplicationEnd(2L)
       )
     val logFile2 = newLogFile("new2", None, inProgress = false)
     writeFile(logFile2, true, None,
-      SparkListenerApplicationStart("app1-2", None, 1L, "test", None),
+      SparkListenerApplicationStart("app1-2", Some("app1-2"), 1L, "test", None),
       SparkListenerApplicationEnd(2L)
       )
     logFile2.setReadable(false, false)
@@ -214,6 +211,18 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
     }
   }

+  test("Parse logs that application is not started") {
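+    // The log written below contains only a SparkListenerLogStart event, i.e. the application
+    // never started, so it should not show up in the provider's listing.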
+    val provider = new FsHistoryProvider((createTestConf()))
+
+    val logFile1 = newLogFile("app1", None, inProgress = true)
+    writeFile(logFile1, true, None,
+      SparkListenerLogStart("1.4")
+    )
+    updateAndCheck(provider) { list =>
+      list.size should be (0)
+    }
+  }
+
   test("SPARK-5582: empty log directory") {
     val provider = new FsHistoryProvider(createTestConf())

@@ -335,6 +344,33 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
     assert(!log2.exists())
   }

+  test("SPARK-8372: new logs with no app ID are ignored") {
+    val provider = new FsHistoryProvider(createTestConf())
+
+    // Write a new log file without an app id, to make sure it's ignored.
+    val logFile1 = newLogFile("app1", None, inProgress = true)
+    writeFile(logFile1, true, None,
+      SparkListenerLogStart("1.4")
+    )
+
+    // Write a 1.2 log file with no start event (= no app id), it should be ignored.
+    writeOldLog("v12Log", "1.2", None, false)
+
+    // Write 1.0 and 1.1 logs, which don't have app ids.
+    writeOldLog("v11Log", "1.1", None, true,
+      SparkListenerApplicationStart("v11Log", None, 2L, "test", None),
+      SparkListenerApplicationEnd(3L))
+    writeOldLog("v10Log", "1.0", None, true,
+      SparkListenerApplicationStart("v10Log", None, 2L, "test", None),
+      SparkListenerApplicationEnd(4L))
+
+    updateAndCheck(provider) { list =>
+      list.size should be (2)
+      list(0).id should be ("v10Log")
+      list(1).id should be ("v11Log")
+    }
+  }
+
   /**
    * Asks the provider to check for logs and calls a function to perform checks on the updated
    * app list. Example:
@@ -374,4 +410,23 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
     new SparkConf().set("spark.history.fs.logDirectory", testDir.getAbsolutePath())
   }

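+  /**
+   * Writes an old-style (directory-based) event log: the directory gets a Spark version
+   * marker file, a single event log file containing the given events, and, when `completed`
+   * is true, an APPLICATION_COMPLETE marker file.
+   */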
+  private def writeOldLog(
+      fname: String,
+      sparkVersion: String,
+      codec: Option[CompressionCodec],
+      completed: Boolean,
+      events: SparkListenerEvent*): File = {
+    val log = new File(testDir, fname)
+    log.mkdir()
+
+    val oldEventLog = new File(log, LOG_PREFIX + "1")
+    createEmptyFile(new File(log, SPARK_VERSION_PREFIX + sparkVersion))
+    writeFile(new File(log, LOG_PREFIX + "1"), false, codec, events: _*)
+    if (completed) {
+      createEmptyFile(new File(log, APPLICATION_COMPLETE))
+    }
+
+    log
+  }
+
 }