@@ -277,13 +277,25 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
277277 test(" Default path for file based RDDs is properly set (SPARK-12517)" ) {
278278 sc = new SparkContext (new SparkConf ().setAppName(" test" ).setMaster(" local" ))
279279
280- // Test textFile, wholeTextFiles and binaryFiles for default paths
280+ // Test textFile, wholeTextFiles, binaryFiles, hadoopFile and
281+ // newAPIHadoopFile for setting the default path as the RDD name
281282 val mockPath = " default/path/for/"
282- assert(sc.textFile(mockPath + " textFile" ).name == mockPath + " textFile" )
283- assert(sc.wholeTextFiles(mockPath + " wholeTextFile" ).name == mockPath + " wholeTextFile" )
284- assert(sc.binaryFiles(mockPath + " binaryFiles" ).name == mockPath + " binaryFiles" )
285- assert(sc.hadoopFile(mockPath + " hadoopFile" ).name == mockPath + " hadoopFile" )
286- assert(sc.newAPIHadoopFile(mockPath + " newAPIHadoopFile" ).name == mockPath + " newAPIHadoopFile" )
283+
284+ var targetPath = mockPath + " textFile"
285+ assert(sc.textFile(targetPath).name == targetPath)
286+
287+ targetPath = mockPath + " wholeTextFile"
288+ assert(sc.wholeTextFiles(targetPath).name == targetPath)
289+
290+ targetPath = mockPath + " binaryFiles"
291+ assert(sc.binaryFiles(targetPath).name == targetPath)
292+
293+ targetPath = mockPath + " hadoopFile"
294+ assert(sc.hadoopFile(targetPath).name == targetPath)
295+
296+ targetPath = mockPath + " newAPIHadoopFile"
297+ assert(sc.newAPIHadoopFile(targetPath).name == targetPath)
298+
287299 sc.stop()
288300 }
289301
0 commit comments