@@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.datasources
19  19
20  20  import org.apache.hadoop.conf.Configuration
21  21  import org.apache.hadoop.fs.{FileStatus, Path, RawLocalFileSystem}
    22 +
22  23  import org.apache.spark.sql.AnalysisException
23  24  import org.apache.spark.sql.test.SharedSparkSession
24  25
@@ -31,11 +32,11 @@ class DataSourceSuite extends SharedSparkSession {
31  32        path1.toString,
32  33        path2.toString,
33  34        globPath1.toString,
34     -      globPath2.toString,
    35 +      globPath2.toString
35  36      ),
36  37      hadoopConf,
37  38      checkEmptyGlobPath = true,
38     -    checkFilesExist = true,
    39 +    checkFilesExist = true
39  40    )
40  41
41  42    assert(resultPaths.toSet == allPathsInFs.toSet)
@@ -45,11 +46,11 @@ class DataSourceSuite extends SharedSparkSession {
45  46    val resultPaths = DataSource.checkAndGlobPathIfNecessary(
46  47      Seq(
47  48        globPath1.toString,
48     -      globPath2.toString,
    49 +      globPath2.toString
49  50      ),
50  51      hadoopConf,
51  52      checkEmptyGlobPath = true,
52     -    checkFilesExist = true,
    53 +    checkFilesExist = true
53  54    )
54  55
55  56    assert(
5556 assert(
@@ -58,7 +59,7 @@ class DataSourceSuite extends SharedSparkSession {
58  59        globPath1Result1,
59  60        globPath1Result2,
60  61        globPath2Result1,
61     -      globPath2Result2,
    62 +      globPath2Result2
62  63      )
63  64    )
64  65  )
@@ -68,18 +69,18 @@ class DataSourceSuite extends SharedSparkSession {
68  69    val resultPaths = DataSource.checkAndGlobPathIfNecessary(
69  70      Seq(
70  71        path1.toString,
71     -      path2.toString,
    72 +      path2.toString
72  73      ),
73  74      hadoopConf,
74  75      checkEmptyGlobPath = true,
75     -    checkFilesExist = true,
    76 +    checkFilesExist = true
76  77    )
77  78
78  79    assert(
79  80      resultPaths.equals(
80  81        Seq(
81  82          path1,
82     -        path2,
    83 +        path2
83  84        )
84  85      )
85  86    )
@@ -91,11 +92,11 @@ class DataSourceSuite extends SharedSparkSession {
91  92      Seq(
92  93        path1.toString,
93  94        path2.toString,
94     -      nonExistentPath.toString,
    95 +      nonExistentPath.toString
95  96      ),
96  97      hadoopConf,
97  98      checkEmptyGlobPath = true,
98     -    checkFilesExist = true,
    99 +    checkFilesExist = true
99  100   )
100 101 )
101 102 }
@@ -106,11 +107,11 @@ class DataSourceSuite extends SharedSparkSession {
106 107     Seq(
107 108       globPath1.toString,
108 109       globPath2.toString,
109     -     nonExistentGlobPath.toString,
    110 +     nonExistentGlobPath.toString
110 111     ),
111 112     hadoopConf,
112 113     checkEmptyGlobPath = true,
113     -   checkFilesExist = true,
    114 +   checkFilesExist = true
114 115   )
115 116 )
116 117 }
@@ -139,20 +140,20 @@ object TestPaths {
139 140   globPath1Result1,
140 141   globPath1Result2,
141 142   globPath2Result1,
142     - globPath2Result2,
    143 + globPath2Result2
143 144 )
144 145
145 146 val mockGlobResults: Map[Path, Array[FileStatus]] = Map(
146 147   globPath1 ->
147 148     Array(
148 149       createMockFileStatus(globPath1Result1.toString),
149     -     createMockFileStatus(globPath1Result2.toString),
    150 +     createMockFileStatus(globPath1Result2.toString)
150 151     ),
151 152   globPath2 ->
152 153     Array(
153 154       createMockFileStatus(globPath2Result1.toString),
154     -     createMockFileStatus(globPath2Result2.toString),
155     -   ),
    155 +     createMockFileStatus(globPath2Result2.toString)
    156 +   )
156 157 )
157 158
158 159 def createMockFileStatus(path: String): FileStatus = {
0 commit comments