@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql
 
+import java.io.FileNotFoundException
+
 import org.apache.hadoop.fs.Path
 
 import org.apache.spark.SparkException
@@ -102,17 +104,27 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext { |
   def testIgnoreMissingFiles(): Unit = {
     withTempDir { dir =>
       val basePath = dir.getCanonicalPath
+
       Seq("0").toDF("a").write.format(format).save(new Path(basePath, "first").toString)
       Seq("1").toDF("a").write.format(format).save(new Path(basePath, "second").toString)
+
       val thirdPath = new Path(basePath, "third")
+      val fs = thirdPath.getFileSystem(spark.sparkContext.hadoopConfiguration)
       Seq("2").toDF("a").write.format(format).save(thirdPath.toString)
+      val files = fs.listStatus(thirdPath).filter(_.isFile).map(_.getPath)
+
       val df = spark.read.format(format).load(
         new Path(basePath, "first").toString,
         new Path(basePath, "second").toString,
         new Path(basePath, "third").toString)
 
-      val fs = thirdPath.getFileSystem(spark.sparkContext.hadoopConfiguration)
+      // Make sure all data files are deleted and can't be opened.
+      files.foreach(f => fs.delete(f, false))
       assert(fs.delete(thirdPath, true))
+      for (f <- files) {
+        intercept[FileNotFoundException](fs.open(f))
+      }
+
       checkAnswer(df, Seq(Row("0"), Row("1")))
     }
   }
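
For context, the behaviour this test exercises is Spark skipping data files that disappear between query planning and execution, which is controlled by the spark.sql.files.ignoreMissingFiles option. Below is a minimal, self-contained sketch of that behaviour outside the test suite; the session setup and the /data/* paths are illustrative assumptions, not part of this change.

import org.apache.spark.sql.SparkSession

object IgnoreMissingFilesSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      // Skip files that vanish between planning and execution
      // instead of failing the job.
      .config("spark.sql.files.ignoreMissingFiles", "true")
      .getOrCreate()

    // Hypothetical paths; in the test above these are temp dirs "first",
    // "second" and "third", with the files under "third" deleted before
    // the DataFrame is actually executed.
    val df = spark.read.parquet("/data/first", "/data/second", "/data/third")
    df.show()

    spark.stop()
  }
}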