@@ -190,38 +190,39 @@ class DataSourceV2Suite extends QueryTest with SharedSQLContext {
 
   test("simple writable data source") {
     // TODO: java implementation.
+    val writeOnlySource = classOf[SimpleWriteOnlyDataSource]
     Seq(classOf[SimpleWritableDataSource]).foreach { cls =>
       withTempPath { file =>
         val path = file.getCanonicalPath
         assert(spark.read.format(cls.getName).option("path", path).load().collect().isEmpty)
 
-        spark.range(10).select('id as 'i, -'id as 'j).write.format(cls.getName)
+        spark.range(10).select('id as 'i, -'id as 'j).write.format(writeOnlySource.getName)
           .option("path", path).save()
         checkAnswer(
           spark.read.format(cls.getName).option("path", path).load(),
           spark.range(10).select('id, -'id))
 
         // test with different save modes
-        spark.range(10).select('id as 'i, -'id as 'j).write.format(cls.getName)
+        spark.range(10).select('id as 'i, -'id as 'j).write.format(writeOnlySource.getName)
           .option("path", path).mode("append").save()
         checkAnswer(
           spark.read.format(cls.getName).option("path", path).load(),
           spark.range(10).union(spark.range(10)).select('id, -'id))
 
-        spark.range(5).select('id as 'i, -'id as 'j).write.format(cls.getName)
+        spark.range(5).select('id as 'i, -'id as 'j).write.format(writeOnlySource.getName)
           .option("path", path).mode("overwrite").save()
         checkAnswer(
           spark.read.format(cls.getName).option("path", path).load(),
           spark.range(5).select('id, -'id))
 
-        spark.range(5).select('id as 'i, -'id as 'j).write.format(cls.getName)
+        spark.range(5).select('id as 'i, -'id as 'j).write.format(writeOnlySource.getName)
           .option("path", path).mode("ignore").save()
         checkAnswer(
           spark.read.format(cls.getName).option("path", path).load(),
           spark.range(5).select('id, -'id))
 
         val e = intercept[Exception] {
-          spark.range(5).select('id as 'i, -'id as 'j).write.format(cls.getName)
+          spark.range(5).select('id as 'i, -'id as 'j).write.format(writeOnlySource.getName)
             .option("path", path).mode("error").save()
         }
         assert(e.getMessage.contains("data already exists"))
@@ -240,7 +241,7 @@ class DataSourceV2Suite extends QueryTest with SharedSQLContext {
         // this input data will fail to read middle way.
         val input = spark.range(10).select(failingUdf('id).as('i)).select('i, -'i as 'j)
         val e2 = intercept[SparkException] {
-          input.write.format(cls.getName).option("path", path).mode("overwrite").save()
+          input.write.format(writeOnlySource.getName).option("path", path).mode("overwrite").save()
         }
         assert(e2.getMessage.contains("Writing job aborted"))
         // make sure we don't have partial data.
@@ -640,3 +641,12 @@ object SpecificReaderFactory extends PartitionReaderFactory {
     }
   }
 }
+
+class SimpleWriteOnlyDataSource extends SimpleWritableDataSource {
+  override def fullSchema(): StructType = {
+    // This is a bit hacky: this source implements read support but throws
+    // during schema retrieval. It may need to be rewritten, but it is kept
+    // this way to minimize the changes.
+    throw new UnsupportedOperationException("read is not supported")
+  }
+}
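
The pattern exercised here is that `SimpleWriteOnlyDataSource` poisons its schema accessor, so any write through it that succeeds proves the write path never consulted the read-side schema. A minimal, Spark-independent sketch of the same idea (all names below are illustrative, not part of this patch):

```scala
// Sketch of the write-only-source pattern, assuming hypothetical names.
// A readable source exposes a schema; the write-only subclass overrides
// the schema accessor to throw, so any accidental read-side call fails loudly.
class ReadableSource {
  def fullSchema(): String = "i: long, j: long" // stand-in for StructType
  def write(rows: Seq[(Long, Long)]): Unit = () // write path: no schema needed
}

class WriteOnlySource extends ReadableSource {
  override def fullSchema(): String =
    throw new UnsupportedOperationException("read is not supported")
}

object WriteOnlyDemo extends App {
  val src = new WriteOnlySource
  src.write(Seq((0L, 0L), (1L, -1L))) // succeeds: write never touches fullSchema()
  try src.fullSchema()
  catch {
    case e: UnsupportedOperationException => println(s"as expected: ${e.getMessage}")
  }
}
```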