@@ -38,11 +38,15 @@ import org.apache.spark.internal.io.HadoopMapReduceCommitProtocol
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.plans.logical.{AppendData, LogicalPlan, OverwriteByExpression}
+import org.apache.spark.sql.execution.QueryExecution
 import org.apache.spark.sql.execution.datasources.DataSourceUtils
+import org.apache.spark.sql.execution.datasources.noop.NoopDataSource
 import org.apache.spark.sql.execution.datasources.parquet.SpecificParquetRecordReaderBase
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
+import org.apache.spark.sql.util.QueryExecutionListener
 import org.apache.spark.util.Utils


@@ -239,15 +243,75 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSQLContext with Be
   }

   test("save mode") {
-    val df = spark.read
+    spark.range(10).write
       .format("org.apache.spark.sql.test")
-      .load()
+      .mode(SaveMode.ErrorIfExists)
+      .save()
+    assert(LastOptions.saveMode === SaveMode.ErrorIfExists)

-    df.write
+    spark.range(10).write
+      .format("org.apache.spark.sql.test")
+      .mode(SaveMode.Append)
+      .save()
+    assert(LastOptions.saveMode === SaveMode.Append)
+
+    // By default the save mode is `ErrorIfExists` for data source v1.
+    spark.range(10).write
       .format("org.apache.spark.sql.test")
-      .mode(SaveMode.ErrorIfExists)
       .save()
     assert(LastOptions.saveMode === SaveMode.ErrorIfExists)
+
+    spark.range(10).write
+      .format("org.apache.spark.sql.test")
+      .mode("default")
+      .save()
+    assert(LastOptions.saveMode === SaveMode.ErrorIfExists)
+  }
+
+  test("save mode for data source v2") {
+    var plan: LogicalPlan = null
+    val listener = new QueryExecutionListener {
+      override def onSuccess(funcName: String, qe: QueryExecution, durationNs: Long): Unit = {
+        plan = qe.analyzed
+
+      }
+      override def onFailure(funcName: String, qe: QueryExecution, exception: Exception): Unit = {}
+    }
+
+    spark.listenerManager.register(listener)
+    try {
+      // append mode creates `AppendData`
+      spark.range(10).write
+        .format(classOf[NoopDataSource].getName)
+        .mode(SaveMode.Append)
+        .save()
+      sparkContext.listenerBus.waitUntilEmpty(1000)
+      assert(plan.isInstanceOf[AppendData])
+
+      // overwrite mode creates `OverwriteByExpression`
+      spark.range(10).write
+        .format(classOf[NoopDataSource].getName)
+        .mode(SaveMode.Overwrite)
+        .save()
+      sparkContext.listenerBus.waitUntilEmpty(1000)
+      assert(plan.isInstanceOf[OverwriteByExpression])
+
+      // By default the save mode is `ErrorIfExists` for data source v2.
+      spark.range(10).write
+        .format(classOf[NoopDataSource].getName)
+        .save()
+      sparkContext.listenerBus.waitUntilEmpty(1000)
+      assert(plan.isInstanceOf[AppendData])
+
+      spark.range(10).write
+        .format(classOf[NoopDataSource].getName)
+        .mode("default")
+        .save()
+      sparkContext.listenerBus.waitUntilEmpty(1000)
+      assert(plan.isInstanceOf[AppendData])
+    } finally {
+      spark.listenerManager.unregister(listener)
+    }
   }

   test("test path option in load") {