Commit 3db1539

save does not take overwrite.
1 parent 1c98881 commit 3db1539

5 files changed: +12 -37 lines changed

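For orientation, a minimal usage sketch of the save overloads as they look after this change. The SparkContext/SQLContext setup, input file, and output paths below are hypothetical; only the shapes of the save(...) calls come from the diffs that follow.

  import org.apache.spark.{SparkConf, SparkContext}
  import org.apache.spark.sql.SQLContext

  // Hypothetical setup for the sketch.
  val sc = new SparkContext(new SparkConf().setAppName("save-sketch").setMaster("local[2]"))
  val sqlContext = new SQLContext(sc)
  val df = sqlContext.jsonFile("/tmp/people.json")

  // Save with the default data source (sqlContext.conf.defaultDataSourceName);
  // there is no longer an overwrite parameter.
  df.save("/tmp/people-default")

  // Save with an explicit data source name and (key, value) options.
  df.save("org.apache.spark.sql.json", ("path", "/tmp/people-json"))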

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 0 additions & 5 deletions
@@ -519,20 +519,15 @@ trait DataFrame extends DataFrameSpecificApi with RDDApi[Row] {
   @Experimental
   override def save(path: String): Unit
 
-  @Experimental
-  override def save(path: String, overwrite: Boolean): Unit
-
   @Experimental
   override def save(
       dataSourceName: String,
-      overwrite: Boolean,
       option: (String, String),
       options: (String, String)*): Unit
 
   @Experimental
   override def save(
       dataSourceName: String,
-      overwrite: Boolean,
       options: java.util.Map[String, String]): Unit
 
   /**

sql/core/src/main/scala/org/apache/spark/sql/DataFrameImpl.scala

Lines changed: 2 additions & 8 deletions
@@ -343,28 +343,22 @@ private[sql] class DataFrameImpl protected[sql](
   }
 
   override def save(path: String): Unit = {
-    save(path, false)
-  }
-
-  override def save(path: String, overwrite: Boolean): Unit = {
     val dataSourceName = sqlContext.conf.defaultDataSourceName
-    save(dataSourceName, overwrite, ("path" -> path))
+    save(dataSourceName, ("path" -> path))
   }
 
   override def save(
       dataSourceName: String,
-      overwrite: Boolean,
       option: (String, String),
       options: (String, String)*): Unit = {
     ResolvedDataSource(sqlContext, dataSourceName, (option +: options).toMap, this)
   }
 
   override def save(
       dataSourceName: String,
-      overwrite: Boolean,
       options: java.util.Map[String, String]): Unit = {
     val opts = options.toSeq
-    save(dataSourceName, overwrite, opts.head, opts.tail:_*)
+    save(dataSourceName, opts.head, opts.tail:_*)
   }
 
   override def insertInto(tableName: String, overwrite: Boolean): Unit = {

sql/core/src/main/scala/org/apache/spark/sql/IncomputableColumn.scala

Lines changed: 0 additions & 4 deletions
@@ -165,17 +165,13 @@ private[sql] class IncomputableColumn(protected[sql] val expr: Expression) exten
 
   override def save(path: String): Unit = err()
 
-  override def save(path: String, overwrite: Boolean): Unit = err()
-
   override def save(
       dataSourceName: String,
-      overwrite: Boolean,
       option: (String, String),
       options: (String, String)*): Unit = err()
 
   override def save(
       dataSourceName: String,
-      overwrite: Boolean,
       options: java.util.Map[String, String]): Unit = err()
 
   override def insertInto(tableName: String, overwrite: Boolean): Unit = err()

sql/core/src/main/scala/org/apache/spark/sql/api.scala

Lines changed: 0 additions & 5 deletions
@@ -186,20 +186,15 @@ private[sql] trait DataFrameSpecificApi {
   @Experimental
   def save(path: String): Unit
 
-  @Experimental
-  def save(path: String, overwrite: Boolean): Unit
-
   @Experimental
   def save(
       dataSourceName: String,
-      overwrite: Boolean,
       option: (String, String),
       options: (String, String)*): Unit
 
   @Experimental
   def save(
       dataSourceName: String,
-      overwrite: Boolean,
       options: java.util.Map[String, String]): Unit
 
   @Experimental

sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala

Lines changed: 10 additions & 15 deletions
@@ -60,34 +60,29 @@ class SaveLoadSuite extends DataSourceTest with BeforeAndAfterAll {
   }
 
   test("save with overwrite and load") {
-    df.save(path.toString, true)
-
+    df.save(path.toString)
     checkLoad
   }
 
   test("save with data source and options, and load") {
-    df.save("org.apache.spark.sql.json", true, ("path", path.toString))
-
+    df.save("org.apache.spark.sql.json", ("path", path.toString))
     checkLoad
   }
 
-  test("save and save again without overwrite") {
-    df.save(path.toString, true)
+  test("save and save again") {
+    df.save(path.toString)
 
-    val exception = intercept[RuntimeException] {
+    val message = intercept[RuntimeException] {
       df.save(path.toString)
-    }
+    }.getMessage
 
     assert(
-      exception.getMessage.contains("JSON table only support INSERT OVERWRITE for now."),
-      "JSON table should only support INSERT OVERWRITE for now.")
-  }
+      message.contains("already exists"),
+      "We should complain that the path already exists.")
 
-  test("save and save again with overwrite") {
-    df.save(path.toString, true)
-    df.save(path.toString, true)
+    if (path.exists()) Utils.deleteRecursively(path)
 
+    df.save(path.toString)
     checkLoad
   }
-
 }
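The updated suite encodes the new failure mode: with no overwrite flag, a second save to the same path is expected to fail. A rough sketch of that expectation outside the test harness, where df and outPath are hypothetical placeholders:

  // Assuming df is a DataFrame and outPath points at the output location.
  df.save(outPath)               // first save succeeds
  try {
    df.save(outPath)             // saving again to the same path now throws
  } catch {
    case e: RuntimeException =>
      assert(e.getMessage.contains("already exists"))
  }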
