Commit a3f87bf

move partitioning check
1 parent ca08d3b commit a3f87bf

2 files changed: +5 -8

sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 4 additions & 5 deletions
@@ -254,11 +254,6 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
 
     val maybeV2Provider = lookupV2Provider()
     if (maybeV2Provider.isDefined) {
-      if (partitioningColumns.nonEmpty) {
-        throw new AnalysisException(
-          "Cannot write data to TableProvider implementation if partition columns are specified.")
-      }
-
       val provider = maybeV2Provider.get
       val sessionOptions = DataSourceV2Utils.extractSessionConfigs(
         provider, df.sparkSession.sessionState.conf)
@@ -268,6 +263,10 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
       import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits._
       provider.getTable(dsOptions) match {
         case table: SupportsWrite if table.supports(BATCH_WRITE) =>
+          if (partitioningColumns.nonEmpty) {
+            throw new AnalysisException("Cannot write data to TableProvider implementation " +
+              "if partition columns are specified.")
+          }
           lazy val relation = DataSourceV2Relation.create(table, dsOptions)
           modeForDSV2 match {
             case SaveMode.Append =>
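
The effect of the move: the AnalysisException is no longer raised up front for every DataSourceV2 TableProvider, but only once the provider's table has been resolved and taken the BATCH_WRITE branch. A table that lacks BATCH_WRITE (for example one that writes through the V1 fallback path) can now be written with partitioning columns. A minimal sketch of the user-facing call, assuming a registered V2 source; the short name "v1-fallback-source" is hypothetical:

import org.apache.spark.sql.{SaveMode, SparkSession}

object PartitionByFallbackExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("partitionBy-v1-fallback")
      .getOrCreate()
    import spark.implicits._

    val df = Seq((1, "a"), (2, "b")).toDF("id", "part")

    // Before this commit: AnalysisException for any V2 TableProvider.
    // After: the exception is raised only when the resolved table
    // supports BATCH_WRITE; a V1 fallback table proceeds to save.
    df.write
      .format("v1-fallback-source") // hypothetical provider name
      .partitionBy("part")
      .mode(SaveMode.Append)
      .save()

    spark.stop()
  }
}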

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/V1WriteFallbackSuite.scala

Lines changed: 1 addition & 3 deletions
@@ -24,10 +24,9 @@ import scala.collection.mutable
 
 import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row, SaveMode, SparkSession}
+import org.apache.spark.sql.{DataFrame, QueryTest, Row, SaveMode, SparkSession}
 import org.apache.spark.sql.catalog.v2.expressions.{FieldReference, IdentityTransform, Transform}
 import org.apache.spark.sql.connector.InMemoryTable
-import org.apache.spark.sql.internal.SQLConf.{PARTITION_OVERWRITE_MODE, PartitionOverwriteMode}
 import org.apache.spark.sql.sources.{DataSourceRegister, Filter, InsertableRelation}
 import org.apache.spark.sql.sources.v2.utils.TestV2SessionCatalogBase
 import org.apache.spark.sql.sources.v2.writer.{SupportsOverwrite, SupportsTruncate, V1WriteBuilder, WriteBuilder}
@@ -144,7 +143,6 @@ class InMemoryTableWithV1Fallback(
   }
 
   override def capabilities: util.Set[TableCapability] = Set(
-    TableCapability.BATCH_WRITE,
     TableCapability.V1_BATCH_WRITE,
     TableCapability.OVERWRITE_BY_FILTER,
     TableCapability.TRUNCATE).asJava
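
Dropping BATCH_WRITE from the test table's capabilities forces it down the V1 fallback write path, which the relocated check no longer blocks when partition columns are present. A self-contained sketch of the capability-gated dispatch this exercises, using simplified stand-ins rather than Spark's own classes:

object CapabilityGateSketch {
  sealed trait Capability
  case object BatchWrite extends Capability
  case object V1BatchWrite extends Capability

  // Stand-in for a V2 table: supports() just consults the capability set.
  final case class Table(capabilities: Set[Capability]) {
    def supports(c: Capability): Boolean = capabilities.contains(c)
  }

  def main(args: Array[String]): Unit = {
    // Mirrors the suite's table after this commit: no BatchWrite.
    val v1FallbackTable = Table(Set(V1BatchWrite))
    val partitioningColumns = Seq("part")

    v1FallbackTable match {
      case t if t.supports(BatchWrite) =>
        // Only this branch enforces the "no partition columns" rule now.
        require(partitioningColumns.isEmpty,
          "Cannot write data to TableProvider implementation " +
            "if partition columns are specified.")
      case _ =>
        println("falls through to the V1 write path; partitionBy allowed")
    }
  }
}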
