Skip to content

Commit 2e18093

Browse files
committed
remove legacy config
1 parent ea49eed commit 2e18093

File tree

2 files changed

+3
-15
lines changed

2 files changed

+3
-15
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1711,15 +1711,6 @@ object SQLConf {
     .booleanConf
     .createWithDefault(false)

-  val LEGACY_PASS_PARTITION_BY_AS_OPTIONS =
-    buildConf("spark.sql.legacy.sources.write.passPartitionByAsOptions")
-      .internal()
-      .doc("Whether to pass the partitionBy columns as options in DataFrameWriter. " +
-        "Data source V1 now silently drops partitionBy columns for non-file-format sources; " +
-        "turning the flag on provides a way for these sources to see these partitionBy columns.")
-      .booleanConf
-      .createWithDefault(true)
-
   val NAME_NON_STRUCT_GROUPING_KEY_AS_VALUE =
     buildConf("spark.sql.legacy.dataset.nameNonStructGroupingKeyAsValue")
       .internal()

sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -316,12 +316,9 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   }

   private def saveToV1Source(): Unit = {
-    if (SparkSession.active.sessionState.conf.getConf(
-        SQLConf.LEGACY_PASS_PARTITION_BY_AS_OPTIONS)) {
-      partitioningColumns.foreach { columns =>
-        extraOptions += (DataSourceUtils.PARTITIONING_COLUMNS_KEY ->
-          DataSourceUtils.encodePartitioningColumns(columns))
-      }
+    partitioningColumns.foreach { columns =>
+      extraOptions += (DataSourceUtils.PARTITIONING_COLUMNS_KEY ->
+        DataSourceUtils.encodePartitioningColumns(columns))
     }

     // Code path for data source v1.

0 commit comments

Comments (0)