
Commit 9fe8fca

Update comments and remove the support for codec for JSON and TEXT
1 parent f82a2f4 commit 9fe8fca

File tree

3 files changed, +11 −23 lines


sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 9 additions & 15 deletions
@@ -453,11 +453,9 @@ final class DataFrameWriter private[sql](df: DataFrame) {
    * format("json").save(path)
    * }}}
    *
-   * You can set the following JSON-specific options for writing JSON files:
-   * <li>`compression` or `codec` (default `null`): compression codec to use when saving to file.
-   * This should be the fully qualified name of a class implementing
-   * [[org.apache.hadoop.io.compress.CompressionCodec]] or one of the known case-insensitive
-   * shorten names(`bzip2`, `gzip`, `lz4`, and `snappy`). </li>
+   * You can set the following JSON-specific option(s) for writing JSON files:
+   * <li>`compression` (default `null`): compression codec to use when saving to file. This can be
+   * one of the known case-insensitive shorten names (`bzip2`, `gzip`, `lz4`, and `snappy`). </li>
    *
    * @since 1.4.0
    */
@@ -498,11 +496,9 @@ final class DataFrameWriter private[sql](df: DataFrame) {
    * df.write().text("/path/to/output")
    * }}}
    *
-   * You can set the following options for writing text files:
-   * <li>`compression` or `codec` (default `null`): compression codec to use when saving to file.
-   * This should be the fully qualified name of a class implementing
-   * [[org.apache.hadoop.io.compress.CompressionCodec]] or one of the known case-insensitive
-   * shorten names(`bzip2`, `gzip`, `lz4`, and `snappy`). </li>
+   * You can set the following option(s) for writing text files:
+   * <li>`compression` (default `null`): compression codec to use when saving to file. This can be
+   * one of the known case-insensitive shorten names (`bzip2`, `gzip`, `lz4`, and `snappy`). </li>
    *
    * @since 1.6.0
    */
@@ -515,11 +511,9 @@ final class DataFrameWriter private[sql](df: DataFrame) {
    * format("csv").save(path)
    * }}}
    *
-   * You can set the following CSV-specific options for writing CSV files:
-   * <li>`compression` or `codec` (default `null`): compression codec to use when saving to file.
-   * This should be the fully qualified name of a class implementing
-   * [[org.apache.hadoop.io.compress.CompressionCodec]] or one of the known case-insensitive
-   * shorten names(`bzip2`, `gzip`, `lz4`, and `snappy`). </li>
+   * You can set the following CSV-specific option(s) for writing CSV files:
+   * <li>`compression` (default `null`): compression codec to use when saving to file. This can be
+   * one of the known case-insensitive shorten names (`bzip2`, `gzip`, `lz4`, and `snappy`). </li>
    *
    * @since 2.0.0
    */
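
For context, the documented option is passed through DataFrameWriter.option before the format-specific save call. A minimal usage sketch (the DataFrame `df` and the output paths are placeholders, not part of this commit):

  // After this change only the `compression` key is honoured; the former `codec` alias is gone.
  df.write
    .option("compression", "gzip")   // bzip2, gzip, lz4, or snappy (case-insensitive)
    .json("/tmp/output-json")

  df.write
    .option("compression", "bzip2")
    .text("/tmp/output-text")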

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONOptions.scala

Lines changed: 1 addition & 4 deletions
@@ -48,10 +48,7 @@ private[sql] class JSONOptions(
     parameters.get("allowNonNumericNumbers").map(_.toBoolean).getOrElse(true)
   val allowBackslashEscapingAnyCharacter =
     parameters.get("allowBackslashEscapingAnyCharacter").map(_.toBoolean).getOrElse(false)
-  val compressionCodec = {
-    val name = parameters.get("compression").orElse(parameters.get("codec"))
-    name.map(CompressionCodecs.getCodecClassName)
-  }
+  val compressionCodec = parameters.get("compression").map(CompressionCodecs.getCodecClassName)

   /** Sets config options on a Jackson [[JsonFactory]]. */
   def setJacksonOptions(factory: JsonFactory): Unit = {
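
The practical effect of the JSONOptions change above is that only the `compression` key is consulted when building `compressionCodec`. A behavioural sketch with hypothetical option maps:

  val withCompression = Map("compression" -> "gzip")
  val withCodecAlias  = Map("codec" -> "gzip")

  withCompression.get("compression")  // Some("gzip") -> resolved to a codec class name
  withCodecAlias.get("compression")   // None -> written uncompressed; the `codec` key is now ignored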

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/text/DefaultSource.scala

Lines changed: 1 addition & 4 deletions
@@ -115,10 +115,7 @@ private[sql] class TextRelation(
   /** Write path. */
   override def prepareJobForWrite(job: Job): OutputWriterFactory = {
     val conf = job.getConfiguration
-    val compressionCodec = {
-      val name = parameters.get("compression").orElse(parameters.get("codec"))
-      name.map(CompressionCodecs.getCodecClassName)
-    }
+    val compressionCodec = parameters.get("compression").map(CompressionCodecs.getCodecClassName)
     compressionCodec.foreach { codec =>
       CompressionCodecs.setCodecConfiguration(conf, codec)
     }
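
The shortened names listed in the updated scaladoc are resolved to Hadoop codec classes by CompressionCodecs.getCodecClassName, which is not part of this diff. The sketch below only illustrates the kind of case-insensitive lookup that helper is assumed to perform:

  import org.apache.hadoop.io.compress.{BZip2Codec, GzipCodec, Lz4Codec, SnappyCodec}

  // Illustration only, not the real CompressionCodecs implementation.
  object ShortNameLookup {
    private val shortNames = Map(
      "bzip2"  -> classOf[BZip2Codec].getName,
      "gzip"   -> classOf[GzipCodec].getName,
      "lz4"    -> classOf[Lz4Codec].getName,
      "snappy" -> classOf[SnappyCodec].getName)

    // Unknown names are passed through unchanged here; the real helper also validates them.
    def getCodecClassName(name: String): String =
      shortNames.getOrElse(name.toLowerCase, name)
  }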
