Skip to content

Commit d107d50

Browse files
committed
Remove the duplicated test
1 parent a96e510 commit d107d50

File tree

2 files changed

+17
-36
lines changed

2 files changed

+17
-36
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala

Lines changed: 0 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -744,26 +744,6 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
744744
}
745745
}
746746
}
747-
748-
test("SPARK-13543: Support for specifying compression codec for Parquet via option()") {
749-
withSQLConf(SQLConf.PARQUET_COMPRESSION.key -> "UNCOMPRESSED") {
750-
withTempPath { dir =>
751-
val path = s"${dir.getCanonicalPath}/table1"
752-
val df = (1 to 5).map(i => (i, (i % 2).toString)).toDF("a", "b")
753-
df.write
754-
.option("compression", "GzIP")
755-
.parquet(path)
756-
757-
val compressedFiles = new File(path).listFiles()
758-
assert(compressedFiles.exists(_.getName.endsWith(".gz.parquet")))
759-
760-
val copyDf = sqlContext
761-
.read
762-
.parquet(path)
763-
checkAnswer(df, copyDf)
764-
}
765-
}
766-
}
767747
}
768748

769749
class JobCommitFailureParquetOutputCommitter(outputPath: Path, context: TaskAttemptContext)

sql/hive/src/test/scala/org/apache/spark/sql/sources/ParquetHadoopFsRelationSuite.scala

Lines changed: 17 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -209,22 +209,23 @@ class ParquetHadoopFsRelationSuite extends HadoopFsRelationTest {
209209
}
210210
}
211211

212-
test("SPARK-13543: Support for specifying compression codec for ORC via option()") {
213-
withTempPath { dir =>
214-
val path = s"${dir.getCanonicalPath}/table1"
215-
val df = (1 to 5).map(i => (i, (i % 2).toString)).toDF("a", "b")
216-
df.write
217-
.option("compression", "sNaPpy")
218-
.parquet(path)
219-
220-
val compressedFiles = new File(path).listFiles()
221-
assert(compressedFiles.exists(_.getName.endsWith(".snappy")))
222-
223-
val copyDf = sqlContext
224-
.read
225-
.parquet(path)
226-
227-
checkAnswer(df, copyDf)
212+
test("SPARK-13543: Support for specifying compression codec for Parquet via option()") {
213+
withSQLConf(SQLConf.PARQUET_COMPRESSION.key -> "UNCOMPRESSED") {
214+
withTempPath { dir =>
215+
val path = s"${dir.getCanonicalPath}/table1"
216+
val df = (1 to 5).map(i => (i, (i % 2).toString)).toDF("a", "b")
217+
df.write
218+
.option("compression", "GzIP")
219+
.parquet(path)
220+
221+
val compressedFiles = new File(path).listFiles()
222+
assert(compressedFiles.exists(_.getName.endsWith(".gz.parquet")))
223+
224+
val copyDf = sqlContext
225+
.read
226+
.parquet(path)
227+
checkAnswer(df, copyDf)
228+
}
228229
}
229230
}
230231
}

0 commit comments

Comments (0)