@@ -22,16 +22,19 @@ import org.apache.spark.sql.test.SharedSQLContext
 class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext {
   import testImplicits._

-  Seq("orc", "parquet", "csv", "json", "text").foreach { format =>
+  private val allFileBasedDataSources = Seq("orc", "parquet", "csv", "json", "text")
+
+  allFileBasedDataSources.foreach { format =>
     test(s"Writing empty datasets should not fail - $format") {
       withTempPath { dir =>
         Seq("str").toDS().limit(0).write.format(format).save(dir.getCanonicalPath)
       }
     }
   }

-  Seq("orc", "parquet", "csv", "json").foreach { format =>
-    test(s"SPARK-23072 Write and read back unicode schema - $format") {
+  // `TEXT` data source always has a single column whose name is `value`.
+  allFileBasedDataSources.filterNot(_ == "text").foreach { format =>
+    test(s"SPARK-23072 Write and read back unicode column names - $format") {
       withTempPath { path =>
         val dir = path.getCanonicalPath

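The `filterNot(_ == "text")` above reflects a fixed property of the text source rather than anything specific to this PR. A minimal sketch of that behavior (the local path and the column name `anything` are illustrative, not taken from the patch):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").appName("text-schema-demo").getOrCreate()
import spark.implicits._

val path = "/tmp/text-schema-demo"  // illustrative path

// Whatever the column is called when writing...
Seq("a", "b").toDF("anything").write.mode("overwrite").text(path)

// ...the text source always reads it back as a single string column named "value",
// so it cannot round-trip arbitrary (e.g. unicode) column names.
assert(spark.read.text(path).schema.fieldNames.sameElements(Array("value")))

spark.stop()
```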
@@ -42,12 +45,14 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext {
         df.write.format(format).option("header", "true").save(dir)
         val answerDf = spark.read.format(format).option("header", "true").load(dir)

-        assert(df.schema === answerDf.schema)
+        assert(df.schema.sameType(answerDf.schema))
         checkAnswer(df, answerDf)
       }
     }
   }

+  // Only ORC/Parquet support this. `CSV` and `JSON` return an empty schema.
+  // `TEXT` data source always has a single column whose name is `value`.
   Seq("orc", "parquet").foreach { format =>
     test(s"SPARK-15474 Write and read back non-emtpy schema with empty dataframe - $format") {
       withTempPath { file =>
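The switch from `===` to `sameType` in the hunk above is the one semantic change here: one likely reason is that file-based sources report their columns as nullable on read, so strict schema equality can fail even when names and types match. A self-contained sketch of the difference (the column name `c` is made up):

```scala
import org.apache.spark.sql.types.{StringType, StructField, StructType}

// Two schemas that differ only in nullability.
val written  = StructType(Seq(StructField("c", StringType, nullable = false)))
val readBack = StructType(Seq(StructField("c", StringType, nullable = true)))

assert(written != readBack)          // strict equality also compares nullability
assert(written.sameType(readBack))   // sameType ignores nullability differences
```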
@@ -62,7 +67,7 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext {
     }
   }

-  Seq("orc", "parquet", "csv", "json", "text").foreach { format =>
+  allFileBasedDataSources.foreach { format =>
     test(s"SPARK-22146 read files containing special characters using $format") {
       val nameWithSpecialChars = s"sp&cial%chars"
       withTempDir { dir =>
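For the SPARK-15474 case, whose closing braces open the last hunk, the property being exercised is that ORC/Parquet keep the schema in file metadata even when no rows are written. A hedged sketch with Parquet only (the path is illustrative; the suite itself covers ORC as well):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{StringType, StructField, StructType}

val spark = SparkSession.builder().master("local[1]").appName("empty-df-schema-demo").getOrCreate()
import spark.implicits._

val path = "/tmp/empty-df-schema-demo"  // illustrative path

// Zero rows, but a non-empty schema (`value: string` from toDS() on a Seq[String]).
Seq("str").toDS().limit(0).write.mode("overwrite").parquet(path)

// The schema survives the round trip even though the dataframe was empty.
val readBack = spark.read.parquet(path)
assert(readBack.count() == 0)
assert(readBack.schema.sameType(StructType(Seq(StructField("value", StringType)))))

spark.stop()
```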