PartitioningUtils.scala
@@ -119,6 +119,13 @@ object PartitioningUtils {
       Map.empty[String, DataType]
     }

+    // SPARK-26990: use user specified field names if case insensitive.
+    val userSpecifiedNames = if (userSpecifiedSchema.isDefined && !caseSensitive) {
+      CaseInsensitiveMap(userSpecifiedSchema.get.fields.map(f => f.name -> f.name).toMap)
+    } else {
+      Map.empty[String, String]
+    }
+
     // First, we need to parse every partition's path and see if we can find partition values.
     val (partitionValues, optDiscoveredBasePaths) = paths.map { path =>
       parsePartition(path, typeInference, basePaths, userSpecifiedDataTypes, timeZone)
@@ -163,7 +170,9 @@ object PartitioningUtils {
       columnNames.zip(literals).map { case (name, Literal(_, dataType)) =>
         // We always assume partition columns are nullable since we've no idea whether null values
         // will be appended in the future.
-        StructField(name, userSpecifiedDataTypes.getOrElse(name, dataType), nullable = true)
+        val resultName = userSpecifiedNames.getOrElse(name, name)
+        val resultDataType = userSpecifiedDataTypes.getOrElse(name, dataType)
+        StructField(resultName, resultDataType, nullable = true)
       }
     }

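Reviewer note: a minimal sketch of the name-resolution idea in the hunks above, assuming only CaseInsensitiveMap from org.apache.spark.sql.catalyst.util; the resolve helper below is illustrative and not part of the patch. Because the map is keyed and valued by the user-specified name, a case-insensitive lookup on the directory-derived name returns the user's spelling, and names with no user-specified counterpart fall through unchanged.

import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap

// The user-specified partition schema spells the column "A"; the map stores the
// user's spelling as both key and value, so a case-insensitive hit returns "A".
val userSpecifiedNames = CaseInsensitiveMap(Map("A" -> "A"))

// Illustrative helper mirroring the getOrElse call in the second hunk: a name
// parsed from a directory such as "a=foo" resolves to the user's "A", while a
// name without a user-specified counterpart is kept as parsed.
def resolve(parsedName: String): String =
  userSpecifiedNames.getOrElse(parsedName, parsedName)

resolve("a")      // returns "A"
resolve("other")  // returns "other"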
FileIndexSuite.scala
@@ -65,6 +65,21 @@ class FileIndexSuite extends SharedSQLContext {
     }
   }

+  test("SPARK-26990: use user specified field names if possible") {
+    withTempDir { dir =>
+      val partitionDirectory = new File(dir, "a=foo")
+      partitionDirectory.mkdir()
+      val file = new File(partitionDirectory, "text.txt")
+      stringToFile(file, "text")
+      val path = new Path(dir.getCanonicalPath)
+      val schema = StructType(Seq(StructField("A", StringType, false)))
+      withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
+        val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
+        assert(fileIndex.partitionSchema.length == 1 && fileIndex.partitionSchema.head.name == "A")
+      }
+    }
+  }
+
   test("InMemoryFileIndex: input paths are converted to qualified paths") {
     withTempDir { dir =>
       val file = new File(dir, "text.txt")
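Reviewer note: the new test only exercises spark.sql.caseSensitive=false. Per the guard in the first hunk (userSpecifiedSchema.isDefined && !caseSensitive), the substitution is skipped under case-sensitive analysis, so the directory-derived spelling would be kept there. Below is a hedged sketch of both settings, reusing the same setup as the test above (an active SparkSession named spark, a temp directory dir containing a single a=foo partition, and the same imports as FileIndexSuite); the expected result under caseSensitive=true is an inference from the guard, not something this test asserts.

import org.apache.hadoop.fs.Path
import org.apache.spark.sql.execution.datasources.InMemoryFileIndex
import org.apache.spark.sql.types.{StringType, StructField, StructType}

val schema = StructType(Seq(StructField("A", StringType, nullable = false)))
val path = new Path(dir.getCanonicalPath)  // dir laid out as in the test above

// Case-insensitive analysis: the user-specified spelling is expected to win.
spark.conf.set("spark.sql.caseSensitive", "false")
new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
  .partitionSchema.head.name  // expected: "A"

// Case-sensitive analysis: the guard skips the substitution, so the name parsed
// from the "a=foo" directory is expected to be kept.
spark.conf.set("spark.sql.caseSensitive", "true")
new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
  .partitionSchema.head.name  // expected: "a"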