
Commit e999f2c

Update
1 parent 0edfed4 commit e999f2c

File tree: 1 file changed (+6, -3 lines)

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 6 additions & 3 deletions
@@ -1306,7 +1306,10 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
 
     // Decode and input/output format.
     type Format = (Seq[(String, String)], Option[String], Seq[(String, String)], Option[String])
-    def format(fmt: RowFormatContext, configKey: String, configValue: String): Format = fmt match {
+    def format(
+        fmt: RowFormatContext,
+        configKey: String,
+        defaultConfigValue: String): Format = fmt match {
       case c: RowFormatDelimitedContext =>
         // TODO we should use the visitRowFormatDelimited function here. However HiveScriptIOSchema
         // expects a seq of pairs in which the old parsers' token names are used as keys.
@@ -1329,7 +1332,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
 
         // SPARK-10310: Special cases LazySimpleSerDe
         val recordHandler = if (name == "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe") {
-          Try(conf.getConfString(configKey, configValue)).toOption
+          Option(conf.getConfString(configKey, defaultConfigValue))
         } else {
           None
         }
@@ -1340,7 +1343,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
         val name = conf.getConfString("hive.script.serde",
           "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")
         val props = Seq("field.delim" -> "\t")
-        val recordHandler = Try(conf.getConfString(configKey, configValue)).toOption
+        val recordHandler = Option(conf.getConfString(configKey, defaultConfigValue))
         (Nil, Option(name), props, recordHandler)
     }

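For context, the difference between the old and new lookup is a general Scala one: Try(expr).toOption turns any exception thrown by expr into None, while Option(expr) only guards against a null result and lets exceptions propagate to the caller. A minimal sketch of that distinction follows; the lookup helper below is hypothetical and merely stands in for SQLConf.getConfString(key, default).

import scala.util.Try

// Hypothetical stand-in for SQLConf.getConfString(key, default): returns the
// default value unless the key is "broken.key", in which case it throws.
def lookup(key: String, default: String): String =
  if (key == "broken.key") throw new IllegalArgumentException(s"undefined key: $key")
  else default

// Old style: Try(...).toOption silently converts the exception into None.
val viaTry: Option[String] = Try(lookup("broken.key", "fallback")).toOption         // None

// New style: Option(...) only wraps a possible null; an exception would propagate.
val viaOption: Option[String] = Option(lookup("hive.script.recordreader", "fallback"))  // Some("fallback")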