Skip to content

Commit afb070c

Browse files
committed
fix
1 parent 58443e2 commit afb070c

File tree

1 file changed

+27
-16
lines changed

1 file changed

+27
-16
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala

Lines changed: 27 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.util
2020
import java.time._
2121
import java.time.chrono.IsoChronology
2222
import java.time.format.{DateTimeFormatter, DateTimeFormatterBuilder, ResolverStyle}
23-
import java.time.temporal.{ChronoField, TemporalAccessor}
23+
import java.time.temporal.{ChronoField, TemporalAccessor, TemporalQueries}
2424
import java.util.Locale
2525

2626
import com.google.common.cache.CacheBuilder
@@ -40,6 +40,10 @@ trait DateTimeFormatterHelper {
4040
}
4141

4242
protected def toLocalDate(accessor: TemporalAccessor, allowMissingYear: Boolean): LocalDate = {
43+
val localDate = accessor.query(TemporalQueries.localDate())
44+
// If all the date fields are specified, return the local date directly.
45+
if (localDate != null) return localDate
46+
4347
val year = if (accessor.isSupported(ChronoField.YEAR)) {
4448
accessor.get(ChronoField.YEAR)
4549
} else if (allowMissingYear) {
@@ -56,25 +60,32 @@ trait DateTimeFormatterHelper {
5660
LocalDate.of(year, month, day)
5761
}
5862

63+
private def toLocalTime(accessor: TemporalAccessor): LocalTime = {
64+
val localTime = accessor.query(TemporalQueries.localTime())
65+
// If all the time fields are specified, return the local time directly.
66+
if (localTime != null) return localTime
67+
val hour = if (accessor.isSupported(ChronoField.HOUR_OF_DAY)) {
68+
accessor.get(ChronoField.HOUR_OF_DAY)
69+
} else if (accessor.isSupported(ChronoField.HOUR_OF_AMPM)) {
70+
// When we reach here, it means am/pm is not specified. Here we assume it's am.
71+
accessor.get(ChronoField.HOUR_OF_AMPM)
72+
} else {
73+
0
74+
}
75+
val minute = getOrDefault(accessor, ChronoField.MINUTE_OF_HOUR, 0)
76+
val second = getOrDefault(accessor, ChronoField.SECOND_OF_MINUTE, 0)
77+
val nanoSecond = getOrDefault(accessor, ChronoField.NANO_OF_SECOND, 0)
78+
LocalTime.of(hour, minute, second, nanoSecond)
79+
}
80+
5981
// Converts the parsed temporal object to ZonedDateTime. It sets time components to zeros
6082
// if they does not exist in the parsed object.
6183
protected def toZonedDateTime(
6284
temporalAccessor: TemporalAccessor,
6385
zoneId: ZoneId,
6486
allowMissingYear: Boolean): ZonedDateTime = {
65-
val hour = if (temporalAccessor.isSupported(ChronoField.HOUR_OF_DAY)) {
66-
temporalAccessor.get(ChronoField.HOUR_OF_DAY)
67-
} else if (temporalAccessor.isSupported(ChronoField.HOUR_OF_AMPM)) {
68-
// When we reach here, it means am/pm is not specified. Here we assume it's am.
69-
temporalAccessor.get(ChronoField.HOUR_OF_AMPM)
70-
} else {
71-
0
72-
}
73-
val minute = getOrDefault(temporalAccessor, ChronoField.MINUTE_OF_HOUR, 0)
74-
val second = getOrDefault(temporalAccessor, ChronoField.SECOND_OF_MINUTE, 0)
75-
val nanoSecond = getOrDefault(temporalAccessor, ChronoField.NANO_OF_SECOND, 0)
76-
val localTime = LocalTime.of(hour, minute, second, nanoSecond)
7787
val localDate = toLocalDate(temporalAccessor, allowMissingYear)
88+
val localTime = toLocalTime(temporalAccessor)
7889
ZonedDateTime.of(localDate, localTime, zoneId)
7990
}
8091

@@ -108,12 +119,12 @@ trait DateTimeFormatterHelper {
108119
case e: DateTimeException if SQLConf.get.legacyTimeParserPolicy == EXCEPTION =>
109120
try {
110121
legacyParseFunc(s)
111-
throw new SparkUpgradeException("3.0", s"Fail to parse '$s' in the new parser. You can " +
112-
s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the behavior " +
113-
s"before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.", e)
114122
} catch {
115123
case _: Throwable => throw e
116124
}
125+
throw new SparkUpgradeException("3.0", s"Fail to parse '$s' in the new parser. You can " +
126+
s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the behavior " +
127+
s"before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.", e)
117128
}
118129
}
119130

0 commit comments

Comments
 (0)