
Commit 58443e2: address comments

1 parent: 086eeb5

File tree: 12 files changed (+156, -83 lines)


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala

Lines changed: 5 additions & 1 deletion

@@ -42,6 +42,10 @@ class Iso8601DateFormatter(
   @transient
   private lazy val formatter = getOrCreateFormatter(pattern, locale)
 
+  @transient
+  private val allowMissingYear =
+    SQLConf.get.getConf(SQLConf.LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING)
+
   @transient
   private lazy val legacyFormatter = DateFormatter.getLegacyFormatter(
     pattern, zoneId, locale, legacyFormat)
@@ -50,7 +54,7 @@ class Iso8601DateFormatter(
     val specialDate = convertSpecialDate(s.trim, zoneId)
     specialDate.getOrElse {
       try {
-        val localDate = toLocalDate(formatter.parse(s))
+        val localDate = toLocalDate(formatter.parse(s), allowMissingYear)
        localDateToDays(localDate)
      } catch checkDiffResult(s, legacyFormatter.parse)
    }
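In effect, each Iso8601DateFormatter now snapshots the legacy flag from the active SQLConf when it is created and threads it into toLocalDate. A minimal sketch of the resulting behavior under the default configuration (mirroring the updated "missing date fields" test below; not part of the commit itself):

import java.time.ZoneOffset
import org.apache.spark.SparkUpgradeException
import org.apache.spark.sql.catalyst.util.DateFormatter

// "HH" carries no year field, so parsing now fails by default instead of
// silently assuming 1970 as Spark 2.x did.
val formatter = DateFormatter("HH", ZoneOffset.UTC)
try formatter.parse("20")
catch { case e: SparkUpgradeException => println(e.getMessage) }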

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala

Lines changed: 18 additions & 7 deletions

@@ -39,22 +39,33 @@ trait DateTimeFormatterHelper {
     }
   }
 
-  protected def toLocalDate(temporalAccessor: TemporalAccessor): LocalDate = {
-    val year = getOrDefault(temporalAccessor, ChronoField.YEAR, 1970)
-    val month = getOrDefault(temporalAccessor, ChronoField.MONTH_OF_YEAR, 1)
-    val day = getOrDefault(temporalAccessor, ChronoField.DAY_OF_MONTH, 1)
+  protected def toLocalDate(accessor: TemporalAccessor, allowMissingYear: Boolean): LocalDate = {
+    val year = if (accessor.isSupported(ChronoField.YEAR)) {
+      accessor.get(ChronoField.YEAR)
+    } else if (allowMissingYear) {
+      // To keep backward compatibility with Spark 2.x, we pick 1970 as the default value of year.
+      1970
+    } else {
+      throw new SparkUpgradeException("3.0",
+        "Year must be given in the date/timestamp string to be parsed. You can set " +
+          SQLConf.LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING.key + " to true, to pick 1970 as " +
+          "the default value of year.", null)
+    }
+    val month = getOrDefault(accessor, ChronoField.MONTH_OF_YEAR, 1)
+    val day = getOrDefault(accessor, ChronoField.DAY_OF_MONTH, 1)
     LocalDate.of(year, month, day)
   }
 
   // Converts the parsed temporal object to ZonedDateTime. It sets time components to zeros
   // if they does not exist in the parsed object.
   protected def toZonedDateTime(
       temporalAccessor: TemporalAccessor,
-      zoneId: ZoneId): ZonedDateTime = {
+      zoneId: ZoneId,
+      allowMissingYear: Boolean): ZonedDateTime = {
     val hour = if (temporalAccessor.isSupported(ChronoField.HOUR_OF_DAY)) {
       temporalAccessor.get(ChronoField.HOUR_OF_DAY)
     } else if (temporalAccessor.isSupported(ChronoField.HOUR_OF_AMPM)) {
-      // When we reach here, is mean am/pm is not specified. Here we assume it's am.
+      // When we reach here, it means am/pm is not specified. Here we assume it's am.
       temporalAccessor.get(ChronoField.HOUR_OF_AMPM)
     } else {
       0
@@ -63,7 +74,7 @@ trait DateTimeFormatterHelper {
     val second = getOrDefault(temporalAccessor, ChronoField.SECOND_OF_MINUTE, 0)
     val nanoSecond = getOrDefault(temporalAccessor, ChronoField.NANO_OF_SECOND, 0)
     val localTime = LocalTime.of(hour, minute, second, nanoSecond)
-    val localDate = toLocalDate(temporalAccessor)
+    val localDate = toLocalDate(temporalAccessor, allowMissingYear)
     ZonedDateTime.of(localDate, localTime, zoneId)
   }
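For reference, a self-contained sketch of the new year-resolution rule using plain java.time (resolveYear is a hypothetical stand-in for the branch added to toLocalDate above, with SparkUpgradeException swapped for a generic exception):

import java.time.format.DateTimeFormatter
import java.time.temporal.{ChronoField, TemporalAccessor}

// Prefer an explicit year; fall back to 1970 only when explicitly allowed.
def resolveYear(accessor: TemporalAccessor, allowMissingYear: Boolean): Int = {
  if (accessor.isSupported(ChronoField.YEAR)) {
    accessor.get(ChronoField.YEAR)
  } else if (allowMissingYear) {
    1970 // the Spark 2.x compatible default
  } else {
    throw new IllegalArgumentException("year field is missing")
  }
}

// A pattern without a year leaves ChronoField.YEAR unsupported in the result.
val noYear = DateTimeFormatter.ofPattern("MM-dd").parse("12-31")
assert(resolveYear(noYear, allowMissingYear = true) == 1970)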

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala

Lines changed: 5 additions & 1 deletion

@@ -64,6 +64,10 @@ class Iso8601TimestampFormatter(
   protected lazy val formatter: DateTimeFormatter =
     getOrCreateFormatter(pattern, locale, needVarLengthSecondFraction)
 
+  @transient
+  private val allowMissingYear =
+    SQLConf.get.getConf(SQLConf.LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING)
+
   @transient
   protected lazy val legacyFormatter = TimestampFormatter.getLegacyFormatter(
     pattern, zoneId, locale, legacyFormat)
@@ -75,7 +79,7 @@ class Iso8601TimestampFormatter(
       val parsed = formatter.parse(s)
       val parsedZoneId = parsed.query(TemporalQueries.zone())
       val timeZoneId = if (parsedZoneId == null) zoneId else parsedZoneId
-      val zonedDateTime = toZonedDateTime(parsed, timeZoneId)
+      val zonedDateTime = toZonedDateTime(parsed, timeZoneId, allowMissingYear)
       val epochSeconds = zonedDateTime.toEpochSecond
       val microsOfSecond = zonedDateTime.get(MICRO_OF_SECOND)
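The context lines above also show the zone-resolution rule: a zone parsed from the string wins over the formatter's session zone. A small java.time illustration of that query, independent of Spark:

import java.time.format.DateTimeFormatter
import java.time.temporal.TemporalQueries

// TemporalQueries.zone() yields the zone (or offset) found in the string,
// or null when the pattern carries no zone information.
val withZone = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ssXXX")
  .parse("2020-01-01 00:00:00+05:30")
assert(withZone.query(TemporalQueries.zone()) != null) // session zone is ignored

val noZone = DateTimeFormatter.ofPattern("yyyy-MM-dd").parse("2020-01-01")
assert(noZone.query(TemporalQueries.zone()) == null) // falls back to zoneId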

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 9 additions & 0 deletions

@@ -2586,6 +2586,15 @@ object SQLConf {
     .checkValue(_ > 0, "The timeout value must be positive")
     .createWithDefault(10L)
 
+  val LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING =
+    buildConf("spark.sql.legacy.allowMissingYearDuringParsing")
+      .internal()
+      .doc("When true, DateFormatter/TimestampFormatter allows parsing date/timestamp string " +
+        "without the year field, and pick 1970 as the default value.")
+      .version("3.0.0")
+      .booleanConf
+      .createWithDefault(false)
+
   /**
    * Holds information about keys that have been deprecated.
    *
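A hypothetical usage sketch for the new flag (assumes a local SparkSession; the flag is internal, so this is meant only for workloads migrating from Spark 2.x):

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").getOrCreate()
// Restore the Spark 2.x behavior: a missing year defaults to 1970 during
// parsing instead of raising SparkUpgradeException.
spark.conf.set("spark.sql.legacy.allowMissingYearDuringParsing", "true")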

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/UnivocityParserSuite.scala

Lines changed: 7 additions & 7 deletions

@@ -325,30 +325,30 @@ class UnivocityParserSuite extends SparkFunSuite with SQLHelper {
     assert(parser.makeConverter("t", TimestampType).apply("2020-1-12 12:3:45") ==
       date(2020, 1, 12, 12, 3, 45, 0))
     assert(parser.makeConverter("t", DateType).apply("2020-1-12") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
     // The legacy format allows arbitrary length of second fraction.
     assert(parser.makeConverter("t", TimestampType).apply("2020-1-12 12:3:45.1") ==
       date(2020, 1, 12, 12, 3, 45, 100000))
     assert(parser.makeConverter("t", TimestampType).apply("2020-1-12 12:3:45.1234") ==
       date(2020, 1, 12, 12, 3, 45, 123400))
     // The legacy format allow date string to end with T or space, with arbitrary string
     assert(parser.makeConverter("t", DateType).apply("2020-1-12T") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
     assert(parser.makeConverter("t", DateType).apply("2020-1-12Txyz") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
     assert(parser.makeConverter("t", DateType).apply("2020-1-12 ") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
     assert(parser.makeConverter("t", DateType).apply("2020-1-12 xyz") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
     // The legacy format ignores the "GMT" from the string
     assert(parser.makeConverter("t", TimestampType).apply("2020-1-12 12:3:45GMT") ==
       date(2020, 1, 12, 12, 3, 45, 0))
     assert(parser.makeConverter("t", TimestampType).apply("GMT2020-1-12 12:3:45") ==
       date(2020, 1, 12, 12, 3, 45, 0))
     assert(parser.makeConverter("t", DateType).apply("2020-1-12GMT") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
     assert(parser.makeConverter("t", DateType).apply("GMT2020-1-12") ==
-      days(2020, 1, 12, 0, 0, 0))
+      days(2020, 1, 12))
   }
 
   val options = new CSVOptions(Map.empty[String, String], false, "UTC")

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala

Lines changed: 2 additions & 6 deletions

@@ -88,12 +88,8 @@ object DateTimeTestUtils {
   def days(
       year: Int,
       month: Byte = 1,
-      day: Byte = 1,
-      hour: Byte = 0,
-      minute: Byte = 0,
-      sec: Byte = 0): Int = {
-    val micros = date(year, month, day, hour, minute, sec)
-    TimeUnit.MICROSECONDS.toDays(micros).toInt
+      day: Byte = 1): Int = {
+    LocalDate.of(year, month, day).toEpochDay.toInt
   }
 
   // Returns microseconds since epoch for current date and give time
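The simplified days helper reflects that a Catalyst date value is just a count of days since the epoch, so the time components it used to accept were dead weight. A quick standalone sanity check of the new implementation's arithmetic (a sketch, not part of the suite):

import java.time.LocalDate

// 27 years from 1970 (with 7 leap days) plus 58 days into 1997 = 9920 days.
assert(LocalDate.of(1997, 2, 28).toEpochDay.toInt == 9920)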

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -386,13 +386,13 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
   }
 
   test("date add months") {
-    val input = days(1997, 2, 28, 10, 30)
+    val input = days(1997, 2, 28)
     assert(dateAddMonths(input, 36) === days(2000, 2, 28))
     assert(dateAddMonths(input, -13) === days(1996, 1, 28))
   }
 
   test("date add interval with day precision") {
-    val input = days(1997, 2, 28, 10, 30)
+    val input = days(1997, 2, 28)
     assert(dateAddInterval(input, new CalendarInterval(36, 0, 0)) === days(2000, 2, 28))
     assert(dateAddInterval(input, new CalendarInterval(36, 47, 0)) === days(2000, 4, 15))
     assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0)) === days(1996, 1, 28))

sql/catalyst/src/test/scala/org/apache/spark/sql/util/DateFormatterSuite.scala

Lines changed: 13 additions & 8 deletions

@@ -22,6 +22,7 @@ import java.time.{DateTimeException, LocalDate, ZoneOffset}
 import org.apache.spark.{SparkFunSuite, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.util._
+import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, localDateToDays}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
@@ -116,11 +117,11 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
   test("parsing date without explicit day") {
     val formatter = DateFormatter("yyyy MMM", ZoneOffset.UTC)
     val daysSinceEpoch = formatter.parse("2018 Dec")
-    assert(daysSinceEpoch === LocalDate.of(2018, 12, 1).toEpochDay)
+    assert(daysSinceEpoch === days(2018, 12, 1))
   }
 
   test("formatting negative years with default pattern") {
-    val epochDays = LocalDate.of(-99, 1, 1).toEpochDay.toInt
+    val epochDays = days(-99, 1, 1)
     assert(DateFormatter(ZoneOffset.UTC).format(epochDays) === "-0099-01-01")
   }
 
@@ -139,7 +140,7 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
 
   test("SPARK-30958: parse date with negative year") {
     val formatter1 = DateFormatter("yyyy-MM-dd", ZoneOffset.UTC)
-    assert(formatter1.parse("-1234-02-22") === localDateToDays(LocalDate.of(-1234, 2, 22)))
+    assert(formatter1.parse("-1234-02-22") === days(-1234, 2, 22))
 
     def assertParsingError(f: => Unit): Unit = {
       intercept[Exception](f) match {
@@ -155,8 +156,8 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
     assertParsingError(formatter2.parse("BC -1234-02-22"))
     assertParsingError(formatter2.parse("AD 0000-02-22"))
 
-    assert(formatter2.parse("BC 1234-02-22") === localDateToDays(LocalDate.of(-1233, 2, 22)))
-    assert(formatter2.parse("AD 1234-02-22") === localDateToDays(LocalDate.of(1234, 2, 22)))
+    assert(formatter2.parse("BC 1234-02-22") === days(-1233, 2, 22))
+    assert(formatter2.parse("AD 1234-02-22") === days(1234, 2, 22))
   }
 
   test("SPARK-31557: rebasing in legacy formatters/parsers") {
@@ -178,8 +179,12 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
   }
 
   test("missing date fields") {
-    val formatter = DateFormatter("HH", ZoneOffset.UTC)
-    val daysSinceEpoch = formatter.parse("20")
-    assert(daysSinceEpoch === LocalDate.of(1970, 1, 1).toEpochDay)
+    // by default we don't allow missing year field.
+    intercept[SparkUpgradeException](DateFormatter("HH", ZoneOffset.UTC).parse("20"))
+    withSQLConf(SQLConf.LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING.key -> "true") {
+      val formatter = DateFormatter("HH", ZoneOffset.UTC)
+      val daysSinceEpoch = formatter.parse("20")
+      assert(daysSinceEpoch === days(1970, 1, 1))
+    }
   }
 }

sql/catalyst/src/test/scala/org/apache/spark/sql/util/TimestampFormatterSuite.scala

Lines changed: 28 additions & 41 deletions

@@ -25,7 +25,7 @@ import org.scalatest.Matchers
 import org.apache.spark.{SparkFunSuite, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils, LegacyDateFormats, TimestampFormatter}
-import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{CET, PST, UTC}
+import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
@@ -120,8 +120,7 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
   test("case insensitive parsing of am and pm") {
     val formatter = TimestampFormatter("yyyy MMM dd hh:mm:ss a", ZoneOffset.UTC)
     val micros = formatter.parse("2009 Mar 20 11:30:01 am")
-    assert(micros === TimeUnit.SECONDS.toMicros(
-      LocalDateTime.of(2009, 3, 20, 11, 30, 1).toEpochSecond(ZoneOffset.UTC)))
+    assert(micros === date(2009, 3, 20, 11, 30, 1))
   }
 
   test("format fraction of second") {
@@ -241,8 +240,7 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
 
   test("SPARK-30958: parse timestamp with negative year") {
     val formatter1 = TimestampFormatter("yyyy-MM-dd HH:mm:ss", ZoneOffset.UTC, true)
-    assert(formatter1.parse("-1234-02-22 02:22:22") === instantToMicros(
-      LocalDateTime.of(-1234, 2, 22, 2, 22, 22).toInstant(ZoneOffset.UTC)))
+    assert(formatter1.parse("-1234-02-22 02:22:22") === date(-1234, 2, 22, 2, 22, 22))
 
     def assertParsingError(f: => Unit): Unit = {
       intercept[Exception](f) match {
@@ -258,10 +256,8 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
     assertParsingError(formatter2.parse("BC -1234-02-22 02:22:22"))
     assertParsingError(formatter2.parse("AC 0000-02-22 02:22:22"))
 
-    assert(formatter2.parse("BC 1234-02-22 02:22:22") === instantToMicros(
-      LocalDateTime.of(-1233, 2, 22, 2, 22, 22).toInstant(ZoneOffset.UTC)))
-    assert(formatter2.parse("AD 1234-02-22 02:22:22") === instantToMicros(
-      LocalDateTime.of(1234, 2, 22, 2, 22, 22).toInstant(ZoneOffset.UTC)))
+    assert(formatter2.parse("BC 1234-02-22 02:22:22") === date(-1233, 2, 22, 2, 22, 22))
+    assert(formatter2.parse("AD 1234-02-22 02:22:22") === date(1234, 2, 22, 2, 22, 22))
   }
 
   test("SPARK-31557: rebasing in legacy formatters/parsers") {
@@ -281,9 +277,8 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
         .atZone(zoneId)
         .toLocalDateTime === LocalDateTime.of(1000, 1, 1, 1, 2, 3))
 
-      assert(formatter.format(instantToMicros(
-        LocalDateTime.of(1000, 1, 1, 1, 2, 3)
-          .atZone(zoneId).toInstant)) === "1000-01-01 01:02:03")
+      assert(formatter.format(date(1000, 1, 1, 1, 2, 3, zid = zoneId)) ===
+        "1000-01-01 01:02:03")
     }
   }
 }
@@ -295,19 +290,17 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
   test("parsing hour with various patterns") {
    def createFormatter(pattern: String): TimestampFormatter = {
      // Use `SIMPLE_DATE_FORMAT`, so that the legacy parser also fails with invalid value range.
-      TimestampFormatter(pattern, ZoneOffset.UTC, LegacyDateFormats.SIMPLE_DATE_FORMAT, false)
+      TimestampFormatter(pattern, UTC, LegacyDateFormats.SIMPLE_DATE_FORMAT, false)
    }
 
    withClue("HH") {
      val formatter = createFormatter("yyyy-MM-dd HH")
 
      val micros1 = formatter.parse("2009-12-12 00")
-      assert(micros1 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 0, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros1 === date(2009, 12, 12))
 
      val micros2 = formatter.parse("2009-12-12 15")
-      assert(micros2 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 15, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros2 === date(2009, 12, 12, 15))
 
      intercept[DateTimeException](formatter.parse("2009-12-12 24"))
    }
@@ -318,29 +311,24 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
      intercept[DateTimeException](formatter.parse("2009-12-12 00"))
 
      val micros1 = formatter.parse("2009-12-12 15")
-      assert(micros1 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 15, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros1 === date(2009, 12, 12, 15))
 
      val micros2 = formatter.parse("2009-12-12 24")
-      assert(micros2 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 0, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros2 === date(2009, 12, 12))
    }
 
    withClue("KK") {
      val formatter = createFormatter("yyyy-MM-dd KK a")
 
      val micros1 = formatter.parse("2009-12-12 00 am")
-      assert(micros1 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 0, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros1 === date(2009, 12, 12))
 
      // For `KK`, "12:00:00 am" is the same as "00:00:00 pm".
      val micros2 = formatter.parse("2009-12-12 12 am")
-      assert(micros2 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 12, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros2 === date(2009, 12, 12, 12))
 
      val micros3 = formatter.parse("2009-12-12 00 pm")
-      assert(micros3 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 12, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros3 === date(2009, 12, 12, 12))
 
      intercept[DateTimeException](formatter.parse("2009-12-12 12 pm"))
    }
@@ -351,35 +339,34 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
      intercept[DateTimeException](formatter.parse("2009-12-12 00 am"))
 
      val micros1 = formatter.parse("2009-12-12 12 am")
-      assert(micros1 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 0, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros1 === date(2009, 12, 12))
 
      intercept[DateTimeException](formatter.parse("2009-12-12 00 pm"))
 
      val micros2 = formatter.parse("2009-12-12 12 pm")
-      assert(micros2 === TimeUnit.SECONDS.toMicros(
-        LocalDateTime.of(2009, 12, 12, 12, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+      assert(micros2 === date(2009, 12, 12, 12))
    }
  }
 
  test("missing date fields") {
-    val formatter = TimestampFormatter("HH:mm:ss", ZoneOffset.UTC)
-    val micros = formatter.parse("11:30:01")
-    assert(micros === TimeUnit.SECONDS.toMicros(
-      LocalDateTime.of(1970, 1, 1, 11, 30, 1).toEpochSecond(ZoneOffset.UTC)))
+    // by default we don't allow missing year field.
+    intercept[SparkUpgradeException](TimestampFormatter("HH:mm:ss", UTC).parse("11:30:01"))
+    withSQLConf(SQLConf.LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING.key -> "true") {
+      val formatter = TimestampFormatter("HH:mm:ss", UTC)
+      val micros = formatter.parse("11:30:01")
+      assert(micros === date(1970, 1, 1, 11, 30, 1))
+    }
  }
 
  test("missing am/pm field") {
-    val formatter = TimestampFormatter("yyyy hh:mm:ss", ZoneOffset.UTC)
+    val formatter = TimestampFormatter("yyyy hh:mm:ss", UTC)
    val micros = formatter.parse("2009 11:30:01")
-    assert(micros === TimeUnit.SECONDS.toMicros(
-      LocalDateTime.of(2009, 1, 1, 11, 30, 1).toEpochSecond(ZoneOffset.UTC)))
+    assert(micros === date(2009, 1, 1, 11, 30, 1))
  }
 
  test("missing time fields") {
-    val formatter = TimestampFormatter("yyyy HH", ZoneOffset.UTC)
+    val formatter = TimestampFormatter("yyyy HH", UTC)
    val micros = formatter.parse("2009 11")
-    assert(micros === TimeUnit.SECONDS.toMicros(
-      LocalDateTime.of(2009, 1, 1, 11, 0, 0).toEpochSecond(ZoneOffset.UTC)))
+    assert(micros === date(2009, 1, 1, 11))
  }
 }
