diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index cc3fcb2a2a39..5deb83ef5624 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -378,7 +378,7 @@ object DateTimeUtils {
         i += 1
       }
     } else {
-      if (b == ':' || b == ' ') {
+      if (i < segments.length && (b == ':' || b == ' ')) {
         segments(i) = currentSegmentValue
         currentSegmentValue = 0
         i += 1
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index abdb91619ccb..2d055c7dddac 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -581,6 +581,12 @@ class DateTimeUtilsSuite extends SparkFunSuite {
     }
   }
 
+  test("trailing characters while converting string to timestamp") {
+    val s = UTF8String.fromString("2019-10-31T10:59:23Z:::")
+    val time = DateTimeUtils.stringToTimestamp(s, DateTimeUtils.defaultTimeZone())
+    assert(time == None)
+  }
+
   test("truncTimestamp") {
     def testTrunc(
         level: Int,