@@ -23,6 +23,8 @@ import java.time.{Instant, LocalDate, ZoneId}
 import java.util.{Calendar, Locale, TimeZone}
 import java.util.concurrent.TimeUnit._
 
+import scala.reflect.ClassTag
+
 import org.apache.spark.{SparkFunSuite, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
@@ -777,8 +779,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
           checkEvaluation(
             FromUnixTime(Literal(1000L), Literal.create(null, StringType), timeZoneId),
             null)
-          checkEvaluation(
-            FromUnixTime(Literal(0L), Literal("not a valid format"), timeZoneId), null)
 
           // SPARK-28072 The codegen path for non-literal input should also work
           checkEvaluation(
@@ -792,7 +792,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       }
     }
     // Test escaping of format
-    GenerateUnsafeProjection.generate(FromUnixTime(Literal(0L), Literal("\"quote"), UTC_OPT) :: Nil)
+    GenerateUnsafeProjection.generate(FromUnixTime(Literal(0L), Literal("\""), UTC_OPT) :: Nil)
   }
 
   test("unix_timestamp") {
@@ -854,15 +854,13 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
               UnixTimestamp(Literal(date1), Literal.create(null, StringType), timeZoneId),
               MICROSECONDS.toSeconds(
                 DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
-            checkEvaluation(
-              UnixTimestamp(Literal("2015-07-24"), Literal("not a valid format"), timeZoneId), null)
           }
         }
       }
     }
     // Test escaping of format
     GenerateUnsafeProjection.generate(
-      UnixTimestamp(Literal("2015-07-24"), Literal("\"quote"), UTC_OPT) :: Nil)
+      UnixTimestamp(Literal("2015-07-24"), Literal("\""), UTC_OPT) :: Nil)
   }
 
   test("to_unix_timestamp") {
@@ -920,10 +918,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
             Literal(date1), Literal.create(null, StringType), timeZoneId),
             MICROSECONDS.toSeconds(
               DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), zid)))
-          checkEvaluation(
-            ToUnixTimestamp(
-              Literal("2015-07-24"),
-              Literal("not a valid format"), timeZoneId), null)
 
           // SPARK-28072 The codegen path for non-literal input should also work
           checkEvaluation(
@@ -940,7 +934,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
     // Test escaping of format
     GenerateUnsafeProjection.generate(
-      ToUnixTimestamp(Literal("2015-07-24"), Literal("\"quote"), UTC_OPT) :: Nil)
+      ToUnixTimestamp(Literal("2015-07-24"), Literal("\""), UTC_OPT) :: Nil)
   }
 
   test("datediff") {
@@ -1171,34 +1165,28 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
 
   test("Disable week-based date fields and quarter fields for parsing") {
 
-    def checkSparkUpgrade(c: Char): Unit = {
-      checkExceptionInExpression[SparkUpgradeException](
-        new ParseToTimestamp(Literal("1"), Literal(c.toString)).child, "3.0")
-      checkExceptionInExpression[SparkUpgradeException](
-        new ParseToDate(Literal("1"), Literal(c.toString)).child, "3.0")
-      checkExceptionInExpression[SparkUpgradeException](
-        ToUnixTimestamp(Literal("1"), Literal(c.toString)), "3.0")
-      checkExceptionInExpression[SparkUpgradeException](
-        UnixTimestamp(Literal("1"), Literal(c.toString)), "3.0")
+    def checkException[T <: Exception : ClassTag](c: String, onlyParsing: Boolean = false): Unit = {
+      checkExceptionInExpression[T](new ParseToTimestamp(Literal("1"), Literal(c)).child, c)
+      checkExceptionInExpression[T](new ParseToDate(Literal("1"), Literal(c)).child, c)
+      checkExceptionInExpression[T](ToUnixTimestamp(Literal("1"), Literal(c)), c)
+      checkExceptionInExpression[T](UnixTimestamp(Literal("1"), Literal(c)), c)
+      if (!onlyParsing) {
+        checkExceptionInExpression[T](DateFormatClass(CurrentTimestamp(), Literal(c)), c)
+        checkExceptionInExpression[T](FromUnixTime(Literal(0L), Literal(c)), c)
+      }
     }
 
-    def checkNullify(c: Char): Unit = {
-      checkEvaluation(new ParseToTimestamp(Literal("1"), Literal(c.toString)).child, null)
-      checkEvaluation(new ParseToDate(Literal("1"), Literal(c.toString)).child, null)
-      checkEvaluation(ToUnixTimestamp(Literal("1"), Literal(c.toString)), null)
-      checkEvaluation(UnixTimestamp(Literal("1"), Literal(c.toString)), null)
-    }
+    val unsupportedLettersForParsing = Set('E', 'F', 'q', 'Q')
 
     Seq('Y', 'W', 'w', 'E', 'u', 'F').foreach { l =>
-      checkSparkUpgrade(l)
+      checkException[SparkUpgradeException](l.toString, unsupportedLettersForParsing.contains(l))
     }
 
-    Seq('q', 'Q').foreach { l =>
-      checkNullify(l)
+    Seq('q', 'Q', 'e', 'c', 'A', 'n', 'N', 'p').foreach { l =>
+      checkException[IllegalArgumentException](l.toString, unsupportedLettersForParsing.contains(l))
     }
   }
 
-
   test("SPARK-31896: Handle am-pm timestamp parsing when hour is missing") {
     checkEvaluation(
       new ParseToTimestamp(Literal("PM"), Literal("a")).child,