@@ -19,25 +19,28 @@ package org.apache.spark.sql
 
 import java.sql.{Date, Timestamp}
 import java.text.SimpleDateFormat
+import java.time.LocalDate
 import java.util.Locale
 import java.util.concurrent.TimeUnit
 
-import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.catalyst.util.DateTimeUtils._
 import org.apache.spark.sql.functions._
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.unsafe.types.CalendarInterval
 
 class DateFunctionsSuite extends QueryTest with SharedSQLContext {
   import testImplicits._
 
   test("function current_date") {
-    val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
-    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
-    val d1 = DateTimeUtils.fromJavaDate(df1.select(current_date()).collect().head.getDate(0))
-    val d2 = DateTimeUtils.fromJavaDate(
-      sql("""SELECT CURRENT_DATE()""").collect().head.getDate(0))
-    val d3 = DateTimeUtils.millisToDays(System.currentTimeMillis())
-    assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
+    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
+      val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
+      val d0 = System.currentTimeMillis() / MILLIS_PER_DAY
+      val d1 = localDateToDays(df1.select(current_date()).collect().head.getAs[LocalDate](0))
+      val d2 = localDateToDays(sql("""SELECT CURRENT_DATE()""").collect().head.getAs[LocalDate](0))
+      val d3 = System.currentTimeMillis() / MILLIS_PER_DAY
+      assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
+    }
   }
 
   test("function current_timestamp and now") {
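For readers unfamiliar with the day arithmetic above: System.currentTimeMillis() / MILLIS_PER_DAY and localDateToDays (both from DateTimeUtils) each count whole days since the Unix epoch. Below is a minimal standalone sketch of that equivalence using only java.time; the object and constant names are illustrative, not part of the patch, and the check assumes the instant is interpreted in UTC.

import java.time.{Instant, ZoneOffset}

object EpochDayCheck {
  // Illustrative stand-in for DateTimeUtils.MILLIS_PER_DAY: milliseconds in one day.
  val MillisPerDay: Long = 24L * 60 * 60 * 1000

  def main(args: Array[String]): Unit = {
    val now = Instant.now()
    // Days since 1970-01-01 via integer division of epoch milliseconds.
    val fromMillis = now.toEpochMilli / MillisPerDay
    // The same count via java.time: view the instant in UTC, take toEpochDay.
    val fromLocalDate = now.atZone(ZoneOffset.UTC).toLocalDate.toEpochDay
    assert(fromMillis == fromLocalDate)
    println(s"days since epoch: $fromMillis")
  }
}

In the test itself, d1 and d2 come from the session's current date while d0 and d3 come from raw millisecond division, so the final assertion allows at most a one-day skew (d3 - d0 <= 1) for runs that cross a day boundary.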