Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,13 @@ class LegacySimpleDateFormatter(pattern: String, locale: Locale) extends LegacyD
object DateFormatter {
import LegacyDateFormats._

val defaultLocale: Locale = Locale.US
/**
 * Before Spark 3.0, the first day-of-week was always Monday. Since Spark 3.0, it depends on the
 * locale.
 * We pick GB as the default locale instead of US, to stay compatible with Spark 2.x, because the
 * US locale uses Sunday as the first day-of-week. See SPARK-31879.
 */
val defaultLocale: Locale = new Locale("en", "GB")

val defaultPattern: String = "yyyy-MM-dd"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -278,7 +278,13 @@ object LegacyDateFormats extends Enumeration {
object TimestampFormatter {
import LegacyDateFormats._

val defaultLocale: Locale = Locale.US
/**
 * Before Spark 3.0, the first day-of-week was always Monday. Since Spark 3.0, it depends on the
 * locale.
 * We pick GB as the default locale instead of US, to stay compatible with Spark 2.x, because the
 * US locale uses Sunday as the first day-of-week. See SPARK-31879.
 */
val defaultLocale: Locale = new Locale("en", "GB")

def defaultPattern(): String = s"${DateFormatter.defaultPattern} HH:mm:ss"

Expand Down
4 changes: 4 additions & 0 deletions sql/core/src/test/resources/sql-tests/inputs/datetime.sql
Original file line number Diff line number Diff line change
Expand Up @@ -164,3 +164,7 @@ select from_csv('26/October/2015', 'date Date', map('dateFormat', 'dd/MMMMM/yyyy
select from_unixtime(1, 'yyyyyyyyyyy-MM-dd');
select date_format(timestamp '2018-11-17 13:33:33', 'yyyyyyyyyy-MM-dd HH:mm:ss');
select date_format(date '2018-11-17', 'yyyyyyyyyyy-MM-dd');

-- SPARK-31879: the first day of week
select date_format('2020-01-01', 'YYYY-MM-dd uu');
select date_format('2020-01-01', 'YYYY-MM-dd uuuu');
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 119
-- Number of queries: 121


-- !query
Expand Down Expand Up @@ -1025,3 +1025,19 @@ struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyyyyyyyyy-MM-dd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html


-- !query
select date_format('2020-01-01', 'YYYY-MM-dd uu')
-- !query schema
struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
-- !query output
2020-01-01 03


-- !query
select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
-- !query schema
struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
-- !query output
2020-01-01 Wednesday
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 119
-- Number of queries: 121


-- !query
Expand Down Expand Up @@ -980,3 +980,19 @@ select date_format(date '2018-11-17', 'yyyyyyyyyyy-MM-dd')
struct<date_format(CAST(DATE '2018-11-17' AS TIMESTAMP), yyyyyyyyyyy-MM-dd):string>
-- !query output
00000002018-11-17


-- !query
select date_format('2020-01-01', 'YYYY-MM-dd uu')
-- !query schema
struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
-- !query output
2020-01-01 03


-- !query
select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
-- !query schema
struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
-- !query output
2020-01-01 0003
18 changes: 17 additions & 1 deletion sql/core/src/test/resources/sql-tests/results/datetime.sql.out
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 119
-- Number of queries: 121


-- !query
Expand Down Expand Up @@ -997,3 +997,19 @@ struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyyyyyyyyy-MM-dd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html


-- !query
select date_format('2020-01-01', 'YYYY-MM-dd uu')
-- !query schema
struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
-- !query output
2020-01-01 03


-- !query
select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
-- !query schema
struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
-- !query output
2020-01-01 Wednesday