From eabbe2edc4b83fbd8e045f1ceb61cfc5ca917a70 Mon Sep 17 00:00:00 2001
From: Marios Trivyzas
Date: Sun, 8 Sep 2019 20:09:47 +0300
Subject: [PATCH 01/11] SQL: Implement DATE_TRUNC function

DATE_TRUNC(string_exp, datetime_exp) is a function that allows the user to
truncate a timestamp to the specified field by zeroing out the rest of the
fields. The function is implemented according to the spec from PostgreSQL:
https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC

Closes: #46319
---
 .../sql/functions/date-time.asciidoc          |  75 ++++++++-
 docs/reference/sql/functions/index.asciidoc   |   1 +
 .../qa/src/main/resources/command.csv-spec    |  17 +-
 .../qa/src/main/resources/datetime.csv-spec   |  96 ++++++++++-
 .../qa/src/main/resources/docs/docs.csv-spec  |  69 +++++++-
 .../xpack/sql/proto/StringUtils.java          |  14 ++
 .../xpack/sql/expression/TypeResolutions.java |   1 -
 .../expression/function/FunctionRegistry.java |   4 +-
 .../function/scalar/Processors.java           |   4 +-
 .../function/scalar/datetime/DateTrunc.java   | 158 ++++++++++++++++++
 .../scalar/datetime/DateTruncPipe.java        |  64 +++++++
 .../scalar/datetime/DateTruncProcessor.java   |  97 +++++++++++
 .../whitelist/InternalSqlScriptUtils.java     |   7 +-
 .../xpack/sql/util/DateUtils.java             |  93 +++++++++++
 .../xpack/sql/plugin/sql_whitelist.txt        |   1 +
 .../analyzer/VerifierErrorMessagesTests.java  |  20 +++
 .../function/scalar/FunctionTestUtils.java    |  15 +-
 .../scalar/datetime/DateTimeTestUtils.java    |   4 +
 .../scalar/datetime/DateTruncPipeTests.java   | 133 +++++++++++++++
 .../datetime/DateTruncProcessorTests.java     | 156 +++++++++++++++++
 .../sql/planner/QueryTranslatorTests.java     |  16 ++
 21 files changed, 1019 insertions(+), 26 deletions(-)
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java
 create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java
 create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java

diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc
index 45231393521c7..9118bebf8a7ac 100644
--- a/docs/reference/sql/functions/date-time.asciidoc
+++ b/docs/reference/sql/functions/date-time.asciidoc
@@ -8,7 +8,7 @@
 [[sql-functions-datetime-interval]]
 ==== Intervals

-A common requirement when dealing with date/time in general revolves around
+A common requirement when dealing with date/time in general revolves around
 the notion of `interval`, a topic that is worth exploring in the context of {es} and {es-sql}.
 {es} has comprehensive support for <> both inside <> and <>.
@@ -248,6 +248,79 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[filterNow]
--------------------------------------------------
Currently, using a _precision_ greater than 3 doesn't make any difference to the output of the function as the
maximum number of second fractional digits returned is 3 (milliseconds).
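To make the truncation rule concrete before the new `DATE_TRUNC` documentation below, here is a minimal `java.time` sketch of what truncating a timestamp to the `quarter` unit amounts to. It mirrors the `QUARTER` branch of the `DateUtils.truncate` method added later in this patch; the variable names (`ts`, `firstMonthOfQuarter`, `truncated`) are illustrative only and not part of the change.

[source, java]
--------------------------------------------------
// Sketch only (assumes java.time.ZonedDateTime is imported): truncate to the start of the quarter.
ZonedDateTime ts = ZonedDateTime.parse("2019-09-04T11:22:33.123Z");
int firstMonthOfQuarter = ((ts.getMonthValue() - 1) / 3) * 3 + 1;   // September -> July
ZonedDateTime truncated = ts
    .withMonth(firstMonthOfQuarter)
    .withDayOfMonth(1)
    .toLocalDate().atStartOfDay(ts.getZone());                      // 2019-07-01T00:00Z
--------------------------------------------------

All fields less significant than the chosen unit collapse to their minimum value, which is exactly the behaviour the section below describes as setting them to zero (or one, for day and month).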
+[[sql-functions-datetime-trunc]] +==== `DATE_TRUNC` + +.Synopsis: +[source, sql] +-------------------------------------------------- +DATE_TRUNC( + string_exp, <1> + datetime_exp) <2> +-------------------------------------------------- + +*Input*: + +<1> string expression denoting the unit to which the date/datetime should be truncated +<2> date/datetime expression + +*Output*: date/datetime, same as datetime_exp + +.Description: + +Truncate the date/datetime to the specified unit by setting all fields that are less significant than the specified +one to zero (or one, for day, day of week and month). + +[cols="^,^"] +|=== +2+h|Datetime truncation units + +s|unit +s|abbreviations + +| millennium | millennia +| century | centuries +| decade | decades +| year | years, yy, yyyy +| quarter | quarters, qq, q +| month | months, mm, m +| week | weeks, wk, ww +| day | days, dd, d +| hour | hours, hh +| minute | minutes, mi, n +| second | seconds, ss, s +| millisecond | milliseconds, ms +| microsecond | microseconds, mcs +| nanosecond | nanoseconds, ns +|=== + + + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateTimeMillennium] +-------------------------------------------------- + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateTimeWeek] +-------------------------------------------------- + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateTimeMinutes] +-------------------------------------------------- + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateDecades] +-------------------------------------------------- + +[source, sql] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[truncateDateQuarter] +-------------------------------------------------- + [[sql-functions-datetime-day]] ==== `DAY_OF_MONTH/DOM/DAY` diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc index 248c47452bab4..b36dafd024d8b 100644 --- a/docs/reference/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -51,6 +51,7 @@ ** <> ** <> ** <> +** <> ** <> ** <> ** <> diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec index 073788511d0f0..abb1175cb2f86 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec @@ -39,16 +39,17 @@ CURRENT_DATE |SCALAR CURRENT_TIME |SCALAR CURRENT_TIMESTAMP|SCALAR CURTIME |SCALAR +DATE_TRUNC |SCALAR DAY |SCALAR DAYNAME |SCALAR -DAYOFMONTH |SCALAR -DAYOFWEEK |SCALAR -DAYOFYEAR |SCALAR -DAY_NAME |SCALAR -DAY_OF_MONTH |SCALAR -DAY_OF_WEEK |SCALAR -DAY_OF_YEAR |SCALAR -DOM |SCALAR +DAYOFMONTH |SCALAR +DAYOFWEEK |SCALAR +DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR +DOM |SCALAR DOW |SCALAR DOY |SCALAR HOUR |SCALAR diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index 367b5d0ddfdcf..b6d5905df21ed 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -121,6 +121,100 @@ SELECT WEEK(birth_date) week, birth_date 
FROM test_emp WHERE WEEK(birth_date) > 2 |1953-01-07T00:00:00.000Z ; +selectDateTruncWithDateTime +schema::dt_hour:ts|dt_min:ts|dt_sec:ts|dt_millis:s|dt_micro:s|dt_nano:s +SELECT DATE_TRUNC('hour', '2019-09-04T11:22:33.123Z'::datetime) as dt_hour, DATE_TRUNC('minute', '2019-09-04T11:22:33.123Z'::datetime) as dt_min, +DATE_TRUNC('seconds', '2019-09-04T11:22:33.123Z'::datetime) as dt_sec, DATE_TRUNC('ms', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_millis, +DATE_TRUNC('mcs', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_micro, DATE_TRUNC('nanoseconds', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_nano; + + dt_hour | dt_min | dt_sec | dt_millis | dt_micro | dt_nano +-------------------------+---------------------------+--------------------------+--------------------------+--------------------------+------------------------- +2019-09-04T11:00:00.000Z | 2019-09-04T11:22:00.000Z | 2019-09-04T11:22:33.000Z | 2019-09-04T11:22:33.123Z | 2019-09-04T11:22:33.123Z | 2019-09-04T11:22:33.123Z +; + +selectDateTruncWithDate +schema::dt_mil:s|dt_cent:s|dt_dec:s|dt_year:s|dt_quarter:s|dt_month:s|dt_week:s|dt_day:s +SELECT DATE_TRUNC('millennia', '2019-09-04'::date)::string as dt_mil, DATE_TRUNC('century', '2019-09-04'::date)::string as dt_cent, +DATE_TRUNC('decades', '2019-09-04'::date)::string as dt_dec, DATE_TRUNC('year', '2019-09-04'::date)::string as dt_year, +DATE_TRUNC('quarter', '2019-09-04'::date)::string as dt_quarter, DATE_TRUNC('month', '2019-09-04'::date)::string as dt_month, +DATE_TRUNC('week', '2019-09-04'::date)::string as dt_week, DATE_TRUNC('day', '2019-09-04'::date)::string as dt_day; + + dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day +-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+------------------------- +2000-01-01T00:00:00.000Z | 2000-01-01T00:00:00.000Z | 2010-01-01T00:00:00.000Z | 2019-01-01T00:00:00.000Z | 2019-07-01T00:00:00.000Z | 2019-09-01T00:00:00.000Z | 2019-09-02T00:00:00.000Z | 2019-09-04T00:00:00.000Z +; + +selectDateTruncWithField +schema::emp_no:i|birth_date:ts|dt_mil:ts|dt_cent:ts|dt_dec:ts|dt_year:ts|dt_quarter:ts|dt_month:ts|dt_week:ts|dt_day:ts +SELECT emp_no, birth_date, DATE_TRUNC('millennium', birth_date) as dt_mil, DATE_TRUNC('centuries', birth_date) as dt_cent, +DATE_TRUNC('decades', birth_date) as dt_dec, DATE_TRUNC('year', birth_date) as dt_year, DATE_TRUNC('quarter', birth_date) as dt_quarter, +DATE_TRUNC('month', birth_date) as dt_month, DATE_TRUNC('week', birth_date) as dt_week, DATE_TRUNC('day', birth_date) as dt_day +FROM test_emp WHERE emp_no >= 10032 AND emp_no <= 10042 ORDER BY 1; + + emp_no | birth_date | dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day +--------+-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+------------------------- +10032 |1960-08-09 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-07-01 00:00:00.000Z | 1960-08-01 00:00:00.000Z | 1960-08-08 00:00:00.000Z | 1960-08-09 00:00:00.000Z +10033 |1956-11-14 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1956-01-01 00:00:00.000Z | 1956-10-01 00:00:00.000Z | 1956-11-01 
00:00:00.000Z | 1956-11-12 00:00:00.000Z | 1956-11-14 00:00:00.000Z +10034 |1962-12-29 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1962-01-01 00:00:00.000Z | 1962-10-01 00:00:00.000Z | 1962-12-01 00:00:00.000Z | 1962-12-24 00:00:00.000Z | 1962-12-29 00:00:00.000Z +10035 |1953-02-08 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1953-01-01 00:00:00.000Z | 1953-01-01 00:00:00.000Z | 1953-02-01 00:00:00.000Z | 1953-02-02 00:00:00.000Z | 1953-02-08 00:00:00.000Z +10036 |1959-08-10 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1959-01-01 00:00:00.000Z | 1959-07-01 00:00:00.000Z | 1959-08-01 00:00:00.000Z | 1959-08-10 00:00:00.000Z | 1959-08-10 00:00:00.000Z +10037 |1963-07-22 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1963-01-01 00:00:00.000Z | 1963-07-01 00:00:00.000Z | 1963-07-01 00:00:00.000Z | 1963-07-22 00:00:00.000Z | 1963-07-22 00:00:00.000Z +10038 |1960-07-20 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-01-01 00:00:00.000Z | 1960-07-01 00:00:00.000Z | 1960-07-01 00:00:00.000Z | 1960-07-18 00:00:00.000Z | 1960-07-20 00:00:00.000Z +10039 |1959-10-01 00:00:00.000Z | 0999-12-27 00:00:00.000Z | 1900-01-01 00:00:00.000Z | 1950-01-01 00:00:00.000Z | 1959-01-01 00:00:00.000Z | 1959-10-01 00:00:00.000Z | 1959-10-01 00:00:00.000Z | 1959-09-28 00:00:00.000Z | 1959-10-01 00:00:00.000Z +10040 | null | null | null | null | null | null | null | null | null +10041 | null | null | null | null | null | null | null | null | null +10042 | null | null | null | null | null | null | null | null | null +; + +dateTruncOrderBy +schema::emp_no:i|hire_date:ts|dt:ts +SELECT emp_no, hire_date, DATE_TRUNC('quarter', hire_date) as dt FROM test_emp ORDER BY dt NULLS LAST, emp_no LIMIT 5; + + emp_no | hire_date | dt +--------+--------------------------+------------------------- +10009 | 1985-02-18 00:00:00.000Z | 1985-01-01 00:00:00.000Z +10048 | 1985-02-24 00:00:00.000Z | 1985-01-01 00:00:00.000Z +10098 | 1985-05-13 00:00:00.000Z | 1985-04-01 00:00:00.000Z +10061 | 1985-09-17 00:00:00.000Z | 1985-07-01 00:00:00.000Z +10076 | 1985-07-09 00:00:00.000Z | 1985-07-01 00:00:00.000Z +; + +dateTruncFilter +schema::emp_no:i|hire_date:ts|dt:ts +SELECT emp_no, hire_date, DATE_TRUNC('quarter', hire_date) as dt FROM test_emp WHERE DATE_TRUNC('quarter', hire_date) > '1994-07-01T00:00:00.000Z'::timestamp ORDER BY emp_no; + + emp_no | hire_date | dt +--------+--------------------------+------------------------- +10016 | 1995-01-27 00:00:00.000Z | 1995-01-01 00:00:00.000Z +10019 | 1999-04-30 00:00:00.000Z | 1999-04-01 00:00:00.000Z +10022 | 1995-08-22 00:00:00.000Z | 1995-07-01 00:00:00.000Z +10024 | 1997-05-19 00:00:00.000Z | 1997-04-01 00:00:00.000Z +10026 | 1995-03-20 00:00:00.000Z | 1995-01-01 00:00:00.000Z +10054 | 1995-03-13 00:00:00.000Z | 1995-01-01 00:00:00.000Z +10084 | 1995-12-15 00:00:00.000Z | 1995-10-01 00:00:00.000Z +10093 | 1996-11-05 00:00:00.000Z | 1996-10-01 00:00:00.000Z +; + +dateTruncGroupBy +schema::count:l|dt:ts +SELECT count(*) as count, DATE_TRUNC('decade', hire_date) dt FROM test_emp GROUP BY dt ORDER BY 2; + + count | dt +--------+------------------------- +59 | 1980-01-01 00:00:00.000Z +41 | 1990-01-01 00:00:00.000Z +; + +dateTruncHaving +schema::gender:s|dt:ts +SELECT gender, max(hire_date) dt FROM test_emp GROUP BY gender HAVING 
DATE_TRUNC('year', max(hire_date)) >= '1997-01-01T00:00:00.000Z'::timestamp ORDER BY 1; + + gender | dt +--------+------------------------- +null | 1999-04-30 00:00:00.000Z +F | 1997-05-19 00:00:00.000Z +; + // // Aggregate // @@ -404,4 +498,4 @@ SELECT CAST (CAST (birth_date AS VARCHAR) AS TIMESTAMP) a FROM test_emp WHERE YE a:ts --------------- 1965-01-03T00:00:00Z -; \ No newline at end of file +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 3cb2c3f45d4aa..36f55bd8f6169 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -235,16 +235,17 @@ CURRENT_DATE |SCALAR CURRENT_TIME |SCALAR CURRENT_TIMESTAMP|SCALAR CURTIME |SCALAR +DATE_TRUNC |SCALAR DAY |SCALAR DAYNAME |SCALAR -DAYOFMONTH |SCALAR -DAYOFWEEK |SCALAR -DAYOFYEAR |SCALAR -DAY_NAME |SCALAR -DAY_OF_MONTH |SCALAR -DAY_OF_WEEK |SCALAR -DAY_OF_YEAR |SCALAR -DOM |SCALAR +DAYOFMONTH |SCALAR +DAYOFWEEK |SCALAR +DAYOFYEAR |SCALAR +DAY_NAME |SCALAR +DAY_OF_MONTH |SCALAR +DAY_OF_WEEK |SCALAR +DAY_OF_YEAR |SCALAR +DOM |SCALAR DOW |SCALAR DOY |SCALAR HOUR |SCALAR @@ -2412,6 +2413,58 @@ SELECT DAY_OF_MONTH(CAST('2018-02-19T10:23:27Z' AS TIMESTAMP)) AS day; // end::dayOfMonth ; +truncateDateTimeMillennium +// tag::truncateDateTimeMillennium +SELECT DATE_TRUNC('millennium', '2019-09-04T11:22:33.123Z'::datetime) AS millennium; + + millennium +------------------------ +2000-01-01T00:00:00.000Z +// end::truncateDateTimeMillennium +; + +truncateDateTimeWeek +// tag::truncateDateTimeWeek +SELECT DATE_TRUNC('week', '2019-08-24T11:22:33.123Z'::datetime) AS week; + + week +------------------------ +2019-08-19T00:00:00.000Z +// end::truncateDateTimeWeek +; + +truncateDateTimeMinutes +// tag::truncateDateTimeMinutes +SELECT DATE_TRUNC('mi', '2019-09-04T11:22:33.123Z'::datetime) AS mins; + + mins +------------------------ +2019-09-04T11:22:00.000Z +// end::truncateDateTimeMinutes +; + +truncateDateDecades +schema::decades:s +// tag::truncateDateDecades +SELECT DATE_TRUNC('decade', CAST('2019-09-04' AS DATE))::string AS decades; + + decades +------------------------ +2010-01-01T00:00:00.000Z +// end::truncateDateDecades +; + +truncateDateQuarter +schema::quarter:s +// tag::truncateDateQuarter +SELECT DATE_TRUNC('quarters', CAST('2019-09-04' AS DATE))::string AS quarter; + + quarter +------------------------ +2019-07-01T00:00:00.000Z +// end::truncateDateQuarter +; + constantDayOfWeek // tag::dayOfWeek SELECT DAY_OF_WEEK(CAST('2018-02-19T10:23:27Z' AS TIMESTAMP)) AS day; diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java index b7624ad7eee81..60b1875674457 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java @@ -22,6 +22,7 @@ import static java.time.temporal.ChronoField.HOUR_OF_DAY; import static java.time.temporal.ChronoField.MILLI_OF_SECOND; import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; public final class StringUtils { @@ -40,6 +41,19 @@ public final class StringUtils { .appendOffsetId() .toFormatter(IsoLocale.ROOT); + public static final 
DateTimeFormatter ISO_DATE_WITH_NANOS = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .append(ISO_LOCAL_DATE) + .appendLiteral('T') + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendFraction(NANO_OF_SECOND, 3, 9, true) + .appendOffsetId() + .toFormatter(IsoLocale.ROOT); + public static final DateTimeFormatter ISO_TIME_WITH_MILLIS = new DateTimeFormatterBuilder() .parseCaseInsensitive() .appendValue(HOUR_OF_DAY, 2) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java index d382dad83a19d..c465ab1b2deb8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java @@ -57,7 +57,6 @@ public static TypeResolution isNumericOrDateOrTime(Expression e, String operatio "date", "time", "datetime", "numeric"); } - public static TypeResolution isGeo(Expression e, String operationName, ParamOrdinal paramOrd) { return isType(e, DataType::isGeo, operationName, paramOrd, "geo_point", "geo_shape"); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 3a9ae06203476..0ebe256fe0999 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDate; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentDateTime; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.CurrentTime; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek; @@ -104,8 +105,8 @@ import org.elasticsearch.xpack.sql.expression.predicate.conditional.Case; import org.elasticsearch.xpack.sql.expression.predicate.conditional.Coalesce; import org.elasticsearch.xpack.sql.expression.predicate.conditional.Greatest; -import org.elasticsearch.xpack.sql.expression.predicate.conditional.Iif; import org.elasticsearch.xpack.sql.expression.predicate.conditional.IfNull; +import org.elasticsearch.xpack.sql.expression.predicate.conditional.Iif; import org.elasticsearch.xpack.sql.expression.predicate.conditional.Least; import org.elasticsearch.xpack.sql.expression.predicate.conditional.NullIf; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; @@ -193,6 +194,7 @@ private void defineDefaultFunctions() { def(DayOfMonth.class, DayOfMonth::new, "DAY_OF_MONTH", "DAYOFMONTH", "DAY", "DOM"), def(DayOfWeek.class, DayOfWeek::new, "DAY_OF_WEEK", "DAYOFWEEK", "DOW"), def(DayOfYear.class, DayOfYear::new, "DAY_OF_YEAR", "DAYOFYEAR", "DOY"), + def(DateTrunc.class, DateTrunc::new, "DATE_TRUNC"), def(HourOfDay.class, HourOfDay::new, "HOUR_OF_DAY", "HOUR"), def(IsoDayOfWeek.class, IsoDayOfWeek::new, "ISO_DAY_OF_WEEK", 
"ISODAYOFWEEK", "ISODOW", "IDOW"), def(IsoWeekOfYear.class, IsoWeekOfYear::new, "ISO_WEEK_OF_YEAR", "ISOWEEKOFYEAR", "ISOWEEK", "IWOY", "IW"), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index 0b9bbd1094a44..fa1eda8b15224 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -8,13 +8,14 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor; @@ -88,6 +89,7 @@ public static List getNamedWriteables() { entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new)); entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new)); entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new)); + entries.add(new Entry(Processor.class, DateTruncProcessor.NAME, DateTruncProcessor::new)); // math entries.add(new Entry(Processor.class, BinaryMathProcessor.NAME, BinaryMathProcessor::new)); entries.add(new Entry(Processor.class, BinaryOptionalMathProcessor.NAME, BinaryOptionalMathProcessor::new)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java new file mode 100644 index 0000000000000..3c1c0927f61ff --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.Nullability; +import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +import java.time.ZoneId; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate; +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor.process; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +public class DateTrunc extends BinaryScalarFunction { + + public enum DatePart { + + MILLENNIUM("millennia"), + CENTURY("centuries"), + DECADE("decades"), + YEAR("years", "yy", "yyyy"), + QUARTER("quarters", "qq", "q"), + MONTH("months", "mm", "m"), + WEEK("weeks", "wk", "ww"), + DAY("days", "dd", "d"), + HOUR("hours", "hh"), + MINUTE("minutes", "mi", "n"), + SECOND("seconds", "ss", "s"), + MILLISECOND("milliseconds", "ms"), + MICROSECOND("microseconds", "mcs"), + NANOSECOND("nanoseconds", "ns"); + + private Set aliases; + + DatePart(String... aliases) { + this.aliases = Set.of(aliases); + } + + public Set aliases() { + return aliases; + } + + public static DatePart resolveTruncate(String truncateTo) { + for (DatePart datePart : DatePart.values()) { + truncateTo = truncateTo.toLowerCase(Locale.ROOT); + if (datePart.name().equalsIgnoreCase(truncateTo) || datePart.aliases().contains(truncateTo)) { + return datePart; + } + } + return null; + } + } + + + private final ZoneId zoneId; + + public DateTrunc(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) { + super(source, truncateTo, timestamp); + this.zoneId = zoneId; + } + + @Override + public DataType dataType() { + return right().dataType(); + } + + @Override + protected TypeResolution resolveType() { + TypeResolution resolution = isString(left(), sourceText(), Expressions.ParamOrdinal.FIRST); + if (resolution.unresolved()) { + return resolution; + } + + if (left().foldable() && DatePart.resolveTruncate((String) left().fold()) == null) { + return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]", + sourceText(), + DatePart.values(), + Expressions.name(left()))); + } + resolution = isDate(right(), sourceText(), Expressions.ParamOrdinal.SECOND); + if (resolution.unresolved()) { + return resolution; + } + return TypeResolution.TYPE_RESOLVED; + } + + @Override + protected BinaryScalarFunction replaceChildren(Expression newTruncateTo, Expression newTimestamp) { + return new DateTrunc(source(), newTruncateTo, newTimestamp, zoneId); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, DateTrunc::new, left(), right(), zoneId); + } + + @Override + protected Pipe makePipe() { + return new DateTruncPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), zoneId); + } + 
+ @Override + public Nullability nullable() { + return Nullability.TRUE; + } + + @Override + public Object fold() { + return process(left().fold(), right().fold(), zoneId); + } + + @Override + protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) { + return new ScriptTemplate( + formatTemplate("{sql}.dateTrunc(" + leftScript.template() + "," + rightScript.template()+ ",{})"), + paramsBuilder() + .script(leftScript.params()) + .script(rightScript.params()) + .variable(zoneId.getId()) + .build(), + dataType()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + DateTrunc dateTrunc = (DateTrunc) o; + return Objects.equals(zoneId, dateTrunc.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), zoneId); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java new file mode 100644 index 0000000000000..a456883f788fd --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.time.ZoneId; +import java.util.Objects; + +public class DateTruncPipe extends BinaryPipe { + + private final ZoneId zoneId; + + public DateTruncPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) { + super(source, expression, left, right); + this.zoneId = zoneId; + } + + ZoneId zoneId() { + return zoneId; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, DateTruncPipe::new, expression(), left(), right(), zoneId); + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new DateTruncPipe(source(), expression(), left, right, zoneId); + } + + @Override + public DateTruncProcessor asProcessor() { + return new DateTruncProcessor(left().asProcessor(), right().asProcessor(), zoneId); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + DateTruncPipe that = (DateTruncPipe) o; + return zoneId.equals(that.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), zoneId); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java new file mode 100644 index 0000000000000..ed286d4ca9eea --- /dev/null +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.common.io.SqlStreamInput; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.util.DateUtils; + +import java.io.IOException; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.Objects; + +public class DateTruncProcessor extends BinaryProcessor { + + public static final String NAME = "dtrunc"; + + private final ZoneId zoneId; + + public DateTruncProcessor(Processor source1, Processor source2, ZoneId zoneId) { + super(source1, source2); + this.zoneId = zoneId; + } + + public DateTruncProcessor(StreamInput in) throws IOException { + super(in); + zoneId = SqlStreamInput.asSqlStream(in).zoneId(); + } + + ZoneId zoneId() { + return zoneId; + } + + @Override + protected Object doProcess(Object left, Object right) { + return process(left, right, zoneId); + } + + /** + * Used in Painless scripting + */ + public static Object process(Object source1, Object source2, String zoneId) { + return process(source1, source2, ZoneId.of(zoneId)); + } + + static Object process(Object source1, Object source2, ZoneId zoneId) { + if (source1 == null || source2 == null) { + return null; + } + if (!(source1 instanceof String)) { + throw new SqlIllegalArgumentException("A string is required; received [{}]", source1); + } + DateTrunc.DatePart truncateDateField = DateTrunc.DatePart.resolveTruncate((String) source1); + if (truncateDateField == null) { + throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", + DateTrunc.DatePart.values(), source2); + } + if (!(source2 instanceof ZonedDateTime)) { + throw new SqlIllegalArgumentException("A datetime/date is required; received [{}]", source2); + } + + return DateUtils.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId), truncateDateField); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected void doWrite(StreamOutput out) { + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DateTruncProcessor that = (DateTruncProcessor) o; + return zoneId.equals(that.zoneId); + } + + @Override + public int hashCode() { + return Objects.hash(zoneId); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index d39aec4423684..4c9153a2e1fd8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -10,14 +10,15 @@ import org.elasticsearch.script.JodaCompatibleZonedDateTime; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTruncProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor; -import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor.BinaryOptionalMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; @@ -369,6 +370,10 @@ public static Integer weekOfYear(Object dateTime, String tzId) { return NonIsoDateTimeExtractor.WEEK_OF_YEAR.extract(asDateTime(dateTime), tzId); } + public static ZonedDateTime dateTrunc(String truncateTo, Object dateTime, String tzId) { + return (ZonedDateTime) DateTruncProcessor.process(truncateTo, asDateTime(dateTime) ,tzId); + } + public static ZonedDateTime asDateTime(Object dateTime) { return (ZonedDateTime) asDateTime(dateTime, false); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java index 0f8afdd155215..cfd572fc701e6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -20,6 +21,7 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; +import java.time.temporal.ChronoField; import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE; import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; @@ -148,4 +150,95 @@ public static int getNanoPrecision(Expression precisionExpression, int nano) { nano = nano - nano % (int) Math.pow(10, (9 - precision)); return nano; } + + public static ZonedDateTime truncate(ZonedDateTime dateTime, DateTrunc.DatePart datePart) { + ZonedDateTime truncated = null; + switch (datePart) { + case MILLENNIUM: + int year = dateTime.getYear(); + int 
firstYearOfMillenium = year - (year % 1000); + truncated = dateTime + .with(ChronoField.YEAR, firstYearOfMillenium) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case CENTURY: + year = dateTime.getYear(); + int firstYearOfCentury = year - (year % 100); + truncated = dateTime + .with(ChronoField.YEAR, firstYearOfCentury) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case DECADE: + year = dateTime.getYear(); + int firstYearOfDecade = year - (year % 10); + truncated = dateTime + .with(ChronoField.YEAR, firstYearOfDecade) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case YEAR: + truncated = dateTime + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case QUARTER: + int month = dateTime.getMonthValue(); + int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1; + truncated = dateTime + .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case MONTH: + truncated = dateTime + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case WEEK: + truncated = dateTime + .with(ChronoField.DAY_OF_WEEK, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case DAY: + truncated = dateTime + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case HOUR: + int hour = dateTime.getHour(); + truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) + .with(ChronoField.HOUR_OF_DAY, hour); + break; + case MINUTE: + hour = dateTime.getHour(); + int minute = dateTime.getMinute(); + truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) + .with(ChronoField.HOUR_OF_DAY, hour) + .with(ChronoField.MINUTE_OF_HOUR, minute); + break; + case SECOND: + truncated = dateTime + .with(ChronoField.NANO_OF_SECOND, 0); + break; + case MILLISECOND: + int micros = dateTime.get(ChronoField.MICRO_OF_SECOND); + truncated = dateTime + .with(ChronoField.MILLI_OF_SECOND, (micros / 1000)); + break; + case MICROSECOND: + int nanos = dateTime.getNano(); + truncated = dateTime + .with(ChronoField.MICRO_OF_SECOND, (nanos / 1000)); + break; + case NANOSECOND: + truncated = dateTime; + break; + } + return truncated; + } } diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 6d24ea79f2bc2..b326aefea093f 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -115,6 +115,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String monthName(Object, String) Integer quarter(Object, String) Integer weekOfYear(Object, String) + ZonedDateTime dateTrunc(String, Object, String) IntervalDayTime intervalDayTime(String, String) IntervalYearMonth intervalYearMonth(String, String) ZonedDateTime asDateTime(Object) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index f10b1a402708f..9c34c6c4f2eef 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -203,6 +203,26 @@ public void testExtractNonDateTime() { assertEquals("1:8: Invalid datetime field [ABS]. Use any datetime function.", error("SELECT EXTRACT(ABS FROM date) FROM test")); } + public void testDateTruncInvalidArgs() { + assertEquals("1:8: first argument of [DATE_TRUNC(int, date)] must be [string], found value [int] type [integer]", + error("SELECT DATE_TRUNC(int, date) FROM test")); + assertEquals("1:8: second argument of [DATE_TRUNC(keyword, keyword)] must be [date or datetime], found value [keyword] " + + "type [keyword]", error("SELECT DATE_TRUNC(keyword, keyword) FROM test")); + assertEquals("1:8: first argument of [DATE_TRUNC('invalid', keyword)] must be one of [MILLENNIUM, CENTURY, DECADE, " + "" + + "YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND] " + + "or their aliases, found value ['invalid']", + error("SELECT DATE_TRUNC('invalid', keyword) FROM test")); + } + + public void testDateTruncValidArgs() { + accept("SELECT DATE_TRUNC('decade', date) FROM test"); + accept("SELECT DATE_TRUNC('decades', date) FROM test"); + accept("SELECT DATE_TRUNC('day', date) FROM test"); + accept("SELECT DATE_TRUNC('days', date) FROM test"); + accept("SELECT DATE_TRUNC('dd', date) FROM test"); + accept("SELECT DATE_TRUNC('d', date) FROM test"); + } + public void testValidDateTimeFunctionsOnTime() { accept("SELECT HOUR_OF_DAY(CAST(date AS TIME)) FROM test"); accept("SELECT MINUTE_OF_HOUR(CAST(date AS TIME)) FROM test"); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java index 3cc1b6d987dc6..8c0abf815c2a2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java @@ -9,6 +9,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Literal; +import java.time.Instant; +import java.time.ZonedDateTime; import java.util.BitSet; import java.util.Iterator; @@ -27,7 +29,11 @@ public static Literal randomStringLiteral() { public static Literal randomIntLiteral() { return l(ESTestCase.randomInt()); } - + + public static Literal randomDatetimeLiteral() { + return l(ZonedDateTime.ofInstant(Instant.ofEpochMilli(ESTestCase.randomLong()), ESTestCase.randomZone())); + } + public static class Combinations implements Iterable { private int n; private int k; @@ -39,8 +45,9 @@ public Combinations(int n, int k) { @Override public Iterator iterator() { - return new Iterator() { + return new Iterator<>() { BitSet bs = new BitSet(n); + { bs.set(0, k); } @@ -55,9 +62,9 @@ public BitSet next() { BitSet old = (BitSet) bs.clone(); int b = bs.previousClearBit(n - 1); int b1 = bs.previousSetBit(b); - if (b1 == -1) + if (b1 == -1) { bs = null; - else { + } else { bs.clear(b1); bs.set(b1 + 1, b1 + (n - b) + 1); bs.clear(b1 + (n - b) + 1, n); diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java index 13215eb41aebc..45bb3752123f6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java @@ -20,6 +20,10 @@ public static ZonedDateTime dateTime(int year, int month, int day, int hour, int return ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC); } + public static ZonedDateTime dateTime(int year, int month, int day, int hour, int minute, int seconds, int nanos) { + return ZonedDateTime.of(year, month, day, hour, minute, seconds, nanos, DateUtils.UTC); + } + public static ZonedDateTime dateTime(long millisSinceEpoch) { return DateUtils.asDateTime(millisSinceEpoch); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java new file mode 100644 index 0000000000000..3c85aa5257e51 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; + +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; + +public class DateTruncPipeTests extends AbstractNodeTestCase { + + @Override + protected DateTruncPipe randomInstance() { + return randomDateTruncPipe(); + } + + private Expression randomDateTruncPipeExpression() { + return randomDateTruncPipe().expression(); + } + + public static DateTruncPipe randomDateTruncPipe() { + return (DateTruncPipe) new DateTrunc( + randomSource(), + randomStringLiteral(), + randomStringLiteral(), + randomZone()) + .makePipe(); + } + + @Override + public void testTransform() { + // test transforming only the properties (source, expression), + // skipping the children (the two parameters of the binary function) which are tested separately + DateTruncPipe b1 = randomInstance(); + + Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateTruncPipeExpression); + 
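+        // Expected value: a pipe identical to b1 except for the swapped expression;
+        // transformPropertiesOnly should produce exactly this instance.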
DateTruncPipe newB = new DateTruncPipe( + b1.source(), + newExpression, + b1.left(), + b1.right(), + b1.zoneId()); + assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); + + DateTruncPipe b2 = randomInstance(); + Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource); + newB = new DateTruncPipe( + newLoc, + b2.expression(), + b2.left(), + b2.right(), + b2.zoneId()); + assertEquals(newB, + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class)); + } + + @Override + public void testReplaceChildren() { + DateTruncPipe b = randomInstance(); + Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomStringLiteral))); + Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomDatetimeLiteral))); + ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone); + DateTruncPipe newB = + new DateTruncPipe(b.source(), b.expression(), b.left(), b.right(), newZoneId); + BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); + + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.source(), b.source()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), b.right()); + + transformed = newB.replaceChildren(b.left(), newRight); + assertEquals(transformed.left(), b.left()); + assertEquals(transformed.source(), b.source()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + + transformed = newB.replaceChildren(newLeft, newRight); + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.source(), b.source()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + } + + @Override + protected DateTruncPipe mutate(DateTruncPipe instance) { + List> randoms = new ArrayList<>(); + randoms.add(f -> new DateTruncPipe(f.source(), + f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), + f.right(), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + randoms.add(f -> new DateTruncPipe(f.source(), + f.expression(), + f.left(), + pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + randoms.add(f -> new DateTruncPipe(f.source(), + f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), + pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + + return randomFrom(randoms).apply(instance); + } + + @Override + protected DateTruncPipe copy(DateTruncPipe instance) { + return new DateTruncPipe(instance.source(), + instance.expression(), + instance.left(), + instance.right(), + instance.zoneId()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java new file mode 100644 index 0000000000000..7447cd781c292 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java @@ -0,0 +1,156 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.AbstractSqlWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.util.DateUtils; + +import java.time.ZoneId; +import java.time.ZonedDateTime; + +import static org.elasticsearch.xpack.sql.expression.Literal.NULL; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral; +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.proto.StringUtils.ISO_DATE_WITH_NANOS; +import static org.hamcrest.Matchers.startsWith; + +public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase { + + public static DateTruncProcessor randomDateTruncProcessor() { + return new DateTruncProcessor( + new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)), + new ConstantProcessor(ZonedDateTime.now()), + randomZone()); + } + + @Override + protected DateTruncProcessor createTestInstance() { + return randomDateTruncProcessor(); + } + + @Override + protected Reader instanceReader() { + return DateTruncProcessor::new; + } + + @Override + protected ZoneId instanceZoneId(DateTruncProcessor instance) { + return instance.zoneId(); + } + + @Override + protected DateTruncProcessor mutateInstance(DateTruncProcessor instance) { + return new DateTruncProcessor( + new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)), + new ConstantProcessor(ZonedDateTime.now()), + randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone)); + } + + public void testInvalidInputs() { + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, + () -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); + assertEquals("A string is required; received [5]", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new DateTrunc(Source.EMPTY, l("days"), l("foo"), randomZone()).makePipe().asProcessor().process(null)); + assertEquals("A datetime/date is required; received [foo]", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); + assertThat(siae.getMessage(), startsWith("A value of [MILLENNIUM, CENTURY, DECADE, YEAR, " + + "QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " + + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [")); + } + + public void testWithNulls() { + assertNull(new DateTrunc(Source.EMPTY, NULL, randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); + assertNull(new DateTrunc(Source.EMPTY, l("days"), NULL, 
randomZone()).makePipe().asProcessor().process(null)); + assertNull(new DateTrunc(Source.EMPTY, NULL, NULL, randomZone()).makePipe().asProcessor().process(null)); + } + + public void testTruncation() { + ZoneId zoneId = ZoneId.of("Etc/GMT-10"); + Literal dateTime = l(dateTime(2019, 9, 3, 18, 10, 37, 123456789)); + + assertEquals("2000-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("millennia"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2000-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("CENTURY"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2010-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("decades"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("years"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-07-01T00:00:00.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("quarters"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-01T00:00:00.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("month"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-02T00:00:00.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("weeks"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T00:00:00.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("days"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T04:00:00.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("hh"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T04:10:00.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("mi"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T04:10:37.000+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("second"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T04:10:37.123+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("ms"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T04:10:37.123456+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("mcs"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + assertEquals("2019-09-04T04:10:37.123456789+10:00", + toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("nanoseconds"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + } + + public void testTruncationEdgeCases() { + ZoneId zoneId = ZoneId.of("Etc/GMT-10"); + Literal dateTime = l(dateTime(-11412, 9, 3, 18, 10, 37, 123456789)); + assertEquals("-11000-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("millennia"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + + dateTime = l(dateTime(-12999, 9, 3, 18, 10, 37, 123456789)); + assertEquals("-12900-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("centuries"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + + dateTime = l(dateTime(-32999, 9, 3, 18, 10, 37, 123456789)); + 
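+        // Java's remainder keeps the dividend's sign: -32999 % 10 == -9, so
+        // year - (year % 10) yields -32990 as the first year of the decade.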
assertEquals("-32990-01-01T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("decades"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + + dateTime = l(dateTime(-1234, 9, 3, 18, 10, 37, 123456789)); + assertEquals("-1234-08-29T00:00:00.000+10:00", + DateUtils.toString((ZonedDateTime) new DateTrunc(Source.EMPTY, l("week"), dateTime, zoneId) + .makePipe().asProcessor().process(null))); + } + + private String toString(ZonedDateTime dateTime) { + return ISO_DATE_WITH_NANOS.format(dateTime); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 80d9202d5bfed..db0406dc00146 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -293,6 +293,22 @@ private void testDateRangeWithCurrentFunctions(String function, String pattern, assertEquals(pattern, rq.format()); } + public void testTranslateDateTrunc_WhereClause_Painless() { + LogicalPlan p = plan("SELECT int FROM test WHERE DATE_TRUNC('month', date) > '2018-09-04'::date"); + assertTrue(p instanceof Project); + assertTrue(p.children().get(0) instanceof Filter); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertTrue(translation.query instanceof ScriptQuery); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(InternalSqlScriptUtils.dateTrunc(" + + "params.v0,InternalSqlScriptUtils.docValue(doc,params.v1),params.v2),InternalSqlScriptUtils.asDateTime(params.v3)))", + sc.script().toString()); + assertEquals("[{v=month}, {v=date}, {v=Z}, {v=2018-09-04T00:00:00.000Z}]", sc.script().params().toString()); + } + public void testLikeOnInexact() { LogicalPlan p = plan("SELECT * FROM test WHERE some.string LIKE '%a%'"); assertTrue(p instanceof Project); From 4b119dc99bc374e40186f043a26f90434e866d13 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 9 Sep 2019 00:48:04 +0300 Subject: [PATCH 02/11] Fix issue with assertion of DATE types --- .../xpack/sql/qa/jdbc/JdbcAssert.java | 15 ++++++++++---- .../xpack/sql/qa/jdbc/JdbcTestUtils.java | 5 +++++ .../qa/src/main/resources/datetime.csv-spec | 18 ++++++++--------- .../qa/src/main/resources/docs/docs.csv-spec | 20 +++++++++---------- 4 files changed, 35 insertions(+), 23 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index ec9386d2d6e12..37ca10c9988d0 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java @@ -37,6 +37,9 @@ import static java.sql.Types.REAL; import static java.sql.Types.SMALLINT; import static java.sql.Types.TINYINT; +import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.convertDateToSystemTimezone; +import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.logResultSetMetadata; +import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.resultSetCurrentData; import 
static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; @@ -107,7 +110,7 @@ public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, ResultSetMetaData actualMeta = actual.getMetaData(); if (logger != null) { - JdbcTestUtils.logResultSetMetadata(actual, logger); + logResultSetMetadata(actual, logger); } if (expectedMeta.getColumnCount() != actualMeta.getColumnCount()) { @@ -210,7 +213,7 @@ private static void doAssertResultSetData(ResultSet expected, ResultSet actual, assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next()); if (logger != null) { - logger.info(JdbcTestUtils.resultSetCurrentData(actual)); + logger.info(resultSetCurrentData(actual)); } for (int column = 1; column <= columns; column++) { @@ -264,6 +267,10 @@ private static void doAssertResultSetData(ResultSet expected, ResultSet actual, else if (type == Types.TIMESTAMP || type == Types.TIMESTAMP_WITH_TIMEZONE) { assertEquals(msg, expected.getTimestamp(column), actual.getTimestamp(column)); } + // then date + else if (type == Types.DATE) { + assertEquals(msg, convertDateToSystemTimezone(expected.getDate(column)), actual.getDate(column)); + } // and floats/doubles else if (type == Types.DOUBLE) { assertEquals(msg, (double) expectedObject, (double) actualObject, lenientFloatingNumbers ? 1d : 0.0d); @@ -301,14 +308,14 @@ else if (type == Types.VARCHAR && actualObject instanceof TemporalAmount) { } catch (AssertionError ae) { if (logger != null && actual.next()) { logger.info("^^^ Assertion failure ^^^"); - logger.info(JdbcTestUtils.resultSetCurrentData(actual)); + logger.info(resultSetCurrentData(actual)); } throw ae; } if (actual.next()) { fail("Elasticsearch [" + actual + "] still has data after [" + count + "] entries:\n" - + JdbcTestUtils.resultSetCurrentData(actual)); + + resultSetCurrentData(actual)); } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java index 123f22073ae57..d625192e63fce 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java @@ -38,6 +38,7 @@ import java.util.jar.JarInputStream; import java.util.zip.ZipEntry; +import static java.time.ZoneOffset.UTC; import static org.elasticsearch.xpack.sql.action.BasicFormatter.FormatOption.CLI; final class JdbcTestUtils { @@ -240,4 +241,8 @@ static Time asTime(long millis, ZoneId zoneId) { return new Time(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId) .toLocalTime().atDate(JdbcTestUtils.EPOCH).atZone(zoneId).toInstant().toEpochMilli()); } + + static Date convertDateToSystemTimezone(Date date) { + return new Date(date.toLocalDate().atStartOfDay(UTC).toInstant().toEpochMilli()); + } } diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index b6d5905df21ed..217994fea9c77 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -133,15 +133,15 @@ DATE_TRUNC('mcs', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_micro, DAT ; selectDateTruncWithDate -schema::dt_mil:s|dt_cent:s|dt_dec:s|dt_year:s|dt_quarter:s|dt_month:s|dt_week:s|dt_day:s -SELECT DATE_TRUNC('millennia', 
'2019-09-04'::date)::string as dt_mil, DATE_TRUNC('century', '2019-09-04'::date)::string as dt_cent, -DATE_TRUNC('decades', '2019-09-04'::date)::string as dt_dec, DATE_TRUNC('year', '2019-09-04'::date)::string as dt_year, -DATE_TRUNC('quarter', '2019-09-04'::date)::string as dt_quarter, DATE_TRUNC('month', '2019-09-04'::date)::string as dt_month, -DATE_TRUNC('week', '2019-09-04'::date)::string as dt_week, DATE_TRUNC('day', '2019-09-04'::date)::string as dt_day; - - dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day --------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+------------------------- -2000-01-01T00:00:00.000Z | 2000-01-01T00:00:00.000Z | 2010-01-01T00:00:00.000Z | 2019-01-01T00:00:00.000Z | 2019-07-01T00:00:00.000Z | 2019-09-01T00:00:00.000Z | 2019-09-02T00:00:00.000Z | 2019-09-04T00:00:00.000Z +schema::dt_mil:date|dt_cent:date|dt_dec:date|dt_year:date|dt_quarter:date|dt_month:date|dt_week:date|dt_day:date +SELECT DATE_TRUNC('millennia', '2019-09-04'::date) as dt_mil, DATE_TRUNC('century', '2019-09-04'::date) as dt_cent, +DATE_TRUNC('decades', '2019-09-04'::date) as dt_dec, DATE_TRUNC('year', '2019-09-04'::date) as dt_year, +DATE_TRUNC('quarter', '2019-09-04'::date) as dt_quarter, DATE_TRUNC('month', '2019-09-04'::date) as dt_month, +DATE_TRUNC('week', '2019-09-04'::date) as dt_week, DATE_TRUNC('day', '2019-09-04'::date) as dt_day; + + dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day +-----------+------------+------------+------------+-------------+------------+------------+----------- +2000-01-01 | 2000-01-01 | 2010-01-01 | 2019-01-01 | 2019-07-01 | 2019-09-01 | 2019-09-02 | 2019-09-04 ; selectDateTruncWithField diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 36f55bd8f6169..024cad379ff3a 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -2444,24 +2444,24 @@ SELECT DATE_TRUNC('mi', '2019-09-04T11:22:33.123Z'::datetime) AS mins; ; truncateDateDecades -schema::decades:s +schema::decades:date // tag::truncateDateDecades -SELECT DATE_TRUNC('decade', CAST('2019-09-04' AS DATE))::string AS decades; +SELECT DATE_TRUNC('decade', CAST('2019-09-04' AS DATE)) AS decades; - decades ------------------------- -2010-01-01T00:00:00.000Z + decades +---------- +2010-01-01 // end::truncateDateDecades ; truncateDateQuarter -schema::quarter:s +schema::quarter:date // tag::truncateDateQuarter -SELECT DATE_TRUNC('quarters', CAST('2019-09-04' AS DATE))::string AS quarter; +SELECT DATE_TRUNC('quarters', CAST('2019-09-04' AS DATE)) AS quarter; - quarter ------------------------- -2019-07-01T00:00:00.000Z + quarter +---------- +2019-07-01 // end::truncateDateQuarter ; From 3c10b9c709cd58f85aeee9c19ebbd3870e45f1ab Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 9 Sep 2019 01:31:04 +0300 Subject: [PATCH 03/11] Added comment --- .../java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java index d625192e63fce..6b3457e9362a1 100644 --- 
a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java @@ -242,6 +242,7 @@ static Time asTime(long millis, ZoneId zoneId) { .toLocalTime().atDate(JdbcTestUtils.EPOCH).atZone(zoneId).toInstant().toEpochMilli()); } + // Used to convert the DATE read from CSV file to a java.sql.Date at the System's timezone (-Dtests.timezone=XXXX) static Date convertDateToSystemTimezone(Date date) { return new Date(date.toLocalDate().atStartOfDay(UTC).toInstant().toEpochMilli()); } From edf42ce75fadd3f99270804dbe47911e88d1e719 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 9 Sep 2019 23:09:41 +0300 Subject: [PATCH 04/11] Address comments --- .../xpack/sql/qa/jdbc/JdbcAssert.java | 8 +- .../xpack/sql/qa/jdbc/JdbcTestUtils.java | 6 - .../function/scalar/datetime/DateTrunc.java | 142 ++++++++++++++++-- .../scalar/datetime/DateTruncProcessor.java | 37 +++-- .../xpack/sql/util/DateUtils.java | 93 +----------- .../analyzer/VerifierErrorMessagesTests.java | 6 + .../datetime/DateTruncProcessorTests.java | 12 +- 7 files changed, 177 insertions(+), 127 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index 37ca10c9988d0..2bed132df4cf4 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java @@ -17,6 +17,7 @@ import org.relique.jdbc.csv.CsvResultSet; import java.io.IOException; +import java.sql.Date; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; @@ -37,7 +38,7 @@ import static java.sql.Types.REAL; import static java.sql.Types.SMALLINT; import static java.sql.Types.TINYINT; -import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.convertDateToSystemTimezone; +import static java.time.ZoneOffset.UTC; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.logResultSetMetadata; import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.resultSetCurrentData; import static org.hamcrest.MatcherAssert.assertThat; @@ -335,4 +336,9 @@ private static int typeOf(int columnType, boolean lenientDataType) { return columnType; } + + // Used to convert the DATE read from CSV file to a java.sql.Date at the System's timezone (-Dtests.timezone=XXXX) + private static Date convertDateToSystemTimezone(Date date) { + return new Date(date.toLocalDate().atStartOfDay(UTC).toInstant().toEpochMilli()); + } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java index 6b3457e9362a1..123f22073ae57 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java @@ -38,7 +38,6 @@ import java.util.jar.JarInputStream; import java.util.zip.ZipEntry; -import static java.time.ZoneOffset.UTC; import static org.elasticsearch.xpack.sql.action.BasicFormatter.FormatOption.CLI; final class JdbcTestUtils { @@ -241,9 +240,4 @@ static Time asTime(long millis, ZoneId zoneId) { return new Time(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId) 
.toLocalTime().atDate(JdbcTestUtils.EPOCH).atZone(zoneId).toInstant().toEpochMilli());
     }
-
-    // Used to convert the DATE read from CSV file to a java.sql.Date at the System's timezone (-Dtests.timezone=XXXX)
-    static Date convertDateToSystemTimezone(Date date) {
-        return new Date(date.toLocalDate().atStartOfDay(UTC).toInstant().toEpochMilli());
-    }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
index 3c1c0927f61ff..0292f68afe26e 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
 
+import org.elasticsearch.common.time.IsoLocale;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.Nullability;
@@ -14,8 +15,13 @@
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.util.StringUtils;
 
 import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoField;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Locale;
 import java.util.Objects;
 import java.util.Set;
@@ -45,6 +51,16 @@ public enum DatePart {
         MICROSECOND("microseconds", "mcs"),
         NANOSECOND("nanoseconds", "ns");
 
+        private static Set<String> ALL_DATE_PARTS;
+
+        static {
+            ALL_DATE_PARTS = new HashSet<>();
+            for (DatePart datePart : DatePart.values()) {
+                ALL_DATE_PARTS.add(datePart.name().toLowerCase(IsoLocale.ROOT));
+                ALL_DATE_PARTS.addAll(datePart.aliases());
+            }
+        }
+
         private Set<String> aliases;
 
         DatePart(String...
aliases) { @@ -64,6 +80,101 @@ public static DatePart resolveTruncate(String truncateTo) { } return null; } + + public static List findSimilar(String match) { + return StringUtils.findSimilar(match, ALL_DATE_PARTS); + } + + public static ZonedDateTime truncate(ZonedDateTime dateTime, DateTrunc.DatePart datePart) { + ZonedDateTime truncated = null; + switch (datePart) { + case MILLENNIUM: + int year = dateTime.getYear(); + int firstYearOfMillenium = year - (year % 1000); + truncated = dateTime + .with(ChronoField.YEAR, firstYearOfMillenium) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case CENTURY: + year = dateTime.getYear(); + int firstYearOfCentury = year - (year % 100); + truncated = dateTime + .with(ChronoField.YEAR, firstYearOfCentury) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case DECADE: + year = dateTime.getYear(); + int firstYearOfDecade = year - (year % 10); + truncated = dateTime + .with(ChronoField.YEAR, firstYearOfDecade) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case YEAR: + truncated = dateTime + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case QUARTER: + int month = dateTime.getMonthValue(); + int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1; + truncated = dateTime + .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case MONTH: + truncated = dateTime + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case WEEK: + truncated = dateTime + .with(ChronoField.DAY_OF_WEEK, 1) + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case DAY: + truncated = dateTime + .toLocalDate().atStartOfDay(dateTime.getZone()); + break; + case HOUR: + int hour = dateTime.getHour(); + truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) + .with(ChronoField.HOUR_OF_DAY, hour); + break; + case MINUTE: + hour = dateTime.getHour(); + int minute = dateTime.getMinute(); + truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) + .with(ChronoField.HOUR_OF_DAY, hour) + .with(ChronoField.MINUTE_OF_HOUR, minute); + break; + case SECOND: + truncated = dateTime + .with(ChronoField.NANO_OF_SECOND, 0); + break; + case MILLISECOND: + int micros = dateTime.get(ChronoField.MICRO_OF_SECOND); + truncated = dateTime + .with(ChronoField.MILLI_OF_SECOND, (micros / 1000)); + break; + case MICROSECOND: + int nanos = dateTime.getNano(); + truncated = dateTime + .with(ChronoField.MICRO_OF_SECOND, (nanos / 1000)); + break; + case NANOSECOND: + truncated = dateTime; + break; + } + return truncated; + } } @@ -86,11 +197,22 @@ protected TypeResolution resolveType() { return resolution; } - if (left().foldable() && DatePart.resolveTruncate((String) left().fold()) == null) { - return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]", - sourceText(), - DatePart.values(), - Expressions.name(left()))); + if (left().foldable()) { + String truncateToValue = (String) left().fold(); + if (DatePart.resolveTruncate(truncateToValue) == null) { + List similar = DatePart.findSimilar(truncateToValue); + if (similar.isEmpty()) { + 
return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]", + sourceText(), + DatePart.values(), + Expressions.name(left()))); + } else { + return new TypeResolution(format(null, "Unknown value [{}] for first argument of [{}]; did you mean {}?", + Expressions.name(left()), + sourceText(), + similar)); + } + } } resolution = isDate(right(), sourceText(), Expressions.ParamOrdinal.SECOND); if (resolution.unresolved()) { @@ -136,6 +258,11 @@ protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate dataType()); } + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), zoneId); + } + @Override public boolean equals(Object o) { if (this == o) { @@ -150,9 +277,4 @@ public boolean equals(Object o) { DateTrunc dateTrunc = (DateTrunc) o; return Objects.equals(zoneId, dateTrunc.zoneId); } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), zoneId); - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java index ed286d4ca9eea..733b6606a0db4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java @@ -11,13 +11,15 @@ import org.elasticsearch.xpack.sql.common.io.SqlStreamInput; import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.util.DateUtils; import java.io.IOException; import java.time.ZoneId; import java.time.ZonedDateTime; +import java.util.List; import java.util.Objects; +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc.DatePart; + public class DateTruncProcessor extends BinaryProcessor { public static final String NAME = "dtrunc"; @@ -34,6 +36,15 @@ public DateTruncProcessor(StreamInput in) throws IOException { zoneId = SqlStreamInput.asSqlStream(in).zoneId(); } + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected void doWrite(StreamOutput out) { + } + ZoneId zoneId() { return zoneId; } @@ -57,25 +68,23 @@ static Object process(Object source1, Object source2, ZoneId zoneId) { if (!(source1 instanceof String)) { throw new SqlIllegalArgumentException("A string is required; received [{}]", source1); } - DateTrunc.DatePart truncateDateField = DateTrunc.DatePart.resolveTruncate((String) source1); + DatePart truncateDateField = DatePart.resolveTruncate((String) source1); if (truncateDateField == null) { - throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", - DateTrunc.DatePart.values(), source2); + List similar = DatePart.findSimilar((String) source1); + if (similar.isEmpty()) { + throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]", + DatePart.values(), source1); + } else { + throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for truncation; " + "" + + "did you mean {}?", source1, similar); + } } + if (!(source2 instanceof ZonedDateTime)) { throw new SqlIllegalArgumentException("A datetime/date is required; received [{}]", source2); } - return 
DateUtils.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId), truncateDateField); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - protected void doWrite(StreamOutput out) { + return DatePart.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId), truncateDateField); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java index cfd572fc701e6..ceda288704cd2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; -import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -21,7 +20,6 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; -import java.time.temporal.ChronoField; import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE; import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; @@ -151,94 +149,5 @@ public static int getNanoPrecision(Expression precisionExpression, int nano) { return nano; } - public static ZonedDateTime truncate(ZonedDateTime dateTime, DateTrunc.DatePart datePart) { - ZonedDateTime truncated = null; - switch (datePart) { - case MILLENNIUM: - int year = dateTime.getYear(); - int firstYearOfMillenium = year - (year % 1000); - truncated = dateTime - .with(ChronoField.YEAR, firstYearOfMillenium) - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case CENTURY: - year = dateTime.getYear(); - int firstYearOfCentury = year - (year % 100); - truncated = dateTime - .with(ChronoField.YEAR, firstYearOfCentury) - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case DECADE: - year = dateTime.getYear(); - int firstYearOfDecade = year - (year % 10); - truncated = dateTime - .with(ChronoField.YEAR, firstYearOfDecade) - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case YEAR: - truncated = dateTime - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case QUARTER: - int month = dateTime.getMonthValue(); - int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1; - truncated = dateTime - .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case MONTH: - truncated = dateTime - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case WEEK: - truncated = dateTime - .with(ChronoField.DAY_OF_WEEK, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case DAY: - truncated = dateTime - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case HOUR: - int hour = dateTime.getHour(); - truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) - 
.with(ChronoField.HOUR_OF_DAY, hour); - break; - case MINUTE: - hour = dateTime.getHour(); - int minute = dateTime.getMinute(); - truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) - .with(ChronoField.HOUR_OF_DAY, hour) - .with(ChronoField.MINUTE_OF_HOUR, minute); - break; - case SECOND: - truncated = dateTime - .with(ChronoField.NANO_OF_SECOND, 0); - break; - case MILLISECOND: - int micros = dateTime.get(ChronoField.MICRO_OF_SECOND); - truncated = dateTime - .with(ChronoField.MILLI_OF_SECOND, (micros / 1000)); - break; - case MICROSECOND: - int nanos = dateTime.getNano(); - truncated = dateTime - .with(ChronoField.MICRO_OF_SECOND, (nanos / 1000)); - break; - case NANOSECOND: - truncated = dateTime; - break; - } - return truncated; - } + } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 9c34c6c4f2eef..8844301006fdc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -212,6 +212,12 @@ public void testDateTruncInvalidArgs() { "YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND] " + "or their aliases, found value ['invalid']", error("SELECT DATE_TRUNC('invalid', keyword) FROM test")); + assertEquals("1:8: Unknown value ['millenioum'] for first argument of [DATE_TRUNC('millenioum', keyword)]; " + + "did you mean [millennium, millennia]?", + error("SELECT DATE_TRUNC('millenioum', keyword) FROM test")); + assertEquals("1:8: Unknown value ['yyyz'] for first argument of [DATE_TRUNC('yyyz', keyword)]; " + + "did you mean [yyyy, yy]?", + error("SELECT DATE_TRUNC('yyyz', keyword) FROM test")); } public void testDateTruncValidArgs() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java index 7447cd781c292..47ce7477ddc04 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java @@ -23,7 +23,6 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; import static org.elasticsearch.xpack.sql.proto.StringUtils.ISO_DATE_WITH_NANOS; -import static org.hamcrest.Matchers.startsWith; public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase { @@ -68,9 +67,14 @@ public void testInvalidInputs() { siae = expectThrows(SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); - assertThat(siae.getMessage(), startsWith("A value of [MILLENNIUM, CENTURY, DECADE, YEAR, " + - "QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " + - "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [")); + assertEquals("A value of [MILLENNIUM, CENTURY, DECADE, YEAR, QUARTER, 
MONTH, WEEK, DAY, HOUR, MINUTE, " + + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", + siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new DateTrunc(Source.EMPTY, l("dacede"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); + assertEquals("Received value [dacede] is not valid date part for truncation; did you mean [decade, decades]?", + siae.getMessage()); } public void testWithNulls() { From 6b5090e4a5c55da2bdc37644a11b30ad3029f9ae Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 9 Sep 2019 23:18:33 +0300 Subject: [PATCH 05/11] Address comments pt2 - use HashMap for resolution --- .../function/scalar/datetime/DateTrunc.java | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java index 0292f68afe26e..69ab7cf47da81 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java @@ -20,9 +20,10 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; +import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Set; @@ -51,13 +52,22 @@ public enum DatePart { MICROSECOND("microseconds", "mcs"), NANOSECOND("nanoseconds", "ns"); - private static Set ALL_DATE_PARTS; + private static final Set ALL_DATE_PARTS; + private static final Map RESOLVE_MAP; static { ALL_DATE_PARTS = new HashSet<>(); + RESOLVE_MAP = new HashMap<>(); + for (DatePart datePart : DatePart.values()) { - ALL_DATE_PARTS.add(datePart.name().toLowerCase(IsoLocale.ROOT)); - ALL_DATE_PARTS.addAll(datePart.aliases()); + String lowerCaseName = datePart.name().toLowerCase(IsoLocale.ROOT); + ALL_DATE_PARTS.add(lowerCaseName); + ALL_DATE_PARTS.addAll(datePart.aliases); + + RESOLVE_MAP.put(lowerCaseName, datePart); + for (String alias : datePart.aliases) { + RESOLVE_MAP.put(alias, datePart); + } } } @@ -67,18 +77,8 @@ public enum DatePart { this.aliases = Set.of(aliases); } - public Set aliases() { - return aliases; - } - public static DatePart resolveTruncate(String truncateTo) { - for (DatePart datePart : DatePart.values()) { - truncateTo = truncateTo.toLowerCase(Locale.ROOT); - if (datePart.name().equalsIgnoreCase(truncateTo) || datePart.aliases().contains(truncateTo)) { - return datePart; - } - } - return null; + return RESOLVE_MAP.get(truncateTo.toLowerCase(IsoLocale.ROOT)); } public static List findSimilar(String match) { From 55f13b269b095b073aac270f0d58cea2ade1cee9 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Tue, 10 Sep 2019 13:02:15 +0300 Subject: [PATCH 06/11] Fix docs --- docs/reference/sql/functions/date-time.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index 9118bebf8a7ac..d3fdd756e5c36 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -261,10 +261,10 @@ DATE_TRUNC( *Input*: -<1> string expression denoting the unit to which 
the date/datetime should be truncated +<1> string expression denoting the unit to which the date/datetime should be truncated to <2> date/datetime expression -*Output*: date/datetime, same as datetime_exp +*Output*: date/datetime, same as `datetime_exp` .Description: From 215d42e726126f30f094a1046f5749faef6dfc45 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Tue, 10 Sep 2019 14:10:48 +0300 Subject: [PATCH 07/11] Fix issue with nulls, added more tests --- .../qa/src/main/resources/datetime.csv-spec | 51 +++++++++++++++++++ .../function/scalar/datetime/DateTrunc.java | 2 +- 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index 217994fea9c77..f55ec1535456f 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -166,6 +166,57 @@ FROM test_emp WHERE emp_no >= 10032 AND emp_no <= 10042 ORDER BY 1; 10042 | null | null | null | null | null | null | null | null | null ; +selectDateTruncWithNullTruncateField +SELECT DATE_TRUNC(null, birth_date) AS dt FROM test_emp LIMIT 5; + + dt:ts +------ +null +null +null +null +null +; + +selectDateTruncWithScalars +SELECT birth_date, DATE_TRUNC(CAST(CHAR(109) AS VARCHAR), birth_date + INTERVAL 12 YEAR) AS dt FROM test_emp ORDER BY 1 DESC LIMIT 5; + + birth_date:Ts | dt:ts +-------------------------+--------------------- +1965-01-03 00:00:00.000Z | 1977-01-01 00:00:00.000Z +1964-10-18 00:00:00.000Z | 1976-10-01 00:00:00.000Z +1964-06-11 00:00:00.000Z | 1976-06-01 00:00:00.000Z +1964-06-02 00:00:00.000Z | 1976-06-01 00:00:00.000Z +1964-04-18 00:00:00.000Z | 1976-04-01 00:00:00.000Z +; + +selectDateTruncWithTruncArgFromField +SELECT DATE_TRUNC(CONCAT(gender, 'illennium'), birth_date) AS dt FROM test_emp WHERE gender='M' ORDER BY 1 DESC LIMIT 2; + + dt:ts +------------------------ +0999-12-27 00:00:00.000Z +0999-12-27 00:00:00.000Z +; + +selectDateTruncWithComplexExpressions +SELECT gender, birth_date, DATE_TRUNC(CASE WHEN gender = 'M' THEN CONCAT(gender, 'onths') WHEN gender = 'F' THEN 'decade' ELSE 'quarter' END, +birth_date + INTERVAL 10 month) AS dt FROM test_emp WHERE dt > '1954-07-01'::date ORDER BY emp_no LIMIT 10; + + gender:s | birth_date:ts | dt:ts +------------+--------------------------+--------------------- +F | 1964-06-02 00:00:00.000Z | 1960-01-01 00:00:00.000Z +M | 1959-12-03 00:00:00.000Z | 1960-10-01 00:00:00.000Z +M | 1954-05-01 00:00:00.000Z | 1955-03-01 00:00:00.000Z +M | 1955-01-21 00:00:00.000Z | 1955-11-01 00:00:00.000Z +M | 1958-02-19 00:00:00.000Z | 1958-12-01 00:00:00.000Z +null | 1963-06-01 00:00:00.000Z | 1964-04-01 00:00:00.000Z +null | 1960-10-04 00:00:00.000Z | 1961-07-01 00:00:00.000Z +null | 1963-06-07 00:00:00.000Z | 1964-04-01 00:00:00.000Z +null | 1956-02-12 00:00:00.000Z | 1956-10-01 00:00:00.000Z +null | 1959-08-19 00:00:00.000Z | 1960-04-01 00:00:00.000Z +; + dateTruncOrderBy schema::emp_no:i|hire_date:ts|dt:ts SELECT emp_no, hire_date, DATE_TRUNC('quarter', hire_date) as dt FROM test_emp ORDER BY dt NULLS LAST, emp_no LIMIT 5; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java index 69ab7cf47da81..c05a346cdd2f9 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java @@ -199,7 +199,7 @@ protected TypeResolution resolveType() { if (left().foldable()) { String truncateToValue = (String) left().fold(); - if (DatePart.resolveTruncate(truncateToValue) == null) { + if (truncateToValue != null && DatePart.resolveTruncate(truncateToValue) == null) { List similar = DatePart.findSimilar(truncateToValue); if (similar.isEmpty()) { return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]", From 8d5b398afd886fc980693bf8af0533aa22fa2304 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Tue, 10 Sep 2019 14:26:40 +0300 Subject: [PATCH 08/11] Fix integ test --- x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index f55ec1535456f..84a1a137a7655 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -179,9 +179,9 @@ null ; selectDateTruncWithScalars -SELECT birth_date, DATE_TRUNC(CAST(CHAR(109) AS VARCHAR), birth_date + INTERVAL 12 YEAR) AS dt FROM test_emp ORDER BY 1 DESC LIMIT 5; +SELECT birth_date, DATE_TRUNC(CAST(CHAR(109) AS VARCHAR), birth_date + INTERVAL 12 YEAR) AS dt FROM test_emp ORDER BY 1 DESC NULLS LAST LIMIT 5; - birth_date:Ts | dt:ts + birth_date:ts | dt:ts -------------------------+--------------------- 1965-01-03 00:00:00.000Z | 1977-01-01 00:00:00.000Z 1964-10-18 00:00:00.000Z | 1976-10-01 00:00:00.000Z From fc75b527698a9cb9df13f03731e8b47be102aed8 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Tue, 10 Sep 2019 18:20:45 +0300 Subject: [PATCH 09/11] Make return time always DATETIME --- docs/reference/sql/functions/date-time.asciidoc | 2 +- .../sql/qa/src/main/resources/datetime.csv-spec | 8 ++++---- .../sql/qa/src/main/resources/docs/docs.csv-spec | 16 ++++++++-------- .../function/scalar/datetime/DateTrunc.java | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index d3fdd756e5c36..14779b09ff849 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -264,7 +264,7 @@ DATE_TRUNC( <1> string expression denoting the unit to which the date/datetime should be truncated to <2> date/datetime expression -*Output*: date/datetime, same as `datetime_exp` +*Output*: datetime (even if `datetime_exp` is of type date) .Description: diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index 84a1a137a7655..ad80d663b473a 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -133,15 +133,15 @@ DATE_TRUNC('mcs', '2019-09-04T11:22:33.123Z'::datetime)::string as dt_micro, DAT ; selectDateTruncWithDate -schema::dt_mil:date|dt_cent:date|dt_dec:date|dt_year:date|dt_quarter:date|dt_month:date|dt_week:date|dt_day:date +schema::dt_mil:ts|dt_cent:ts|dt_dec:ts|dt_year:ts|dt_quarter:ts|dt_month:ts|dt_week:ts|dt_day:ts SELECT DATE_TRUNC('millennia', '2019-09-04'::date) as dt_mil, 
DATE_TRUNC('century', '2019-09-04'::date) as dt_cent, DATE_TRUNC('decades', '2019-09-04'::date) as dt_dec, DATE_TRUNC('year', '2019-09-04'::date) as dt_year, DATE_TRUNC('quarter', '2019-09-04'::date) as dt_quarter, DATE_TRUNC('month', '2019-09-04'::date) as dt_month, DATE_TRUNC('week', '2019-09-04'::date) as dt_week, DATE_TRUNC('day', '2019-09-04'::date) as dt_day; - dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day ------------+------------+------------+------------+-------------+------------+------------+----------- -2000-01-01 | 2000-01-01 | 2010-01-01 | 2019-01-01 | 2019-07-01 | 2019-09-01 | 2019-09-02 | 2019-09-04 + dt_mil | dt_cent | dt_dec | dt_year | dt_quarter | dt_month | dt_week | dt_day +-------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+--------------------------+------------------------- +2000-01-01T00:00:00.000Z | 2000-01-01T00:00:00.000Z | 2010-01-01T00:00:00.000Z | 2019-01-01T00:00:00.000Z | 2019-07-01T00:00:00.000Z | 2019-09-01T00:00:00.000Z | 2019-09-02T00:00:00.000Z | 2019-09-04T00:00:00.000Z ; selectDateTruncWithField diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 024cad379ff3a..7198da9319cdb 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -2444,24 +2444,24 @@ SELECT DATE_TRUNC('mi', '2019-09-04T11:22:33.123Z'::datetime) AS mins; ; truncateDateDecades -schema::decades:date +schema::decades:ts // tag::truncateDateDecades SELECT DATE_TRUNC('decade', CAST('2019-09-04' AS DATE)) AS decades; - decades ----------- -2010-01-01 + decades +------------------------ +2010-01-01T00:00:00.000Z // end::truncateDateDecades ; truncateDateQuarter -schema::quarter:date +schema::quarter:ts // tag::truncateDateQuarter SELECT DATE_TRUNC('quarters', CAST('2019-09-04' AS DATE)) AS quarter; - quarter ----------- -2019-07-01 + quarter +------------------------ +2019-07-01T00:00:00.000Z // end::truncateDateQuarter ; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java index c05a346cdd2f9..cd746964ca2cc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java @@ -187,7 +187,7 @@ public DateTrunc(Source source, Expression truncateTo, Expression timestamp, Zon @Override public DataType dataType() { - return right().dataType(); + return DataType.DATETIME; } @Override From 0b0069fd269eb18b8590979103c66353b9eb8b90 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Tue, 10 Sep 2019 22:15:19 +0300 Subject: [PATCH 10/11] fix docs --- docs/reference/sql/functions/date-time.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index 14779b09ff849..3f515ec98e2df 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -264,7 +264,7 @@ DATE_TRUNC( <1> string expression denoting the unit to which the date/datetime should be truncated to <2> 
date/datetime expression -*Output*: datetime (even if `datetime_exp` is of type date) +*Output*: datetime .Description: From 7bd17d92587915b4b877a43d1addfd5b0e378bcf Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Wed, 11 Sep 2019 18:07:45 +0300 Subject: [PATCH 11/11] Address comments --- .../function/scalar/datetime/DateTrunc.java | 215 ++++++++---------- .../scalar/datetime/DateTruncProcessor.java | 10 +- 2 files changed, 96 insertions(+), 129 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java index cd746964ca2cc..6dd07e80ff2c3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java @@ -21,11 +21,11 @@ import java.time.ZonedDateTime; import java.time.temporal.ChronoField; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate; @@ -35,149 +35,116 @@ public class DateTrunc extends BinaryScalarFunction { - public enum DatePart { - - MILLENNIUM("millennia"), - CENTURY("centuries"), - DECADE("decades"), - YEAR("years", "yy", "yyyy"), - QUARTER("quarters", "qq", "q"), - MONTH("months", "mm", "m"), - WEEK("weeks", "wk", "ww"), - DAY("days", "dd", "d"), - HOUR("hours", "hh"), - MINUTE("minutes", "mi", "n"), - SECOND("seconds", "ss", "s"), - MILLISECOND("milliseconds", "ms"), - MICROSECOND("microseconds", "mcs"), - NANOSECOND("nanoseconds", "ns"); - - private static final Set ALL_DATE_PARTS; - private static final Map RESOLVE_MAP; + public enum Part { + + MILLENNIUM(dt -> { + int year = dt.getYear(); + int firstYearOfMillenium = year - (year % 1000); + return dt + .with(ChronoField.YEAR, firstYearOfMillenium) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dt.getZone()); + },"millennia"), + CENTURY(dt -> { + int year = dt.getYear(); + int firstYearOfCentury = year - (year % 100); + return dt + .with(ChronoField.YEAR, firstYearOfCentury) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dt.getZone()); + }, "centuries"), + DECADE(dt -> { + int year = dt.getYear(); + int firstYearOfDecade = year - (year % 10); + return dt + .with(ChronoField.YEAR, firstYearOfDecade) + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dt.getZone()); + }, "decades"), + YEAR(dt -> dt + .with(ChronoField.MONTH_OF_YEAR, 1) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dt.getZone()), + "years", "yy", "yyyy"), + QUARTER(dt -> { + int month = dt.getMonthValue(); + int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1; + return dt + .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter) + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dt.getZone()); + }, "quarters", "qq", "q"), + MONTH(dt -> dt + .with(ChronoField.DAY_OF_MONTH, 1) + .toLocalDate().atStartOfDay(dt.getZone()), + "months", "mm", "m"), + WEEK(dt -> dt + .with(ChronoField.DAY_OF_WEEK, 1) + 
.toLocalDate().atStartOfDay(dt.getZone()), + "weeks", "wk", "ww"), + DAY(dt -> dt.toLocalDate().atStartOfDay(dt.getZone()), "days", "dd", "d"), + HOUR(dt -> { + int hour = dt.getHour(); + return dt.toLocalDate().atStartOfDay(dt.getZone()) + .with(ChronoField.HOUR_OF_DAY, hour); + }, "hours", "hh"), + MINUTE(dt -> { + int hour = dt.getHour(); + int minute = dt.getMinute(); + return dt.toLocalDate().atStartOfDay(dt.getZone()) + .with(ChronoField.HOUR_OF_DAY, hour) + .with(ChronoField.MINUTE_OF_HOUR, minute); + }, "minutes", "mi", "n"), + SECOND(dt -> dt.with(ChronoField.NANO_OF_SECOND, 0), "seconds", "ss", "s"), + MILLISECOND(dt -> { + int micros = dt.get(ChronoField.MICRO_OF_SECOND); + return dt.with(ChronoField.MILLI_OF_SECOND, (micros / 1000)); + }, "milliseconds", "ms"), + MICROSECOND(dt -> { + int nanos = dt.getNano(); + return dt.with(ChronoField.MICRO_OF_SECOND, (nanos / 1000)); + }, "microseconds", "mcs"), + NANOSECOND(dt -> dt, "nanoseconds", "ns"); + + private static final Map NAME_TO_PART; static { - ALL_DATE_PARTS = new HashSet<>(); - RESOLVE_MAP = new HashMap<>(); + NAME_TO_PART = new HashMap<>(); - for (DatePart datePart : DatePart.values()) { + for (Part datePart : Part.values()) { String lowerCaseName = datePart.name().toLowerCase(IsoLocale.ROOT); - ALL_DATE_PARTS.add(lowerCaseName); - ALL_DATE_PARTS.addAll(datePart.aliases); - RESOLVE_MAP.put(lowerCaseName, datePart); + NAME_TO_PART.put(lowerCaseName, datePart); for (String alias : datePart.aliases) { - RESOLVE_MAP.put(alias, datePart); + NAME_TO_PART.put(alias, datePart); } } } private Set aliases; + private Function truncateFunction; - DatePart(String... aliases) { + Part(Function truncateFunction, String... aliases) { + this.truncateFunction = truncateFunction; this.aliases = Set.of(aliases); } - public static DatePart resolveTruncate(String truncateTo) { - return RESOLVE_MAP.get(truncateTo.toLowerCase(IsoLocale.ROOT)); + public static Part resolveTruncate(String truncateTo) { + return NAME_TO_PART.get(truncateTo.toLowerCase(IsoLocale.ROOT)); } public static List findSimilar(String match) { - return StringUtils.findSimilar(match, ALL_DATE_PARTS); + return StringUtils.findSimilar(match, NAME_TO_PART.keySet()); } - public static ZonedDateTime truncate(ZonedDateTime dateTime, DateTrunc.DatePart datePart) { - ZonedDateTime truncated = null; - switch (datePart) { - case MILLENNIUM: - int year = dateTime.getYear(); - int firstYearOfMillenium = year - (year % 1000); - truncated = dateTime - .with(ChronoField.YEAR, firstYearOfMillenium) - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case CENTURY: - year = dateTime.getYear(); - int firstYearOfCentury = year - (year % 100); - truncated = dateTime - .with(ChronoField.YEAR, firstYearOfCentury) - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case DECADE: - year = dateTime.getYear(); - int firstYearOfDecade = year - (year % 10); - truncated = dateTime - .with(ChronoField.YEAR, firstYearOfDecade) - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case YEAR: - truncated = dateTime - .with(ChronoField.MONTH_OF_YEAR, 1) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case QUARTER: - int month = dateTime.getMonthValue(); - int firstMonthOfQuarter = (((month - 1) / 3) * 3) + 1; - 
truncated = dateTime - .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter) - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case MONTH: - truncated = dateTime - .with(ChronoField.DAY_OF_MONTH, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case WEEK: - truncated = dateTime - .with(ChronoField.DAY_OF_WEEK, 1) - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case DAY: - truncated = dateTime - .toLocalDate().atStartOfDay(dateTime.getZone()); - break; - case HOUR: - int hour = dateTime.getHour(); - truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) - .with(ChronoField.HOUR_OF_DAY, hour); - break; - case MINUTE: - hour = dateTime.getHour(); - int minute = dateTime.getMinute(); - truncated = dateTime.toLocalDate().atStartOfDay(dateTime.getZone()) - .with(ChronoField.HOUR_OF_DAY, hour) - .with(ChronoField.MINUTE_OF_HOUR, minute); - break; - case SECOND: - truncated = dateTime - .with(ChronoField.NANO_OF_SECOND, 0); - break; - case MILLISECOND: - int micros = dateTime.get(ChronoField.MICRO_OF_SECOND); - truncated = dateTime - .with(ChronoField.MILLI_OF_SECOND, (micros / 1000)); - break; - case MICROSECOND: - int nanos = dateTime.getNano(); - truncated = dateTime - .with(ChronoField.MICRO_OF_SECOND, (nanos / 1000)); - break; - case NANOSECOND: - truncated = dateTime; - break; - } - return truncated; + public ZonedDateTime truncate(ZonedDateTime dateTime) { + return truncateFunction.apply(dateTime); } } - private final ZoneId zoneId; public DateTrunc(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) { @@ -199,12 +166,12 @@ protected TypeResolution resolveType() { if (left().foldable()) { String truncateToValue = (String) left().fold(); - if (truncateToValue != null && DatePart.resolveTruncate(truncateToValue) == null) { - List similar = DatePart.findSimilar(truncateToValue); + if (truncateToValue != null && Part.resolveTruncate(truncateToValue) == null) { + List similar = Part.findSimilar(truncateToValue); if (similar.isEmpty()) { return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]", sourceText(), - DatePart.values(), + Part.values(), Expressions.name(left()))); } else { return new TypeResolution(format(null, "Unknown value [{}] for first argument of [{}]; did you mean {}?", diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java index 733b6606a0db4..446ede5ba1460 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java @@ -18,7 +18,7 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc.DatePart; +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTrunc.Part; public class DateTruncProcessor extends BinaryProcessor { @@ -68,12 +68,12 @@ static Object process(Object source1, Object source2, ZoneId zoneId) { if (!(source1 instanceof String)) { throw new SqlIllegalArgumentException("A string is required; received [{}]", source1); } - DatePart truncateDateField = DatePart.resolveTruncate((String) source1); + 
Part truncateDateField = Part.resolveTruncate((String) source1);
         if (truncateDateField == null) {
-            List<String> similar = DatePart.findSimilar((String) source1);
+            List<String> similar = Part.findSimilar((String) source1);
             if (similar.isEmpty()) {
                 throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
-                    DatePart.values(), source1);
+                    Part.values(), source1);
             } else {
                 throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for truncation; " + ""
                     + "did you mean {}?", source1, similar);
@@ -84,7 +84,7 @@ static Object process(Object source1, Object source2, ZoneId zoneId) {
             throw new SqlIllegalArgumentException("A datetime/date is required; received [{}]", source2);
         }
 
-        return DatePart.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId), truncateDateField);
+        return truncateDateField.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId));
     }
 
     @Override