diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchStatusDateParser.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchStatusDateParser.java
index a71ec58ce1caa..5162a4b245038 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchStatusDateParser.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchStatusDateParser.java
@@ -21,8 +21,8 @@
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
+import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.DateFieldMapper;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -30,7 +30,7 @@
public final class WatchStatusDateParser {
- private static final FormatDateTimeFormatter FORMATTER = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER;
+ private static final FormatDateTimeFormatter FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis");
private WatchStatusDateParser() {
// Prevent instantiation.
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java
index 07ee5b5dc6243..2b64da607c751 100644
--- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java
@@ -59,9 +59,9 @@
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@@ -302,7 +302,7 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
+ "] does not support custom time zones");
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java
index 2c20d4b47844e..94cce454311de 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java
@@ -46,9 +46,9 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -229,7 +229,7 @@ public BytesRef parseBytesRef(String value) {
};
@Override
- public DocValueFormat docValueFormat(final String format, final DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(final String format, final ZoneId timeZone) {
return COLLATE_FORMAT;
}
}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/180_local_dependent_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/180_locale_dependent_mapping.yml
similarity index 100%
rename from rest-api-spec/src/main/resources/rest-api-spec/test/search/180_local_dependent_mapping.yml
rename to rest-api-spec/src/main/resources/rest-api-spec/test/search/180_locale_dependent_mapping.yml
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index 9baedf6b65e33..8fff9196de1e3 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -30,6 +30,7 @@
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
+import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.time.JavaDateMathParser;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.Index;
@@ -913,18 +914,19 @@ String resolveExpression(String expression, final Context context) {
int formatPatternTimeZoneSeparatorIndex = patternAndTZid.indexOf(TIME_ZONE_BOUND);
if (formatPatternTimeZoneSeparatorIndex != -1) {
dateFormatterPattern = patternAndTZid.substring(0, formatPatternTimeZoneSeparatorIndex);
- timeZone = ZoneId.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1));
+ timeZone = DateUtils.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1));
} else {
dateFormatterPattern = patternAndTZid;
timeZone = ZoneOffset.UTC;
}
dateFormatter = DateFormatters.forPattern(dateFormatterPattern);
}
+
DateFormatter formatter = dateFormatter.withZone(timeZone);
DateMathParser dateMathParser = new JavaDateMathParser(formatter);
- long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
+ Instant instant = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
- String time = formatter.format(Instant.ofEpochMilli(millis));
+ String time = formatter.format(instant);
beforePlaceHolderSb.append(time);
inPlaceHolderSb = new StringBuilder();
inPlaceHolder = false;
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java
index 1aa53fca1b31c..875013f31b4bc 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java
@@ -174,7 +174,7 @@ public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().before(Version.V_6_0_0_alpha1)) {
// timestamp
out.writeBoolean(false); // enabled
- out.writeString(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format());
+ out.writeString(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern());
out.writeOptionalString("now"); // 5.x default
out.writeOptionalBoolean(null);
}
diff --git a/server/src/main/java/org/elasticsearch/common/Rounding.java b/server/src/main/java/org/elasticsearch/common/Rounding.java
index 593964f61e93f..7f3c385c2448f 100644
--- a/server/src/main/java/org/elasticsearch/common/Rounding.java
+++ b/server/src/main/java/org/elasticsearch/common/Rounding.java
@@ -19,6 +19,7 @@
package org.elasticsearch.common;
import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@@ -367,8 +368,13 @@ public long nextRoundingValue(long utcMillis) {
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeByte(unit.getId());
- String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
- out.writeString(tz);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeString(timeZone.getId());
+ } else {
+ // stay joda compatible
+ String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId();
+ out.writeString(tz);
+ }
}
@Override
@@ -490,8 +496,13 @@ public long nextRoundingValue(long time) {
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeVLong(interval);
- String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
- out.writeString(tz);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeString(timeZone.getId());
+ } else {
+ // stay joda compatible
+ String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId();
+ out.writeString(tz);
+ }
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index fd9ffdfd31d16..7759e13e536b7 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -653,6 +653,23 @@ public DateTimeZone readOptionalTimeZone() throws IOException {
return null;
}
+ /**
+ * Read a {@linkplain ZoneId}.
+ */
+ public ZoneId readZoneId() throws IOException {
+ return ZoneId.of(readString());
+ }
+
+ /**
+ * Read an optional {@linkplain ZoneId}.
+ */
+ public ZoneId readOptionalZoneId() throws IOException {
+ if (readBoolean()) {
+ return ZoneId.of(readString());
+ }
+ return null;
+ }
+
public int[] readIntArray() throws IOException {
int length = readArraySize();
int[] values = new int[length];
diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index b00706b78aedb..0d78f7145f7b1 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
@@ -55,6 +55,7 @@
import java.nio.file.FileSystemLoopException;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotDirectoryException;
+import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
@@ -677,7 +678,6 @@ public final void writeMap(final Map map, final Writer keyWriter
writers.put(ZonedDateTime.class, (o, v) -> {
o.writeByte((byte) 23);
final ZonedDateTime zonedDateTime = (ZonedDateTime) v;
- zonedDateTime.getZone().getId();
o.writeString(zonedDateTime.getZone().getId());
o.writeLong(zonedDateTime.toInstant().toEpochMilli());
});
@@ -974,6 +974,13 @@ public void writeTimeZone(DateTimeZone timeZone) throws IOException {
writeString(timeZone.getID());
}
+ /**
+ * Write a {@linkplain ZoneId} to the stream.
+ */
+ public void writeZoneId(ZoneId timeZone) throws IOException {
+ writeString(timeZone.getId());
+ }
+
/**
* Write an optional {@linkplain DateTimeZone} to the stream.
*/
@@ -986,6 +993,18 @@ public void writeOptionalTimeZone(@Nullable DateTimeZone timeZone) throws IOExce
}
}
+ /**
+ * Write an optional {@linkplain ZoneId} to the stream.
+ */
+ public void writeOptionalZoneId(@Nullable ZoneId timeZone) throws IOException {
+ if (timeZone == null) {
+ writeBoolean(false);
+ } else {
+ writeBoolean(true);
+ writeZoneId(timeZone);
+ }
+ }
+
/**
* Writes a list of {@link Streamable} objects
*/
diff --git a/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
index 0cef1d3e09b1b..8bf765100b7ef 100644
--- a/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
+++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
@@ -26,6 +26,7 @@
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.Objects;
import java.util.function.LongSupplier;
@@ -41,7 +42,7 @@ public class JodaDateMathParser implements DateMathParser {
private final FormatDateTimeFormatter dateTimeFormatter;
- public JodaDateMathParser(FormatDateTimeFormatter dateTimeFormatter) {
+ JodaDateMathParser(FormatDateTimeFormatter dateTimeFormatter) {
Objects.requireNonNull(dateTimeFormatter);
this.dateTimeFormatter = dateTimeFormatter;
}
@@ -50,7 +51,7 @@ public JodaDateMathParser(FormatDateTimeFormatter dateTimeFormatter) {
// if it has been used. For instance, the request cache does not cache requests that make
// use of `now`.
@Override
- public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
+ public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
final DateTimeZone timeZone = tz == null ? null : DateUtils.zoneIdToDateTimeZone(tz);
long time;
String mathString;
@@ -64,13 +65,13 @@ public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
} else {
int index = text.indexOf("||");
if (index == -1) {
- return parseDateTime(text, timeZone, roundUp);
+ return Instant.ofEpochMilli(parseDateTime(text, timeZone, roundUp));
}
time = parseDateTime(text.substring(0, index), timeZone, false);
mathString = text.substring(index + 2);
}
- return parseMath(mathString, time, roundUp, timeZone);
+ return Instant.ofEpochMilli(parseMath(mathString, time, roundUp, timeZone));
}
private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws ElasticsearchParseException {
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java
index b952f1d69bcd6..a94c0e347ba2d 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java
@@ -19,6 +19,9 @@
package org.elasticsearch.common.time;
+import org.elasticsearch.ElasticsearchParseException;
+
+import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
@@ -26,6 +29,8 @@
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
+import java.util.function.LongSupplier;
import java.util.stream.Collectors;
public interface DateFormatter {
@@ -94,6 +99,13 @@ public interface DateFormatter {
*/
DateFormatter parseDefaulting(Map fields);
+ /**
+ * Create a DateMathParser from the existing formatter
+ *
+ * @return The DateMathParser object
+ */
+ DateMathParser toDateMathParser();
+
/**
* Merge several date formatters into a single one. Useful if you need to have several formatters with
* different formats act as one, for example when you specify a
@@ -106,6 +118,34 @@ static DateFormatter merge(DateFormatter ... formatters) {
return new MergedDateFormatter(formatters);
}
+ class MergedDateMathParser implements DateMathParser {
+
+ private final DateMathParser[] parsers;
+
+ MergedDateMathParser(DateFormatter ... formatters) {
+ this.parsers = Arrays.stream(formatters)
+ .map(DateFormatter::toDateMathParser)
+ .collect(Collectors.toList()).toArray(new DateMathParser[0]);
+ }
+
+ @Override
+ public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
+ ElasticsearchParseException failure = null;
+ for (DateMathParser parser : parsers) {
+ try {
+ return parser.parse(text, now, roundUp, tz);
+ } catch (ElasticsearchParseException e) {
+ if (failure == null) {
+ failure = e;
+ } else {
+ failure.addSuppressed(e);
+ }
+ }
+ }
+ throw failure;
+ }
+ }
+
class MergedDateFormatter implements DateFormatter {
private final String format;
@@ -118,11 +158,11 @@ class MergedDateFormatter implements DateFormatter {
@Override
public TemporalAccessor parse(String input) {
- DateTimeParseException failure = null;
+ ElasticsearchParseException failure = null;
for (DateFormatter formatter : formatters) {
try {
return formatter.parse(input);
- } catch (DateTimeParseException e) {
+ } catch (ElasticsearchParseException e) {
if (failure == null) {
failure = e;
} else {
@@ -163,9 +203,31 @@ public ZoneId getZone() {
return formatters[0].getZone();
}
+ @Override
+ public DateMathParser toDateMathParser() {
+ return new MergedDateMathParser(formatters);
+ }
+
@Override
public DateFormatter parseDefaulting(Map fields) {
return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.parseDefaulting(fields)).toArray(DateFormatter[]::new));
}
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getLocale(), format);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj.getClass().equals(this.getClass()) == false) {
+ return false;
+ }
+ MergedDateFormatter other = (MergedDateFormatter) obj;
+
+ return Objects.equals(pattern(), other.pattern()) &&
+ Objects.equals(getLocale(), other.getLocale()) &&
+ Objects.equals(getZone(), other.getZone());
+ }
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
index a330e02b0cb28..181ca08b29e10 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
@@ -74,7 +74,14 @@ public class DateFormatters {
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.optionalStart()
.appendLiteral('T')
- .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(MILLI_OF_SECOND, 3, 3, true)
.optionalEnd()
@@ -82,13 +89,23 @@ public class DateFormatters {
.appendZoneOrOffsetId()
.optionalEnd()
.optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
.toFormatter(Locale.ROOT);
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_2 = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.optionalStart()
.appendLiteral('T')
- .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(MILLI_OF_SECOND, 3, 3, true)
.optionalEnd()
@@ -96,15 +113,11 @@ public class DateFormatters {
.appendOffset("+HHmm", "Z")
.optionalEnd()
.optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
.toFormatter(Locale.ROOT);
- /**
- * Returns a generic ISO datetime parser where the date is mandatory and the time is optional.
- */
- private static final DateFormatter STRICT_DATE_OPTIONAL_TIME =
- new JavaDateFormatter("strict_date_optional_time", STRICT_DATE_OPTIONAL_TIME_FORMATTER_1,
- STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2);
-
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1 = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.optionalStart()
@@ -140,6 +153,14 @@ public class DateFormatters {
STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1,
STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_2);
+ /**
+ * Returns a generic ISO datetime parser where the date is mandatory and the time is optional.
+ */
+ private static final DateFormatter STRICT_DATE_OPTIONAL_TIME =
+ new JavaDateFormatter("strict_date_optional_time", STRICT_DATE_OPTIONAL_TIME_FORMATTER_1,
+ STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2,
+ STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_2);
+
/////////////////////////////////////////
//
// BEGIN basic time formatters
@@ -338,13 +359,14 @@ public class DateFormatters {
* Returns a basic formatter that combines a basic weekyear date and time
* without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX).
*/
- private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_basic_week_date_no_millis",
- new DateTimeFormatterBuilder()
- .append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
- .toFormatter(Locale.ROOT),
- new DateTimeFormatterBuilder()
- .append(STRICT_BASIC_WEEK_DATE_FORMATTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
- .toFormatter(Locale.ROOT)
+ private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS =
+ new JavaDateFormatter("strict_basic_week_date_time_no_millis",
+ new DateTimeFormatterBuilder()
+ .append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(STRICT_BASIC_WEEK_DATE_FORMATTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
+ .toFormatter(Locale.ROOT)
);
/*
@@ -366,7 +388,7 @@ public class DateFormatters {
* An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'.
*/
private static final DateFormatter STRICT_DATE = new JavaDateFormatter("strict_date",
- DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT));
+ DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT).withLocale(Locale.ROOT));
/*
* A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'.
@@ -489,7 +511,9 @@ public class DateFormatters {
new JavaDateFormatter("strict_hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
- private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS;
+ private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION =
+ new JavaDateFormatter("strict_hour_minute_second_fraction",
+ STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
/*
* Returns a formatter that combines a full date, two digit hour of day,
@@ -512,7 +536,21 @@ public class DateFormatters {
.toFormatter(Locale.ROOT)
);
- private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION;
+ private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter(
+ "strict_date_hour_minute_second_millis",
+ new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral("T")
+ .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral("T")
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ // the fraction is parsed leniently here as well, to retain Joda-Time-based backwards compatibility
+ .appendFraction(MILLI_OF_SECOND, 1, 3, true)
+ .toFormatter(Locale.ROOT)
+ );
/*
* Returns a formatter for a two digit hour of day. (HH)
@@ -921,7 +959,17 @@ public class DateFormatters {
.append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
.toFormatter(Locale.ROOT));
- private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS;
+ private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("date_hour_minute_second_fraction",
+ new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral("T")
+ .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(DATE_FORMATTER)
+ .appendLiteral("T")
+ .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
+ .toFormatter(Locale.ROOT));
/*
* Returns a formatter that combines a full date, two digit hour of day,
@@ -1026,6 +1074,9 @@ public class DateFormatters {
private static final DateFormatter HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter("hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
+ private static final DateFormatter HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("hour_minute_second_fraction",
+ STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
+
/*
* Returns a formatter for a two digit hour of day and two digit minute of
* hour. (HH:mm)
@@ -1328,7 +1379,7 @@ private static DateFormatter forPattern(String input, Locale locale) {
} else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) {
return HOUR_MINUTE_SECOND;
} else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) {
- return HOUR_MINUTE_SECOND_MILLIS;
+ return HOUR_MINUTE_SECOND_FRACTION;
} else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) {
return HOUR_MINUTE_SECOND_MILLIS;
} else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) {
@@ -1439,12 +1490,12 @@ private static DateFormatter forPattern(String input, Locale locale) {
} else if (Strings.hasLength(input) && input.contains("||")) {
String[] formats = Strings.delimitedListToStringArray(input, "||");
if (formats.length == 1) {
- return forPattern(formats[0], locale);
+ return forPattern(formats[0], Locale.ROOT).withLocale(locale);
} else {
try {
DateFormatter[] formatters = new DateFormatter[formats.length];
for (int i = 0; i < formats.length; i++) {
- formatters[i] = forPattern(formats[i], locale);
+ formatters[i] = forPattern(formats[i], Locale.ROOT).withLocale(locale);
}
return DateFormatter.merge(formatters);
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java
index 1e997cce23be8..3ba392822ca0c 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java
@@ -21,6 +21,7 @@
import org.joda.time.DateTimeZone;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.function.LongSupplier;
@@ -32,7 +33,7 @@ public interface DateMathParser {
/**
* Parse a date math expression without timzeone info and rounding down.
*/
- default long parse(String text, LongSupplier now) {
+ default Instant parse(String text, LongSupplier now) {
return parse(text, now, false, (ZoneId) null);
}
@@ -42,7 +43,7 @@ default long parse(String text, LongSupplier now) {
// exists for backcompat, do not use!
@Deprecated
- default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) {
+ default Instant parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) {
return parse(text, now, roundUp, tz == null ? null : ZoneId.of(tz.getID()));
}
@@ -68,7 +69,7 @@ default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone
* @param now a supplier to retrieve the current date in milliseconds, if needed for additions
* @param roundUp should the result be rounded up
* @param tz an optional timezone that should be applied before returning the milliseconds since the epoch
- * @return the parsed date in milliseconds since the epoch
+ * @return the parsed date as an Instant since the epoch
*/
- long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz);
+ Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz);
}
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java
index 1112b6cb301f5..6b96b75685473 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java
@@ -62,12 +62,17 @@ public static ZoneId dateTimeZoneToZoneId(DateTimeZone timeZone) {
return null;
}
- String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(timeZone.getID());
+ return of(timeZone.getID());
+ }
+
+ public static ZoneId of(String zoneId) {
+ String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId);
if (deprecatedId != null) {
deprecationLogger.deprecatedAndMaybeLog("timezone",
- "Use of short timezone id " + timeZone.getID() + " is deprecated. Use " + deprecatedId + " instead");
+ "Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead");
return ZoneId.of(deprecatedId);
}
- return ZoneId.of(timeZone.getID());
+ return ZoneId.of(zoneId);
+
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java
index 4fce63510029d..5fed08c28398c 100644
--- a/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java
@@ -19,11 +19,12 @@
package org.elasticsearch.common.time;
+import org.elasticsearch.ElasticsearchParseException;
+
import java.math.BigDecimal;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
-import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalField;
import java.util.Locale;
@@ -63,7 +64,7 @@ public TemporalAccessor parse(String input) {
}
if (inputs[1].length() > 6) {
- throw new DateTimeParseException("too much granularity after dot [" + input + "]", input, 0);
+ throw new ElasticsearchParseException("too much granularity after dot [{}]", input);
}
Long nanos = new BigDecimal(inputs[1]).movePointRight(6 - inputs[1].length()).longValueExact();
if (milliSeconds < 0) {
@@ -74,7 +75,7 @@ public TemporalAccessor parse(String input) {
return Instant.ofEpochMilli(Long.valueOf(input)).atZone(ZoneOffset.UTC);
}
} catch (NumberFormatException e) {
- throw new DateTimeParseException("invalid number [" + input + "]", input, 0, e);
+ throw new ElasticsearchParseException("invalid number [{}]", input);
}
}
@Override
@@ -109,6 +110,10 @@ public DateFormatter parseDefaulting(Map fields) {
}
@Override
+ public DateMathParser toDateMathParser() {
+ return new JavaDateMathParser(this);
+ }
+
public Locale getLocale() {
return Locale.ROOT;
}
diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java
index 218542f817be6..f950b9b515b51 100644
--- a/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java
@@ -19,11 +19,12 @@
package org.elasticsearch.common.time;
+import org.elasticsearch.ElasticsearchParseException;
+
import java.math.BigDecimal;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
-import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalField;
import java.util.Locale;
@@ -52,7 +53,7 @@ public TemporalAccessor parse(String input) {
return Instant.ofEpochSecond(Double.valueOf(input).longValue()).atZone(ZoneOffset.UTC);
}
if (inputs[1].length() > 9) {
- throw new DateTimeParseException("too much granularity after dot [" + input + "]", input, 0);
+ throw new ElasticsearchParseException("too much granularity after dot [{}]", input);
}
Long nanos = new BigDecimal(inputs[1]).movePointRight(9 - inputs[1].length()).longValueExact();
if (seconds < 0) {
@@ -63,7 +64,7 @@ public TemporalAccessor parse(String input) {
return Instant.ofEpochSecond(Long.valueOf(input)).atZone(ZoneOffset.UTC);
}
} catch (NumberFormatException e) {
- throw new DateTimeParseException("invalid number [" + input + "]", input, 0, e);
+ throw new ElasticsearchParseException("invalid number [{}]", input);
}
}
@@ -111,4 +112,9 @@ public DateFormatter withLocale(Locale locale) {
public DateFormatter parseDefaulting(Map fields) {
return this;
}
+
+ @Override
+ public DateMathParser toDateMathParser() {
+ return new JavaDateMathParser(this);
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java
index 75cd82b51e85a..41b69560491c6 100644
--- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java
@@ -19,6 +19,8 @@
package org.elasticsearch.common.time;
+import org.elasticsearch.ElasticsearchParseException;
+
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
@@ -48,27 +50,33 @@ class JavaDateFormatter implements DateFormatter {
if (distinctLocales > 1) {
throw new IllegalArgumentException("formatters must have the same locale");
}
+ this.printer = printer;
+ this.format = format;
if (parsers.length == 0) {
this.parsers = new DateTimeFormatter[]{printer};
} else {
this.parsers = parsers;
}
- this.format = format;
- this.printer = printer;
}
@Override
public TemporalAccessor parse(String input) {
- DateTimeParseException failure = null;
+ ElasticsearchParseException failure = null;
for (int i = 0; i < parsers.length; i++) {
try {
return parsers[i].parse(input);
} catch (DateTimeParseException e) {
if (failure == null) {
- failure = e;
- } else {
- failure.addSuppressed(e);
+ String msg = "could not parse input [" + input + "] with date formatter [" + format + "]";
+ if (getLocale().equals(Locale.ROOT) == false) {
+ msg += " and locale [" + getLocale() + "]";
+ }
+ if (e.getErrorIndex() >= 0) {
+ msg += " at position [" + e.getErrorIndex() + "]";
+ }
+ failure = new ElasticsearchParseException(msg);
}
+ failure.addSuppressed(e);
}
}
@@ -117,32 +125,36 @@ public String pattern() {
}
@Override
+ public Locale getLocale() {
+ return this.printer.getLocale();
+ }
+
+ @Override
+ public ZoneId getZone() {
+ return this.printer.getZone();
+ }
+
+ @Override
+ public DateMathParser toDateMathParser() {
+ return new JavaDateMathParser(this);
+ }
+
public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
final DateTimeFormatterBuilder parseDefaultingBuilder = new DateTimeFormatterBuilder().append(printer);
fields.forEach(parseDefaultingBuilder::parseDefaulting);
if (parsers.length == 1 && parsers[0].equals(printer)) {
- return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT));
+ return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(getLocale()));
} else {
final DateTimeFormatter[] parsersWithDefaulting = new DateTimeFormatter[parsers.length];
for (int i = 0; i < parsers.length; i++) {
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(parsers[i]);
fields.forEach(builder::parseDefaulting);
- parsersWithDefaulting[i] = builder.toFormatter(Locale.ROOT);
+ parsersWithDefaulting[i] = builder.toFormatter(getLocale());
}
- return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT), parsersWithDefaulting);
+ return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(getLocale()), parsersWithDefaulting);
}
}
- @Override
- public Locale getLocale() {
- return this.printer.getLocale();
- }
-
- @Override
- public ZoneId getZone() {
- return this.printer.getZone();
- }
-
@Override
public int hashCode() {
return Objects.hash(getLocale(), printer.getZone(), format);
@@ -157,7 +169,7 @@ public boolean equals(Object obj) {
return Objects.equals(format, other.format) &&
Objects.equals(getLocale(), other.getLocale()) &&
- Objects.equals(this.printer.getZone(), other.printer.getZone());
+ Objects.equals(getZone(), other.getZone());
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java
index c3a59f521904b..fa128e895123b 100644
--- a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java
+++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java
@@ -68,12 +68,13 @@ public JavaDateMathParser(DateFormatter formatter) {
}
@Override
- public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) {
- long time;
+ public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) {
+ Instant time;
String mathString;
if (text.startsWith("now")) {
try {
- time = now.getAsLong();
+ // TODO only millisecond granularity here!
+ time = Instant.ofEpochMilli(now.getAsLong());
} catch (Exception e) {
throw new ElasticsearchParseException("could not read the current timestamp", e);
}
@@ -90,12 +91,12 @@ public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZon
return parseMath(mathString, time, roundUp, timeZone);
}
- private long parseMath(final String mathString, final long time, final boolean roundUp,
+ private Instant parseMath(final String mathString, final Instant time, final boolean roundUp,
ZoneId timeZone) throws ElasticsearchParseException {
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
- ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone);
+ ZonedDateTime dateTime = ZonedDateTime.ofInstant(time, timeZone);
for (int i = 0; i < mathString.length(); ) {
char c = mathString.charAt(i++);
final boolean round;
@@ -216,14 +217,14 @@ private long parseMath(final String mathString, final long time, final boolean r
dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit());
}
}
- return dateTime.toInstant().toEpochMilli();
+ return dateTime.toInstant();
}
- private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
+ private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
DateFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter;
try {
if (timeZone == null) {
- return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli();
+ return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant();
} else {
TemporalAccessor accessor = formatter.parse(value);
ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor);
@@ -231,7 +232,7 @@ private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTim
timeZone = zoneId;
}
- return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli();
+ return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant();
}
} catch (IllegalArgumentException | DateTimeException e) {
throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage());
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
index 69b6a6e04a936..7a5bd97770297 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectArrayList;
-
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@@ -41,9 +40,9 @@
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Base64;
import java.util.List;
import java.util.Map;
@@ -108,7 +107,7 @@ public String typeName() {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
return DocValueFormat.BINARY;
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java
index 9e0b9f62acbe7..caf8baac24da1 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java
@@ -40,9 +40,9 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -190,7 +190,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
}
@Override
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java
index 0de2731ffd11f..c0be64b9f0c45 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java
@@ -33,14 +33,15 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.ShapeRelation;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -50,23 +51,23 @@
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
-import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
-
-/** A {@link FieldMapper} for ip addresses. */
+/** A {@link FieldMapper} for dates. */
public class DateFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "date";
- public static final FormatDateTimeFormatter DEFAULT_DATE_TIME_FORMATTER = Joda.forPattern(
- "strict_date_optional_time||epoch_millis", Locale.ROOT);
+ public static final String DEFAULT_DATE_FORMATTER_STRING = "strict_date_optional_time||epoch_millis";
+ public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatters.forPattern(DEFAULT_DATE_FORMATTER_STRING);
public static class Defaults {
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
@@ -75,8 +76,8 @@ public static class Defaults {
public static class Builder extends FieldMapper.Builder<Builder, DateFieldMapper> {
private Boolean ignoreMalformed;
+ private Explicit<String> format = new Explicit<>(DEFAULT_DATE_TIME_FORMATTER.pattern(), false);
private Locale locale;
- private boolean dateTimeFormatterSet = false;
public Builder(String name) {
super(name, new DateFieldType(), new DateFieldType());
@@ -104,28 +105,35 @@ protected Explicit ignoreMalformed(BuilderContext context) {
return Defaults.IGNORE_MALFORMED;
}
- /** Whether an explicit format for this date field has been set already. */
- public boolean isDateTimeFormatterSet() {
- return dateTimeFormatterSet;
+ public Builder locale(Locale locale) {
+ this.locale = locale;
+ return this;
+ }
+
+ public Locale locale() {
+ return locale;
+ }
+
+ public String format() {
+ return format.value();
}
- public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
- fieldType().setDateTimeFormatter(dateTimeFormatter);
- dateTimeFormatterSet = true;
+ public Builder format(String format) {
+ this.format = new Explicit<>(format, true);
return this;
}
- public void locale(Locale locale) {
- this.locale = locale;
+ public boolean isFormatterSet() {
+ return format.explicit();
}
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
- FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
- if (!locale.equals(dateTimeFormatter.locale())) {
- fieldType().setDateTimeFormatter( new FormatDateTimeFormatter(dateTimeFormatter.format(),
- dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
+ String formatter = this.format.value();
+ if (Objects.equals(locale, fieldType().dateTimeFormatter.getLocale()) == false ||
+ (Objects.equals(formatter, fieldType().dateTimeFormatter.pattern()) == false && Strings.isEmpty(formatter) == false)) {
+ fieldType().setDateTimeFormatter(DateFormatters.forPattern(formatter).withLocale(locale));
}
}
@@ -163,7 +171,7 @@ public Mapper.Builder,?> parse(String name, Map node, ParserCo
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
} else if (propName.equals("format")) {
- builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
+ builder.format(propNode.toString());
iterator.remove();
} else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@@ -174,7 +182,7 @@ public Mapper.Builder,?> parse(String name, Map node, ParserCo
}
public static final class DateFieldType extends MappedFieldType {
- protected FormatDateTimeFormatter dateTimeFormatter;
+ protected DateFormatter dateTimeFormatter;
protected DateMathParser dateMathParser;
DateFieldType() {
@@ -199,13 +207,12 @@ public MappedFieldType clone() {
public boolean equals(Object o) {
if (!super.equals(o)) return false;
DateFieldType that = (DateFieldType) o;
- return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) &&
- Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale());
+ return Objects.equals(dateTimeFormatter, that.dateTimeFormatter);
}
@Override
public int hashCode() {
- return Objects.hash(super.hashCode(), dateTimeFormatter.format(), dateTimeFormatter.locale());
+ return Objects.hash(super.hashCode(), dateTimeFormatter);
}
@Override
@@ -217,21 +224,21 @@ public String typeName() {
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
DateFieldType other = (DateFieldType) fieldType;
- if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) {
+ if (Objects.equals(dateTimeFormatter.pattern(), other.dateTimeFormatter.pattern()) == false) {
conflicts.add("mapper [" + name() + "] has different [format] values");
}
- if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
+ if (Objects.equals(dateTimeFormatter.getLocale(), other.dateTimeFormatter.getLocale()) == false) {
conflicts.add("mapper [" + name() + "] has different [locale] values");
}
}
- public FormatDateTimeFormatter dateTimeFormatter() {
+ public DateFormatter dateTimeFormatter() {
return dateTimeFormatter;
}
- public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
+ void setDateTimeFormatter(DateFormatter formatter) {
checkIfFrozen();
- this.dateTimeFormatter = dateTimeFormatter;
+ this.dateTimeFormatter = formatter;
this.dateMathParser = dateTimeFormatter.toDateMathParser();
}
@@ -240,7 +247,7 @@ protected DateMathParser dateMathParser() {
}
long parse(String value) {
- return dateTimeFormatter().parser().parseMillis(value);
+ return DateFormatters.toZonedDateTime(dateTimeFormatter().parse(value)).toInstant().toEpochMilli();
}
@Override
@@ -263,7 +270,7 @@ public Query termQuery(Object value, @Nullable QueryShardContext context) {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation,
- @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
+ @Nullable ZoneId timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
failIfNotIndexed();
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
@@ -297,8 +304,8 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower
return query;
}
- public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTimeZone zone,
- @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
+ public long parseToMilliseconds(Object value, boolean roundUp,
+ @Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
DateMathParser dateParser = dateMathParser();
if (forcedDateParser != null) {
dateParser = forcedDateParser;
@@ -310,13 +317,13 @@ public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTim
} else {
strValue = value.toString();
}
- return dateParser.parse(strValue, context::nowInMillis, roundUp, DateUtils.dateTimeZoneToZoneId(zone));
+ return dateParser.parse(strValue, context::nowInMillis, roundUp, zone).toEpochMilli();
}
@Override
- public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper,
- DateTimeZone timeZone, DateMathParser dateParser,
- QueryRewriteContext context) throws IOException {
+ public Relation isFieldWithinQuery(IndexReader reader,
+ Object from, Object to, boolean includeLower, boolean includeUpper,
+ ZoneId timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
if (dateParser == null) {
dateParser = this.dateMathParser;
}
@@ -375,17 +382,17 @@ public Object valueForDisplay(Object value) {
if (val == null) {
return null;
}
- return dateTimeFormatter().printer().print(val);
+ return dateTimeFormatter().format(Instant.ofEpochMilli(val).atZone(ZoneOffset.UTC));
}
@Override
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
- FormatDateTimeFormatter dateTimeFormatter = this.dateTimeFormatter;
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
+ DateFormatter dateTimeFormatter = this.dateTimeFormatter;
if (format != null) {
- dateTimeFormatter = Joda.forPattern(format);
+ dateTimeFormatter = DateFormatters.forPattern(format).withLocale(dateTimeFormatter.getLocale());
}
if (timeZone == null) {
- timeZone = DateTimeZone.UTC;
+ timeZone = ZoneOffset.UTC;
}
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone);
}
@@ -445,7 +452,7 @@ protected void parseCreateField(ParseContext context, List field
long timestamp;
try {
timestamp = fieldType().parse(dateAsString);
- } catch (IllegalArgumentException e) {
+ } catch (ElasticsearchParseException e) {
if (ignoreMalformed.value()) {
context.addIgnoredField(fieldType.name());
return;
@@ -489,12 +496,12 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults,
}
if (includeDefaults
- || fieldType().dateTimeFormatter().format().equals(DEFAULT_DATE_TIME_FORMATTER.format()) == false) {
- builder.field("format", fieldType().dateTimeFormatter().format());
+ || fieldType().dateTimeFormatter().pattern().equals(DEFAULT_DATE_TIME_FORMATTER.pattern()) == false) {
+ builder.field("format", fieldType().dateTimeFormatter().pattern());
}
if (includeDefaults
- || fieldType().dateTimeFormatter().locale() != Locale.ROOT) {
- builder.field("locale", fieldType().dateTimeFormatter().locale());
+ || fieldType().dateTimeFormatter().getLocale().equals(DEFAULT_DATE_TIME_FORMATTER.getLocale()) == false) {
+ builder.field("locale", fieldType().dateTimeFormatter().getLocale());
}
}
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
index 86674617272a7..2d8b58667d34d 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
@@ -21,10 +21,11 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -35,6 +36,7 @@
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import java.io.IOException;
+import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -668,10 +670,10 @@ private static Mapper.Builder,?> createBuilderFromFieldType(final ParseContext
return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT);
}
- private static Mapper.Builder, ?> newDateBuilder(String name, FormatDateTimeFormatter dateTimeFormatter, Version indexCreated) {
+ private static Mapper.Builder, ?> newDateBuilder(String name, DateFormatter dateTimeFormatter, Version indexCreated) {
DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name);
if (dateTimeFormatter != null) {
- builder.dateTimeFormatter(dateTimeFormatter);
+ builder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.getLocale());
}
return builder;
}
@@ -714,10 +716,10 @@ private static Mapper.Builder,?> createBuilderFromDynamicValue(final ParseCont
// We refuse to match pure numbers, which are too likely to be
// false positives with date formats that include eg.
// `epoch_millis` or `YYYY`
- for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
+ for (DateFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
- dateTimeFormatter.parser().parseMillis(text);
- } catch (IllegalArgumentException e) {
+ dateTimeFormatter.parse(text);
+ } catch (ElasticsearchParseException | DateTimeParseException e) {
// failure to parse this, continue
continue;
}
@@ -727,8 +729,8 @@ private static Mapper.Builder,?> createBuilderFromDynamicValue(final ParseCont
}
if (builder instanceof DateFieldMapper.Builder) {
DateFieldMapper.Builder dateBuilder = (DateFieldMapper.Builder) builder;
- if (dateBuilder.isDateTimeFormatterSet() == false) {
- dateBuilder.dateTimeFormatter(dateTimeFormatter);
+ if (dateBuilder.isFormatterSet() == false) {
+ dateBuilder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.getLocale());
}
}
return builder;
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java
index a8ef46b93060e..2b52e42ffe558 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java
@@ -44,10 +44,10 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
@@ -303,7 +303,7 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
index eaafeefa7e0dd..3651a75c32a35 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
@@ -47,9 +47,9 @@
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Objects;
@@ -332,10 +332,10 @@ public Query termsQuery(List> values, @Nullable QueryShardContext context) {
* @param relation the relation, nulls should be interpreted like INTERSECTS
*/
public Query rangeQuery(
- Object lowerTerm, Object upperTerm,
- boolean includeLower, boolean includeUpper,
- ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser,
- QueryShardContext context) {
+ Object lowerTerm, Object upperTerm,
+ boolean includeLower, boolean includeUpper,
+ ShapeRelation relation, ZoneId timeZone, DateMathParser parser,
+ QueryShardContext context) {
throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
}
@@ -392,7 +392,7 @@ public Relation isFieldWithinQuery(
IndexReader reader,
Object from, Object to,
boolean includeLower, boolean includeUpper,
- DateTimeZone timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
+ ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
return Relation.INTERSECTS;
}
@@ -427,7 +427,7 @@ public void setEagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
/** Return a {@link DocValueFormat} that can be used to display and parse
* values as returned by the fielddata API.
* The default implementation returns a {@link DocValueFormat#RAW}. */
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
index 8d9a688776548..06e12ca8b5e4c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
@@ -53,9 +53,9 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
@@ -961,7 +961,7 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
+ "] does not support custom time zones");
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java
index 0deb6e8afa052..9e1b45269c749 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java
@@ -42,26 +42,27 @@
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.geo.ShapeRelation;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.query.QueryShardContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
@@ -71,7 +72,6 @@
import java.util.Objects;
import java.util.Set;
-import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD;
@@ -92,12 +92,12 @@ public static class Defaults {
public static class Builder extends FieldMapper.Builder<Builder, RangeFieldMapper> {
private Boolean coerce;
- private Locale locale;
+ private Locale locale = Locale.ROOT;
+ private String pattern;
public Builder(String name, RangeType type) {
super(name, new RangeFieldType(type), new RangeFieldType(type));
builder = this;
- locale = Locale.ROOT;
}
@Override
@@ -128,8 +128,8 @@ protected Explicit coerce(BuilderContext context) {
return Defaults.COERCE;
}
- public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
- fieldType().setDateTimeFormatter(dateTimeFormatter);
+ public Builder format(String format) {
+ this.pattern = format;
return this;
}
@@ -145,13 +145,14 @@ public void locale(Locale locale) {
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
- FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
+ DateFormatter formatter = fieldType().dateTimeFormatter;
if (fieldType().rangeType == RangeType.DATE) {
- if (!locale.equals(dateTimeFormatter.locale())) {
- fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(),
- dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
+ if (Strings.hasLength(builder.pattern) &&
+ Objects.equals(builder.pattern, formatter.pattern()) == false ||
+ Objects.equals(builder.locale, formatter.getLocale()) == false) {
+ fieldType().setDateTimeFormatter(DateFormatters.forPattern(Strings.hasLength(pattern) ? pattern : formatter.pattern()).withLocale(locale));
}
- } else if (dateTimeFormatter != null) {
+ } else if (pattern != null) {
throw new IllegalArgumentException("field [" + name() + "] of type [" + fieldType().rangeType
+ "] should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type");
}
@@ -191,7 +192,7 @@ public Mapper.Builder,?> parse(String name, Map node,
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
} else if (propName.equals("format")) {
- builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
+ builder.format(propNode.toString());
iterator.remove();
} else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@@ -203,7 +204,7 @@ public Mapper.Builder,?> parse(String name, Map node,
public static final class RangeFieldType extends MappedFieldType {
protected RangeType rangeType;
- protected FormatDateTimeFormatter dateTimeFormatter;
+ protected DateFormatter dateTimeFormatter;
protected DateMathParser dateMathParser;
RangeFieldType(RangeType type) {
@@ -220,8 +221,8 @@ public static final class RangeFieldType extends MappedFieldType {
RangeFieldType(RangeFieldType other) {
super(other);
this.rangeType = other.rangeType;
- if (other.dateTimeFormatter() != null) {
- setDateTimeFormatter(other.dateTimeFormatter);
+ if (other.rangeType == RangeType.DATE && other.dateTimeFormatter() != null) {
+ setDateTimeFormatter(other.dateTimeFormatter());
}
}
@@ -236,15 +237,13 @@ public boolean equals(Object o) {
RangeFieldType that = (RangeFieldType) o;
return Objects.equals(rangeType, that.rangeType) &&
(rangeType == RangeType.DATE) ?
- Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format())
- && Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale())
+ Objects.equals(dateTimeFormatter, that.dateTimeFormatter)
: dateTimeFormatter == null && that.dateTimeFormatter == null;
}
@Override
public int hashCode() {
- return (dateTimeFormatter == null) ? Objects.hash(super.hashCode(), rangeType)
- : Objects.hash(super.hashCode(), rangeType, dateTimeFormatter.format(), dateTimeFormatter.locale());
+ return Objects.hash(super.hashCode(), rangeType, dateTimeFormatter);
}
@Override
@@ -252,11 +251,11 @@ public String typeName() {
return rangeType.name;
}
- public FormatDateTimeFormatter dateTimeFormatter() {
+ public DateFormatter dateTimeFormatter() {
return dateTimeFormatter;
}
- public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
+ public void setDateTimeFormatter(DateFormatter dateTimeFormatter) {
checkIfFrozen();
this.dateTimeFormatter = dateTimeFormatter;
this.dateMathParser = dateTimeFormatter.toDateMathParser();
@@ -286,7 +285,7 @@ public Query termQuery(Object value, QueryShardContext context) {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
- ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
+ ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) {
failIfNotIndexed();
if (parser == null) {
parser = dateMathParser();
@@ -406,13 +405,14 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults,
if (fieldType().rangeType == RangeType.DATE
&& (includeDefaults || (fieldType().dateTimeFormatter() != null
- && fieldType().dateTimeFormatter().format().equals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()) == false))) {
- builder.field("format", fieldType().dateTimeFormatter().format());
+ && fieldType().dateTimeFormatter().pattern()
+ .equals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern()) == false))) {
+ builder.field("format", fieldType().dateTimeFormatter.pattern());
}
if (fieldType().rangeType == RangeType.DATE
&& (includeDefaults || (fieldType().dateTimeFormatter() != null
- && fieldType().dateTimeFormatter().locale() != Locale.ROOT))) {
- builder.field("locale", fieldType().dateTimeFormatter().locale());
+ && fieldType().dateTimeFormatter().getLocale() != Locale.ROOT))) {
+ builder.field("locale", fieldType().dateTimeFormatter().getLocale());
}
if (includeDefaults || coerce.explicit()) {
builder.field("coerce", coerce.value());
@@ -544,7 +544,8 @@ public Field getRangeField(String name, Range r) {
return new LongRange(name, new long[] {((Number)r.from).longValue()}, new long[] {((Number)r.to).longValue()});
}
private Number parse(DateMathParser dateMathParser, String dateStr) {
- return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");});
+ return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");})
+ .toEpochMilli();
}
@Override
public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
@@ -587,18 +588,18 @@ public Query dvRangeQuery(String field, QueryType queryType, Object from, Object
@Override
public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Object upperTerm, boolean includeLower,
- boolean includeUpper, ShapeRelation relation, @Nullable DateTimeZone timeZone,
+ boolean includeUpper, ShapeRelation relation, @Nullable ZoneId timeZone,
@Nullable DateMathParser parser, QueryShardContext context) {
- DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone;
- ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(zone);
+ ZoneId zone = (timeZone == null) ? ZoneOffset.UTC : timeZone;
+
DateMathParser dateMathParser = (parser == null) ?
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser;
Long low = lowerTerm == null ? Long.MIN_VALUE :
dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(),
- context::nowInMillis, false, zoneId);
+ context::nowInMillis, false, zone).toEpochMilli();
Long high = upperTerm == null ? Long.MAX_VALUE :
dateMathParser.parse(upperTerm instanceof BytesRef ? ((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(),
- context::nowInMillis, false, zoneId);
+ context::nowInMillis, false, zone).toEpochMilli();
return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone,
dateMathParser, context);
@@ -911,7 +912,7 @@ public Object parse(Object value, boolean coerce) {
return numberType.parse(value, coerce);
}
public Query rangeQuery(String field, boolean hasDocValues, Object from, Object to, boolean includeFrom, boolean includeTo,
- ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser dateMathParser,
+ ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser dateMathParser,
QueryShardContext context) {
Object lower = from == null ? minValue() : parse(from, false);
Object upper = to == null ? maxValue() : parse(to, false);
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java
index ed5135785cde1..b5463f6803c45 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java
@@ -22,9 +22,9 @@
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType;
@@ -43,10 +43,10 @@
public class RootObjectMapper extends ObjectMapper {
public static class Defaults {
- public static final FormatDateTimeFormatter[] DYNAMIC_DATE_TIME_FORMATTERS =
- new FormatDateTimeFormatter[]{
+ public static final DateFormatter[] DYNAMIC_DATE_TIME_FORMATTERS =
+ new DateFormatter[]{
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
- Joda.getStrictStandardDateFormatter()
+ DateFormatters.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis")
};
public static final boolean DATE_DETECTION = true;
public static final boolean NUMERIC_DETECTION = false;
@@ -55,8 +55,7 @@ public static class Defaults {
public static class Builder extends ObjectMapper.Builder {
protected Explicit dynamicTemplates = new Explicit<>(new DynamicTemplate[0], false);
- protected Explicit dynamicDateTimeFormatters =
- new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
+ protected Explicit dynamicDateTimeFormatters = new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
protected Explicit dateDetection = new Explicit<>(Defaults.DATE_DETECTION, false);
protected Explicit numericDetection = new Explicit<>(Defaults.NUMERIC_DETECTION, false);
@@ -65,8 +64,8 @@ public Builder(String name) {
this.builder = this;
}
- public Builder dynamicDateTimeFormatter(Collection dateTimeFormatters) {
- this.dynamicDateTimeFormatters = new Explicit<>(dateTimeFormatters.toArray(new FormatDateTimeFormatter[0]), true);
+ public Builder dynamicDateTimeFormatter(Collection dateTimeFormatters) {
+ this.dynamicDateTimeFormatters = new Explicit<>(dateTimeFormatters.toArray(new DateFormatter[0]), true);
return this;
}
@@ -141,7 +140,7 @@ protected boolean processField(RootObjectMapper.Builder builder, String fieldNam
Version indexVersionCreated) {
if (fieldName.equals("date_formats") || fieldName.equals("dynamic_date_formats")) {
if (fieldNode instanceof List) {
- List formatters = new ArrayList<>();
+ List formatters = new ArrayList<>();
for (Object formatter : (List>) fieldNode) {
if (formatter.toString().startsWith("epoch_")) {
throw new MapperParsingException("Epoch ["+ formatter +"] is not supported as dynamic date format");
@@ -193,13 +192,13 @@ protected boolean processField(RootObjectMapper.Builder builder, String fieldNam
}
}
- private Explicit dynamicDateTimeFormatters;
+ private Explicit dynamicDateTimeFormatters;
private Explicit dateDetection;
private Explicit numericDetection;
private Explicit dynamicTemplates;
RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Map mappers,
- Explicit dynamicDateTimeFormatters, Explicit dynamicTemplates,
+ Explicit dynamicDateTimeFormatters, Explicit dynamicTemplates,
Explicit dateDetection, Explicit numericDetection, Settings settings) {
super(name, name, enabled, Nested.NO, dynamic, mappers, settings);
this.dynamicTemplates = dynamicTemplates;
@@ -215,7 +214,7 @@ public ObjectMapper mappingUpdate(Mapper mapper) {
// set everything to they implicit default value so that they are not
// applied at merge time
update.dynamicTemplates = new Explicit<>(new DynamicTemplate[0], false);
- update.dynamicDateTimeFormatters = new Explicit(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
+ update.dynamicDateTimeFormatters = new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
update.dateDetection = new Explicit<>(Defaults.DATE_DETECTION, false);
update.numericDetection = new Explicit<>(Defaults.NUMERIC_DETECTION, false);
return update;
@@ -229,7 +228,7 @@ public boolean numericDetection() {
return this.numericDetection.value();
}
- public FormatDateTimeFormatter[] dynamicDateTimeFormatters() {
+ public DateFormatter[] dynamicDateTimeFormatters() {
return dynamicDateTimeFormatters.value();
}
@@ -302,8 +301,8 @@ protected void doXContent(XContentBuilder builder, ToXContent.Params params) thr
if (dynamicDateTimeFormatters.explicit() || includeDefaults) {
builder.startArray("dynamic_date_formats");
- for (FormatDateTimeFormatter dateTimeFormatter : dynamicDateTimeFormatters.value()) {
- builder.value(dateTimeFormatter.format());
+ for (DateFormatter dateTimeFormatter : dynamicDateTimeFormatters.value()) {
+ builder.value(dateTimeFormatter.pattern());
}
builder.endArray();
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java
index 3d3b160787050..366eb3b36f0fe 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java
@@ -23,7 +23,8 @@
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.query.QueryShardContext;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
/**
* {@link MappedFieldType} base impl for field types that are neither dates nor ranges.
@@ -40,7 +41,7 @@ protected SimpleMappedFieldType(MappedFieldType ref) {
@Override
public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
- ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
+ ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) {
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
"] does not support DISJOINT ranges");
@@ -52,7 +53,7 @@ public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includ
}
/**
- * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, DateTimeZone, DateMathParser, QueryShardContext)}
+ * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, ZoneId, DateMathParser, QueryShardContext)}
* but without the trouble of relations or date-specific options.
*/
protected Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
index a43aed3b08de7..12acd28ae809c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
@@ -21,8 +21,8 @@
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.similarity.SimilarityProvider;
@@ -263,9 +263,9 @@ private static IndexOptions nodeIndexOptionValue(final Object propNode) {
}
}
- public static FormatDateTimeFormatter parseDateTimeFormatter(Object node) {
+ public static DateFormatter parseDateTimeFormatter(Object node) {
if (node instanceof String) {
- return Joda.forPattern((String) node);
+ return DateFormatters.forPattern(node.toString());
}
throw new IllegalArgumentException("Invalid format: [" + node.toString() + "]: expected string value");
}
diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
index dd5b21c3fde18..184d6e70d1031 100644
--- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
+++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
@@ -38,9 +38,9 @@
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.QueryParserHelper;
import org.elasticsearch.index.search.QueryStringQueryParser;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
@@ -144,7 +144,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder i
private static final ParseField RELATION_FIELD = new ParseField("relation");
private final String fieldName;
-
private Object from;
-
private Object to;
-
- private DateTimeZone timeZone;
-
+ private ZoneId timeZone;
private boolean includeLower = DEFAULT_INCLUDE_LOWER;
-
private boolean includeUpper = DEFAULT_INCLUDE_UPPER;
-
- private FormatDateTimeFormatter format;
-
+ private String format;
private ShapeRelation relation;
/**
@@ -102,11 +95,8 @@ public RangeQueryBuilder(StreamInput in) throws IOException {
to = in.readGenericValue();
includeLower = in.readBoolean();
includeUpper = in.readBoolean();
- timeZone = in.readOptionalTimeZone();
- String formatString = in.readOptionalString();
- if (formatString != null) {
- format = Joda.forPattern(formatString);
- }
+ timeZone = in.readOptionalZoneId();
+ format = in.readOptionalString();
String relationString = in.readOptionalString();
if (relationString != null) {
relation = ShapeRelation.getRelationByName(relationString);
@@ -130,12 +120,8 @@ protected void doWriteTo(StreamOutput out) throws IOException {
out.writeGenericValue(this.to);
out.writeBoolean(this.includeLower);
out.writeBoolean(this.includeUpper);
- out.writeOptionalTimeZone(timeZone);
- String formatString = null;
- if (this.format != null) {
- formatString = this.format.format();
- }
- out.writeOptionalString(formatString);
+ out.writeOptionalZoneId(timeZone);
+ out.writeOptionalString(format);
String relationString = null;
if (this.relation != null) {
relationString = this.relation.getRelationName();
@@ -268,7 +254,11 @@ public RangeQueryBuilder timeZone(String timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("timezone cannot be null");
}
- this.timeZone = DateTimeZone.forID(timeZone);
+ try {
+ this.timeZone = ZoneId.of(timeZone);
+ } catch (ZoneRulesException e) {
+ throw new IllegalArgumentException(e);
+ }
return this;
}
@@ -276,10 +266,10 @@ public RangeQueryBuilder timeZone(String timeZone) {
* In case of date field, gets the from/to fields timezone adjustment
*/
public String timeZone() {
- return this.timeZone == null ? null : this.timeZone.getID();
+ return this.timeZone == null ? null : this.timeZone.getId();
}
- DateTimeZone getDateTimeZone() { // for testing
+ ZoneId getDateTimeZone() { // for testing
return timeZone;
}
@@ -290,7 +280,9 @@ public RangeQueryBuilder format(String format) {
if (format == null) {
throw new IllegalArgumentException("format cannot be null");
}
- this.format = Joda.forPattern(format);
+ // this just ensures that the pattern is actually valid, no need to keep it here
+ DateFormatters.forPattern(format);
+ this.format = format;
return this;
}
@@ -298,12 +290,12 @@ public RangeQueryBuilder format(String format) {
* Gets the format field to parse the from/to fields
*/
public String format() {
- return this.format == null ? null : this.format.format();
+ return format;
}
DateMathParser getForceDateParser() { // pkg private for testing
- if (this.format != null) {
- return this.format.toDateMathParser();
+ if (Strings.isEmpty(format) == false) {
+ return DateFormatters.forPattern(this.format).toDateMathParser();
}
return null;
}
@@ -335,10 +327,10 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower);
builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper);
if (timeZone != null) {
- builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getID());
+ builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getId());
}
- if (format != null) {
- builder.field(FORMAT_FIELD.getPreferredName(), format.format());
+ if (Strings.isEmpty(format) == false) {
+ builder.field(FORMAT_FIELD.getPreferredName(), format);
}
if (relation != null) {
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
@@ -532,21 +524,19 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
@Override
protected int doHashCode() {
- String timeZoneId = timeZone == null ? null : timeZone.getID();
- String formatString = format == null ? null : format.format();
- return Objects.hash(fieldName, from, to, timeZoneId, includeLower, includeUpper, formatString);
+ String timeZoneId = timeZone == null ? null : timeZone.getId();
+ return Objects.hash(fieldName, from, to, timeZoneId, includeLower, includeUpper, format);
}
@Override
protected boolean doEquals(RangeQueryBuilder other) {
- String timeZoneId = timeZone == null ? null : timeZone.getID();
- String formatString = format == null ? null : format.format();
+ String timeZoneId = timeZone == null ? null : timeZone.getId();
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(from, other.from) &&
Objects.equals(to, other.to) &&
Objects.equals(timeZoneId, other.timeZone()) &&
Objects.equals(includeLower, other.includeLower) &&
Objects.equals(includeUpper, other.includeUpper) &&
- Objects.equals(formatString, other.format());
+ Objects.equals(format, other.format);
}
}
diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java
index 84597d4d3383c..92e6dd60c01d1 100644
--- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java
+++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java
@@ -55,9 +55,9 @@
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -90,7 +90,7 @@ public class QueryStringQueryParser extends XQueryParser {
private Analyzer forceQuoteAnalyzer;
private String quoteFieldSuffix;
private boolean analyzeWildcard;
- private DateTimeZone timeZone;
+ private ZoneId timeZone;
private Fuzziness fuzziness = Fuzziness.AUTO;
private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;
private MappedFieldType currentFieldType;
@@ -228,7 +228,7 @@ public void setAnalyzeWildcard(boolean analyzeWildcard) {
/**
* @param timeZone Time Zone to be applied to any range query related to dates.
*/
- public void setTimeZone(DateTimeZone timeZone) {
+ public void setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
}
diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java
index 892d921091e37..87c62c7d6c5fb 100644
--- a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java
+++ b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java
@@ -25,7 +25,8 @@
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
-import org.elasticsearch.common.joda.JodaDateMathParser;
+import org.elasticsearch.common.time.DateMathParser;
+import org.elasticsearch.common.time.JavaDateMathParser;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.DateFieldMapper;
@@ -204,7 +205,7 @@ public double decayNumericGauss(double docValue) {
*
*/
private static final ZoneId defaultZoneId = ZoneId.of("UTC");
- private static final JodaDateMathParser dateParser = new JodaDateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
+ private static final DateMathParser dateParser = new JavaDateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
public static final class DecayDateLinear {
long origin;
@@ -212,7 +213,7 @@ public static final class DecayDateLinear {
double scaling;
public DecayDateLinear(String originStr, String scaleStr, String offsetStr, double decay) {
- this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
+ this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
@@ -235,7 +236,7 @@ public static final class DecayDateExp {
double scaling;
public DecayDateExp(String originStr, String scaleStr, String offsetStr, double decay) {
- this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
+ this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
@@ -258,7 +259,7 @@ public static final class DecayDateGauss {
double scaling;
public DecayDateGauss(String originStr, String scaleStr, String offsetStr, double decay) {
- this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
+ this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java
index e5ece1afa33e4..b39de46a6c28f 100644
--- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java
+++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java
@@ -25,12 +25,11 @@
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.network.NetworkAddress;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
import org.joda.time.DateTimeZone;
import java.io.IOException;
@@ -39,6 +38,9 @@
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Base64;
import java.util.Locale;
@@ -171,19 +173,25 @@ final class DateTime implements DocValueFormat {
public static final String NAME = "date_time";
- final FormatDateTimeFormatter formatter;
- // TODO: change this to ZoneId, but will require careful change to serialization
- final DateTimeZone timeZone;
+ final DateFormatter formatter;
+ final ZoneId timeZone;
private final DateMathParser parser;
- public DateTime(FormatDateTimeFormatter formatter, DateTimeZone timeZone) {
- this.formatter = Objects.requireNonNull(formatter);
+ public DateTime(DateFormatter formatter, ZoneId timeZone) {
+ this.formatter = formatter;
this.timeZone = Objects.requireNonNull(timeZone);
this.parser = formatter.toDateMathParser();
}
public DateTime(StreamInput in) throws IOException {
- this(Joda.forPattern(in.readString()), DateTimeZone.forID(in.readString()));
+ this.formatter = DateFormatters.forPattern(in.readString());
+ this.parser = formatter.toDateMathParser();
+ // calling ZoneId.of("UTC") will produce "UTC" as timezone in the formatter
+ // calling ZoneOffset.UTC will produce "Z" as timezone in the formatter
+ // as returning a date having UTC is always returning Z as timezone in all
+ // versions, this is a hack around the java time behaviour
+ String zoneId = in.readString();
+ this.timeZone = zoneId.equals("UTC") ? ZoneOffset.UTC : ZoneId.of(zoneId);
}
@Override
@@ -193,13 +201,14 @@ public String getWriteableName() {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(formatter.format());
- out.writeString(timeZone.getID());
+ out.writeString(formatter.pattern());
+ // joda does not understand "Z" for utc, so we must special case
+ out.writeString(timeZone.getId().equals("Z") ? DateTimeZone.UTC.getID() : timeZone.getId());
}
@Override
public String format(long value) {
- return formatter.printer().withZone(timeZone).print(value);
+ return formatter.format(Instant.ofEpochMilli(value).atZone(timeZone));
}
@Override
@@ -214,7 +223,7 @@ public String format(BytesRef value) {
@Override
public long parseLong(String value, boolean roundUp, LongSupplier now) {
- return parser.parse(value, now, roundUp, DateUtils.dateTimeZoneToZoneId(timeZone));
+ return parser.parse(value, now, roundUp, timeZone).toEpochMilli();
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
index 28970ec828af9..4e5ab6988eb8c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
@@ -20,10 +20,9 @@
package org.elasticsearch.search.aggregations.bucket.composite;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -37,9 +36,10 @@
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.Objects;
import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS;
@@ -70,9 +70,9 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
}, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG);
PARSER.declareField(DateHistogramValuesSourceBuilder::timeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return DateTimeZone.forID(p.text());
+ return ZoneId.of(p.text());
} else {
- return DateTimeZone.forOffsetHours(p.intValue());
+ return ZoneOffset.ofHours(p.intValue());
}
}, new ParseField("time_zone"), ObjectParser.ValueType.LONG);
CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC);
@@ -82,7 +82,7 @@ static DateHistogramValuesSourceBuilder parse(String name, XContentParser parser
}
private long interval = 0;
- private DateTimeZone timeZone = null;
+ private ZoneId timeZone = null;
private DateHistogramInterval dateHistogramInterval;
public DateHistogramValuesSourceBuilder(String name) {
@@ -93,20 +93,14 @@ protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException {
super(in);
this.interval = in.readLong();
this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
- if (in.readBoolean()) {
- timeZone = DateTimeZone.forID(in.readString());
- }
+ timeZone = in.readOptionalZoneId();
}
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeLong(interval);
out.writeOptionalWriteable(dateHistogramInterval);
- boolean hasTimeZone = timeZone != null;
- out.writeBoolean(hasTimeZone);
- if (hasTimeZone) {
- out.writeString(timeZone.getID());
- }
+ out.writeOptionalZoneId(timeZone);
}
@Override
@@ -176,7 +170,7 @@ public DateHistogramValuesSourceBuilder dateHistogramInterval(DateHistogramInter
/**
* Sets the time zone to use for this aggregation
*/
- public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) {
+ public DateHistogramValuesSourceBuilder timeZone(ZoneId timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
}
@@ -187,14 +181,14 @@ public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) {
/**
* Gets the time zone to use for this aggregation
*/
- public DateTimeZone timeZone() {
+ public ZoneId timeZone() {
return timeZone;
}
private Rounding createRounding() {
Rounding.Builder tzRoundingBuilder;
if (dateHistogramInterval != null) {
- DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
+ Rounding.DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
if (dateTimeUnit != null) {
tzRoundingBuilder = Rounding.builder(dateTimeUnit);
} else {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java
index 635690c44f49e..9ee142fcd2fd5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java
@@ -21,7 +21,7 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.aggregations.support.ValuesSource;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
index 87ba80af9a4b0..794ce066ed76e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
@@ -20,11 +20,10 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -42,9 +41,9 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
@@ -70,19 +69,19 @@ public class AutoDateHistogramAggregationBuilder
* The current implementation probably should not be invoked in a tight loop.
* @return Array of RoundingInfo
*/
- static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
+ static RoundingInfo[] buildRoundings(ZoneId timeZone) {
RoundingInfo[] roundings = new RoundingInfo[6];
- roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
- 1000L, "s" , 1, 5, 10, 30);
- roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
+ roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone),
+ 1000L, "s", 1, 5, 10, 30);
+ roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, "m", 1, 5, 10, 30);
- roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
- 60 * 60 * 1000L, "h", 1, 3, 12);
- roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
+ roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone),
+ 60 * 60 * 1000L, "h", 1, 3, 12);
+ roundings[3] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.DAY_OF_MONTH, timeZone),
24 * 60 * 60 * 1000L, "d", 1, 7);
- roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
+ roundings[4] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MONTH_OF_YEAR, timeZone),
30 * 24 * 60 * 60 * 1000L, "M", 1, 3);
- roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
+ roundings[5] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.YEAR_OF_CENTURY, timeZone),
365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100);
return roundings;
}
@@ -156,7 +155,7 @@ public int getNumBuckets() {
return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
}
- static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
+ static Rounding createRounding(Rounding.DateTimeUnit interval, ZoneId timeZone) {
Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
if (timeZone != null) {
tzRoundingBuilder.timeZone(timeZone);
@@ -196,7 +195,7 @@ public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, String
}
public RoundingInfo(StreamInput in) throws IOException {
- rounding = Rounding.Streams.read(in);
+ rounding = Rounding.read(in);
roughEstimateDurationMillis = in.readVLong();
innerIntervals = in.readIntArray();
unitAbbreviation = in.readString();
@@ -204,7 +203,7 @@ public RoundingInfo(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
- Rounding.Streams.write(rounding, out);
+ rounding.writeTo(out);
out.writeVLong(roughEstimateDurationMillis);
out.writeIntArray(innerIntervals);
out.writeString(unitAbbreviation);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
index 81bb70bd9672a..1b982ea9deca2 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
@@ -23,8 +23,8 @@
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
index dba7fbb34fb9a..0bfc056e13123 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
@@ -23,11 +23,10 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.joda.Joda;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
@@ -54,13 +53,14 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeField;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.zone.ZoneOffsetTransition;
import java.util.HashMap;
import java.util.List;
-import java.util.Locale;
import java.util.Map;
import java.util.Objects;
@@ -72,28 +72,28 @@
public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, DateHistogramAggregationBuilder>
implements MultiBucketAggregationBuilder {
public static final String NAME = "date_histogram";
- private static DateMathParser EPOCH_MILLIS_PARSER = Joda.forPattern("epoch_millis", Locale.ROOT).toDateMathParser();
+ private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatters.forPattern("epoch_millis").toDateMathParser();
- public static final Map<String, DateTimeUnit> DATE_FIELD_UNITS;
+ public static final Map<String, Rounding.DateTimeUnit> DATE_FIELD_UNITS;
static {
- Map<String, DateTimeUnit> dateFieldUnits = new HashMap<>();
- dateFieldUnits.put("year", DateTimeUnit.YEAR_OF_CENTURY);
- dateFieldUnits.put("1y", DateTimeUnit.YEAR_OF_CENTURY);
- dateFieldUnits.put("quarter", DateTimeUnit.QUARTER);
- dateFieldUnits.put("1q", DateTimeUnit.QUARTER);
- dateFieldUnits.put("month", DateTimeUnit.MONTH_OF_YEAR);
- dateFieldUnits.put("1M", DateTimeUnit.MONTH_OF_YEAR);
- dateFieldUnits.put("week", DateTimeUnit.WEEK_OF_WEEKYEAR);
- dateFieldUnits.put("1w", DateTimeUnit.WEEK_OF_WEEKYEAR);
- dateFieldUnits.put("day", DateTimeUnit.DAY_OF_MONTH);
- dateFieldUnits.put("1d", DateTimeUnit.DAY_OF_MONTH);
- dateFieldUnits.put("hour", DateTimeUnit.HOUR_OF_DAY);
- dateFieldUnits.put("1h", DateTimeUnit.HOUR_OF_DAY);
- dateFieldUnits.put("minute", DateTimeUnit.MINUTES_OF_HOUR);
- dateFieldUnits.put("1m", DateTimeUnit.MINUTES_OF_HOUR);
- dateFieldUnits.put("second", DateTimeUnit.SECOND_OF_MINUTE);
- dateFieldUnits.put("1s", DateTimeUnit.SECOND_OF_MINUTE);
+ Map<String, Rounding.DateTimeUnit> dateFieldUnits = new HashMap<>();
+ dateFieldUnits.put("year", Rounding.DateTimeUnit.YEAR_OF_CENTURY);
+ dateFieldUnits.put("1y", Rounding.DateTimeUnit.YEAR_OF_CENTURY);
+ dateFieldUnits.put("quarter", Rounding.DateTimeUnit.QUARTER_OF_YEAR);
+ dateFieldUnits.put("1q", Rounding.DateTimeUnit.QUARTER_OF_YEAR);
+ dateFieldUnits.put("month", Rounding.DateTimeUnit.MONTH_OF_YEAR);
+ dateFieldUnits.put("1M", Rounding.DateTimeUnit.MONTH_OF_YEAR);
+ dateFieldUnits.put("week", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR);
+ dateFieldUnits.put("1w", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR);
+ dateFieldUnits.put("day", Rounding.DateTimeUnit.DAY_OF_MONTH);
+ dateFieldUnits.put("1d", Rounding.DateTimeUnit.DAY_OF_MONTH);
+ dateFieldUnits.put("hour", Rounding.DateTimeUnit.HOUR_OF_DAY);
+ dateFieldUnits.put("1h", Rounding.DateTimeUnit.HOUR_OF_DAY);
+ dateFieldUnits.put("minute", Rounding.DateTimeUnit.MINUTES_OF_HOUR);
+ dateFieldUnits.put("1m", Rounding.DateTimeUnit.MINUTES_OF_HOUR);
+ dateFieldUnits.put("second", Rounding.DateTimeUnit.SECOND_OF_MINUTE);
+ dateFieldUnits.put("1s", Rounding.DateTimeUnit.SECOND_OF_MINUTE);
DATE_FIELD_UNITS = unmodifiableMap(dateFieldUnits);
}
@@ -370,11 +370,11 @@ public String getType() {
* coordinating node in order to generate missing buckets, which may cross a transition
* even though data on the shards doesn't.
*/
- DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
- final DateTimeZone tz = timeZone();
+ ZoneId rewriteTimeZone(QueryShardContext context) throws IOException {
+ final ZoneId tz = timeZone();
if (field() != null &&
tz != null &&
- tz.isFixed() == false &&
+ tz.getRules().isFixedOffset() == false &&
field() != null &&
script() == null) {
final MappedFieldType ft = context.fieldMapper(field());
@@ -392,16 +392,23 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
}
if (anyInstant != null) {
- final long prevTransition = tz.previousTransition(anyInstant);
- final long nextTransition = tz.nextTransition(anyInstant);
+ Instant instant = Instant.ofEpochMilli(anyInstant);
+ final long prevTransition = tz.getRules().previousTransition(instant).getInstant().toEpochMilli();
+ ZoneOffsetTransition nextOffsetTransition = tz.getRules().nextTransition(instant);
+ final long nextTransition;
+ if (nextOffsetTransition != null) {
+ nextTransition = nextOffsetTransition.getInstant().toEpochMilli();
+ } else {
+ nextTransition = instant.toEpochMilli();
+ }
// We need all not only values but also rounded values to be within
// [prevTransition, nextTransition].
final long low;
- DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
+ Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
if (intervalAsUnit != null) {
- final DateTimeField dateTimeField = intervalAsUnit.field(tz);
- low = dateTimeField.roundCeiling(prevTransition);
+ Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build();
+ low = rounding.nextRoundingValue(prevTransition);
} else {
final TimeValue intervalAsMillis = getIntervalAsTimeValue();
low = Math.addExact(prevTransition, intervalAsMillis.millis());
@@ -409,12 +416,12 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
// rounding rounds down, so 'nextTransition' is a good upper bound
final long high = nextTransition;
- if (ft.isFieldWithinQuery(reader, low, high, true, false, DateTimeZone.UTC, EPOCH_MILLIS_PARSER,
+ if (ft.isFieldWithinQuery(reader, low, high, true, false, ZoneOffset.UTC, EPOCH_MILLIS_PARSER,
context) == Relation.WITHIN) {
// All values in this reader have the same offset despite daylight saving times.
// This is very common for location-based timezones such as Europe/Paris in
// combination with time-based indices.
- return DateTimeZone.forOffsetMillis(tz.getOffset(anyInstant));
+ return ZoneOffset.ofTotalSeconds(tz.getRules().getOffset(instant).getTotalSeconds());
}
}
}
@@ -425,9 +432,9 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
@Override
protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config,
AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
- final DateTimeZone tz = timeZone();
+ final ZoneId tz = timeZone();
final Rounding rounding = createRounding(tz);
- final DateTimeZone rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext());
+ final ZoneId rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext());
final Rounding shardRounding;
if (tz == rewrittenTimeZone) {
shardRounding = rounding;
@@ -448,7 +455,7 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
* {@code null} then it means that the interval is expressed as a fixed
* {@link TimeValue} and may be accessed via
* {@link #getIntervalAsTimeValue()}. */
- private DateTimeUnit getIntervalAsDateTimeUnit() {
+ private Rounding.DateTimeUnit getIntervalAsDateTimeUnit() {
if (dateHistogramInterval != null) {
return DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
}
@@ -467,9 +474,9 @@ private TimeValue getIntervalAsTimeValue() {
}
}
- private Rounding createRounding(DateTimeZone timeZone) {
+ private Rounding createRounding(ZoneId timeZone) {
Rounding.Builder tzRoundingBuilder;
- DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
+ Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
if (intervalAsUnit != null) {
tzRoundingBuilder = Rounding.builder(intervalAsUnit);
} else {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
index 735a6717210a5..0c7a91505ae88 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
@@ -23,8 +23,8 @@
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
index c7ad6de7e0d72..8c025eb34eeb3 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
@@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
index 4cecfeff83381..b0dfbb9d66e9d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
@@ -21,10 +21,10 @@
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentFragment;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
index f2e450942c3ad..63d08f5e832ac 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
@@ -32,10 +32,10 @@
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -108,7 +108,7 @@ public String getKeyAsString() {
@Override
public Object getKey() {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
index 669bda5574d31..58c8ff638fb3e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
@@ -20,9 +20,9 @@
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
@@ -34,10 +34,10 @@
import org.elasticsearch.search.aggregations.KeyComparable;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@@ -112,7 +112,7 @@ public String getKeyAsString() {
@Override
public Object getKey() {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override
@@ -185,13 +185,13 @@ static class EmptyBucketInfo {
}
EmptyBucketInfo(StreamInput in) throws IOException {
- rounding = Rounding.Streams.read(in);
+ rounding = Rounding.read(in);
subAggregations = InternalAggregations.readAggregations(in);
bounds = in.readOptionalWriteable(ExtendedBounds::new);
}
void writeTo(StreamOutput out) throws IOException {
- Rounding.Streams.write(rounding, out);
+ rounding.writeTo(out);
subAggregations.writeTo(out);
out.writeOptionalWriteable(bounds);
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
index c9ff1389f8ad3..66a29b4e05073 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
@@ -24,10 +24,10 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.List;
public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
@@ -83,7 +83,7 @@ public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBuck
@Override
public Object getKey() {
if (key != null) {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
return null;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
index ace0cb59907a8..1cf43a53ed26c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
@@ -23,10 +23,10 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.List;
public class ParsedDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
@@ -62,7 +62,7 @@ public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBuck
@Override
public Object getKey() {
if (key != null) {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
return null;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
index b5bdba85b78ef..2b5e92ddcb3f9 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
@@ -30,9 +30,9 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTime;
import java.io.IOException;
+import java.time.ZonedDateTime;
import java.util.Map;
public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
@@ -224,24 +224,24 @@ public DateRangeAggregationBuilder addUnboundedFrom(double from) {
* @param to
* the upper bound on the dates, exclusive
*/
- public DateRangeAggregationBuilder addRange(String key, DateTime from, DateTime to) {
+ public DateRangeAggregationBuilder addRange(String key, ZonedDateTime from, ZonedDateTime to) {
addRange(new RangeAggregator.Range(key, convertDateTime(from), convertDateTime(to)));
return this;
}
- private static Double convertDateTime(DateTime dateTime) {
+ private static Double convertDateTime(ZonedDateTime dateTime) {
if (dateTime == null) {
return null;
} else {
- return (double) dateTime.getMillis();
+ return (double) dateTime.toInstant().toEpochMilli();
}
}
/**
- * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
+ * Same as {@link #addRange(String, ZonedDateTime, ZonedDateTime)} but the key will be
* automatically generated based on from and to.
*/
- public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
+ public DateRangeAggregationBuilder addRange(ZonedDateTime from, ZonedDateTime to) {
return addRange(null, from, to);
}
@@ -253,16 +253,16 @@ public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
* @param to
* the upper bound on the dates, exclusive
*/
- public DateRangeAggregationBuilder addUnboundedTo(String key, DateTime to) {
+ public DateRangeAggregationBuilder addUnboundedTo(String key, ZonedDateTime to) {
addRange(new RangeAggregator.Range(key, null, convertDateTime(to)));
return this;
}
/**
- * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
+ * Same as {@link #addUnboundedTo(String, ZonedDateTime)} but the key will be
* computed automatically.
*/
- public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
+ public DateRangeAggregationBuilder addUnboundedTo(ZonedDateTime to) {
return addUnboundedTo(null, to);
}
@@ -274,16 +274,16 @@ public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
* @param from
* the lower bound on the distances, inclusive
*/
- public DateRangeAggregationBuilder addUnboundedFrom(String key, DateTime from) {
+ public DateRangeAggregationBuilder addUnboundedFrom(String key, ZonedDateTime from) {
addRange(new RangeAggregator.Range(key, convertDateTime(from), null));
return this;
}
/**
- * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
+ * Same as {@link #addUnboundedFrom(String, ZonedDateTime)} but the key will be
* computed automatically.
*/
- public DateRangeAggregationBuilder addUnboundedFrom(DateTime from) {
+ public DateRangeAggregationBuilder addUnboundedFrom(ZonedDateTime from) {
return addUnboundedFrom(null, from);
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
index 408c1325b85c9..a354aaeadbac0 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
@@ -24,10 +24,10 @@
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValueType;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.List;
import java.util.Map;
@@ -48,12 +48,14 @@ public Bucket(String key, double from, double to, long docCount, InternalAggrega
@Override
public Object getFrom() {
- return Double.isInfinite(((Number) from).doubleValue()) ? null : new DateTime(((Number) from).longValue(), DateTimeZone.UTC);
+ return Double.isInfinite(((Number) from).doubleValue()) ? null :
+ Instant.ofEpochMilli(((Number) from).longValue()).atZone(ZoneOffset.UTC);
}
@Override
public Object getTo() {
- return Double.isInfinite(((Number) to).doubleValue()) ? null : new DateTime(((Number) to).longValue(), DateTimeZone.UTC);
+ return Double.isInfinite(((Number) to).doubleValue()) ? null :
+ Instant.ofEpochMilli(((Number) to).longValue()).atZone(ZoneOffset.UTC);
}
private Double internalGetFrom() {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
index 68adc41d23765..d4504e245541b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
@@ -21,10 +21,11 @@
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
public class ParsedDateRange extends ParsedRange {
@@ -59,11 +60,11 @@ public Object getTo() {
return doubleAsDateTime(to);
}
- private static DateTime doubleAsDateTime(Double d) {
+ private static ZonedDateTime doubleAsDateTime(Double d) {
if (d == null || Double.isInfinite(d)) {
return null;
}
- return new DateTime(d.longValue(), DateTimeZone.UTC);
+ return Instant.ofEpochMilli(d.longValue()).atZone(ZoneOffset.UTC);
}
static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
index a8ee1293dc93a..882067e496e1b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
@@ -21,9 +21,9 @@
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -36,7 +36,6 @@
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
@@ -141,9 +140,9 @@ protected PipelineAggregator createInternal(Map metaData) throws
}
Long xAxisUnits = null;
if (units != null) {
- DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
+ Rounding.DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
if (dateTimeUnit != null) {
- xAxisUnits = dateTimeUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+ xAxisUnits = dateTimeUnit.getField().getBaseUnit().getDuration().toMillis();
} else {
TimeValue timeValue = TimeValue.parseTimeValue(units, null, getClass().getSimpleName() + ".unit");
if (timeValue != null) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
index 56ceae69ff78e..5f97df983ac87 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
@@ -29,16 +29,17 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.function.BiFunction;
public class MultiValuesSourceFieldConfig implements Writeable, ToXContentFragment {
private String fieldName;
private Object missing;
private Script script;
- private DateTimeZone timeZone;
+ private ZoneId timeZone;
private static final String NAME = "field_config";
@@ -61,16 +62,16 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentFragme
if (timezoneAware) {
parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return DateTimeZone.forID(p.text());
+ return ZoneId.of(p.text());
} else {
- return DateTimeZone.forOffsetHours(p.intValue());
+ return ZoneOffset.ofHours(p.intValue());
}
}, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
}
return parser;
};
- private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, DateTimeZone timeZone) {
+ private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone) {
this.fieldName = fieldName;
this.missing = missing;
this.script = script;
@@ -81,7 +82,7 @@ public MultiValuesSourceFieldConfig(StreamInput in) throws IOException {
this.fieldName = in.readString();
this.missing = in.readGenericValue();
this.script = in.readOptionalWriteable(Script::new);
- this.timeZone = in.readOptionalTimeZone();
+ this.timeZone = in.readOptionalZoneId();
}
public Object getMissing() {
@@ -92,7 +93,7 @@ public Script getScript() {
return script;
}
- public DateTimeZone getTimeZone() {
+ public ZoneId getTimeZone() {
return timeZone;
}
@@ -105,7 +106,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeGenericValue(missing);
out.writeOptionalWriteable(script);
- out.writeOptionalTimeZone(timeZone);
+ out.writeOptionalZoneId(timeZone);
}
@Override
@@ -129,7 +130,7 @@ public static class Builder {
private String fieldName;
private Object missing = null;
private Script script = null;
- private DateTimeZone timeZone = null;
+ private ZoneId timeZone = null;
public String getFieldName() {
return fieldName;
@@ -158,11 +159,11 @@ public Builder setScript(Script script) {
return this;
}
- public DateTimeZone getTimeZone() {
+ public ZoneId getTimeZone() {
return timeZone;
}
- public Builder setTimeZone(DateTimeZone timeZone) {
+ public Builder setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
index 25a90e581f00c..3cbd11288bffc 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
@@ -28,9 +28,9 @@
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneOffset;
public enum ValueType implements Writeable {
@@ -42,7 +42,7 @@ public enum ValueType implements Writeable {
DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class,
- new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateTimeZone.UTC)),
+ new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC)),
IP((byte) 6, "ip", "ip", ValuesSourceType.BYTES, IndexFieldData.class, DocValueFormat.IP),
// TODO: what is the difference between "number" and "numeric"?
NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
index 040cc1b542f07..2a39c5d4a4734 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
@@ -28,9 +28,9 @@
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Map;
import java.util.Objects;
@@ -81,7 +81,7 @@ public final AB subAggregations(Builder subFactories) {
private ValueType valueType = null;
private String format = null;
private Object missing = null;
- private DateTimeZone timeZone = null;
+ private ZoneId timeZone = null;
protected ValuesSourceConfig config;
protected ValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
@@ -145,7 +145,7 @@ private void read(StreamInput in) throws IOException {
format = in.readOptionalString();
missing = in.readGenericValue();
if (in.readBoolean()) {
- timeZone = DateTimeZone.forID(in.readString());
+ timeZone = ZoneId.of(in.readString());
}
}
@@ -170,7 +170,7 @@ protected final void doWriteTo(StreamOutput out) throws IOException {
boolean hasTimeZone = timeZone != null;
out.writeBoolean(hasTimeZone);
if (hasTimeZone) {
- out.writeString(timeZone.getID());
+ out.writeString(timeZone.getId());
}
innerWriteTo(out);
}
@@ -289,7 +289,7 @@ public Object missing() {
* Sets the time zone to use for this aggregation
*/
@SuppressWarnings("unchecked")
- public AB timeZone(DateTimeZone timeZone) {
+ public AB timeZone(ZoneId timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
}
@@ -300,7 +300,7 @@ public AB timeZone(DateTimeZone timeZone) {
/**
* Gets the time zone to use for this aggregation
*/
- public DateTimeZone timeZone() {
+ public ZoneId timeZone() {
return timeZone;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
index c152a5d5bc497..9a1e491556166 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
@@ -21,7 +21,7 @@
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@@ -32,7 +32,9 @@
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
+import java.time.ZoneOffset;
/**
* A configuration that tells aggregations how to retrieve data from the index
@@ -48,7 +50,7 @@ public static ValuesSourceConfig resolve(
ValueType valueType,
String field, Script script,
Object missing,
- DateTimeZone timeZone,
+ ZoneId timeZone,
String format) {
if (field == null) {
@@ -121,7 +123,7 @@ private static AggregationScript.LeafFactory createScript(Script script, QuerySh
}
}
- private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable DateTimeZone tz) {
+ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable ZoneId tz) {
if (valueType == null) {
return DocValueFormat.RAW; // we can't figure it out
}
@@ -130,7 +132,7 @@ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable V
valueFormat = new DocValueFormat.Decimal(format);
}
if (valueFormat instanceof DocValueFormat.DateTime && format != null) {
- valueFormat = new DocValueFormat.DateTime(Joda.forPattern(format), tz != null ? tz : DateTimeZone.UTC);
+ valueFormat = new DocValueFormat.DateTime(DateFormatters.forPattern(format), tz != null ? tz : ZoneOffset.UTC);
}
return valueFormat;
}
@@ -142,7 +144,7 @@ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable V
private boolean unmapped = false;
private DocValueFormat format = DocValueFormat.RAW;
private Object missing;
- private DateTimeZone timeZone;
+ private ZoneId timeZone;
public ValuesSourceConfig(ValuesSourceType valueSourceType) {
this.valueSourceType = valueSourceType;
@@ -206,12 +208,12 @@ public Object missing() {
return this.missing;
}
- public ValuesSourceConfig timezone(final DateTimeZone timeZone) {
- this.timeZone= timeZone;
+ public ValuesSourceConfig timezone(final ZoneId timeZone) {
+ this.timeZone = timeZone;
return this;
}
- public DateTimeZone timezone() {
+ public ZoneId timezone() {
return this.timeZone;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
index fc0a2f3a9fefe..24bdffaa3fa89 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
@@ -25,7 +25,9 @@
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
+import java.time.ZoneOffset;
public final class ValuesSourceParserHelper {
@@ -91,9 +93,9 @@ private static void declareFields(
if (timezoneAware) {
objectParser.declareField(ValuesSourceAggregationBuilder::timeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return DateTimeZone.forID(p.text());
+ return ZoneId.of(p.text());
} else {
- return DateTimeZone.forOffsetHours(p.intValue());
+ return ZoneOffset.ofHours(p.intValue());
}
}, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
index c4efa78378d5e..864403f8ba3a7 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.common.joda;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.ESTestCase;
@@ -28,7 +29,6 @@
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
@@ -63,11 +63,22 @@ public void testTimeZoneFormatting() {
formatter3.parse("20181126T121212.123-0830");
}
- public void testCustomTimeFormats() {
- assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
- assertSameDate("12/06", "dd/MM");
- assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
- }
+ // this test requires tests to run with -Djava.locale.providers=COMPAT in order to work
+// public void testCustomTimeFormats() {
+// assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
+// assertSameDate("12/06", "dd/MM");
+// assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
+//
+// // also ensure that locale based dates are the same
+// assertSameDate("Di., 05 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+// assertSameDate("Mi., 06 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+// assertSameDate("Do., 07 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+// assertSameDate("Fr., 08 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+//
+// DateTime dateTimeNow = DateTime.now(DateTimeZone.UTC);
+// ZonedDateTime javaTimeNow = Instant.ofEpochMilli(dateTimeNow.getMillis()).atZone(ZoneOffset.UTC);
+// assertSamePrinterOutput("E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"), javaTimeNow, dateTimeNow);
+// }
public void testDuellingFormatsValidParsing() {
assertSameDate("1522332219", "epoch_second");
@@ -208,7 +219,7 @@ public void testDuellingFormatsValidParsing() {
// joda comes up with a different exception message here, so we have to adapt
assertJodaParseException("2012-W1-8", "week_date",
"Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]");
- assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed");
+ assertJavaTimeParseException("2012-W1-8", "week_date");
assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time");
assertSameDate("2012-W1-6T10:15:30.123Z", "week_date_time");
@@ -270,6 +281,7 @@ public void testDuelingStrictParsing() {
assertParseException("2018-12-1", "strict_date_optional_time");
assertParseException("2018-1-31", "strict_date_optional_time");
assertParseException("10000-01-31", "strict_date_optional_time");
+ assertSameDate("2010-01-05T02:00", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time");
assertParseException("2018-12-31T10:15:3", "strict_date_optional_time");
assertParseException("2018-12-31T10:5:30", "strict_date_optional_time");
@@ -345,7 +357,7 @@ public void testDuelingStrictParsing() {
// joda comes up with a different exception message here, so we have to adapt
assertJodaParseException("2012-W01-8", "strict_week_date",
"Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]");
- assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed");
+ assertJavaTimeParseException("2012-W01-8", "strict_week_date");
assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time");
assertParseException("2012-W1-6T10:15:30.123Z", "strict_week_date_time");
@@ -469,11 +481,39 @@ public void testSamePrinterOutput() {
assertSamePrinterOutput("strictYear", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate);
+ assertSamePrinterOutput("strict_date_optional_time||epoch_millis", javaDate, jodaDate);
}
public void testSeveralTimeFormats() {
assertSameDate("2018-12-12", "year_month_day||ordinal_date");
assertSameDate("2018-128", "year_month_day||ordinal_date");
+ assertSameDate("2018-08-20T10:57:45.427Z", "strict_date_optional_time||epoch_millis");
+ assertSameDate("2017-02-01T08:02:00.000-01", "strict_date_optional_time||epoch_millis");
+ assertSameDate("2017-02-01T08:02:00.000-01:00", "strict_date_optional_time||epoch_millis");
+ }
+
+ public void testSamePrinterOutputWithTimeZone() {
+ String format = "strict_date_optional_time||date_time";
+ String dateInput = "2017-02-01T08:02:00.000-01:00";
+ DateFormatter javaFormatter = DateFormatters.forPattern(format);
+ TemporalAccessor javaDate = javaFormatter.parse(dateInput);
+
+ FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
+ DateTime dateTime = jodaFormatter.parser().parseDateTime(dateInput);
+
+ String javaDateString = javaFormatter.withZone(ZoneOffset.ofHours(-1)).format(javaDate);
+ String jodaDateString = jodaFormatter.printer().withZone(DateTimeZone.forOffsetHours(-1)).print(dateTime);
+ String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]",
+ format, jodaDateString, javaDateString);
+ assertThat(message, javaDateString, is(jodaDateString));
+ }
+
+ public void testDateFormatterWithLocale() {
+ Locale locale = randomLocale(random());
+ String pattern = randomBoolean() ? "strict_date_optional_time||date_time" : "date_time||strict_date_optional_time";
+ DateFormatter formatter = DateFormatters.forPattern(pattern).withLocale(locale);
+ assertThat(formatter.pattern(), is(pattern));
+ assertThat(formatter.getLocale(), is(locale));
}
private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
@@ -501,7 +541,7 @@ private void assertSameDate(String input, String format) {
private void assertParseException(String input, String format) {
assertJodaParseException(input, format, "Invalid format: \"" + input);
- assertJavaTimeParseException(input, format, "Text '" + input + "' could not be parsed");
+ assertJavaTimeParseException(input, format);
}
private void assertJodaParseException(String input, String format, String expectedMessage) {
@@ -510,9 +550,10 @@ private void assertJodaParseException(String input, String format, String expect
assertThat(e.getMessage(), containsString(expectedMessage));
}
- private void assertJavaTimeParseException(String input, String format, String expectedMessage) {
+ private void assertJavaTimeParseException(String input, String format) {
DateFormatter javaTimeFormatter = DateFormatters.forPattern(format);
- DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input));
- assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage));
+ ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> javaTimeFormatter.parse(input));
+ // using starts with because the message might contain a position in addition
+ assertThat(e.getMessage(), startsWith("could not parse input [" + input + "] with date formatter [" + format + "]"));
}
}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java
index 61448ce15ea26..5d1e0babca014 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java
@@ -24,12 +24,14 @@
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTimeZone;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.LongSupplier;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
public class JodaDateMathParserTests extends ESTestCase {
@@ -41,12 +43,12 @@ void assertDateMathEquals(String toTest, String expected) {
}
void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, DateTimeZone timeZone) {
- long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
+ long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli();
assertDateEquals(gotMillis, toTest, expected);
}
void assertDateEquals(long gotMillis, String original, String expected) {
- long expectedMillis = parser.parse(expected, () -> 0);
+ long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
if (gotMillis != expectedMillis) {
fail("Date math not equal\n" +
"Original : " + original + "\n" +
@@ -145,7 +147,7 @@ public void testMultipleAdjustments() {
public void testNow() {
- final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null);
+ final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli();
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
@@ -162,10 +164,10 @@ public void testRoundingPreservesEpochAsBaseDate() {
JodaDateMathParser parser = new JodaDateMathParser(formatter);
assertEquals(
this.formatter.parser().parseMillis("1970-01-01T04:52:20.000Z"),
- parser.parse("04:52:20", () -> 0, false, (ZoneId) null));
+ parser.parse("04:52:20", () -> 0, false, (ZoneId) null).toEpochMilli());
assertEquals(
this.formatter.parser().parseMillis("1970-01-01T04:52:20.999Z"),
- parser.parse("04:52:20", () -> 0, true, (ZoneId) null));
+ parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli());
}
// Implicit rounding happening when parts of the date are not specified
@@ -185,7 +187,7 @@ public void testImplicitRounding() {
// implicit rounding with explicit timezone in the date format
FormatDateTimeFormatter formatter = Joda.forPattern("YYYY-MM-ddZ");
JodaDateMathParser parser = new JodaDateMathParser(formatter);
- long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
+ Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
@@ -259,7 +261,7 @@ public void testTimestamps() {
// also check other time units
JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second||dateOptionalTime"));
- long datetime = parser.parse("1418248078", () -> 0);
+ long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year
@@ -304,6 +306,11 @@ public void testOnlyCallsNowIfNecessary() {
assertTrue(called.get());
}
+ public void testSupportsScientificNotation() {
+ long result = parser.parse("1.0e3", () -> 42).toEpochMilli();
+ assertThat(result, is(1000L));
+ }
+
public void testThatUnixTimestampMayNotHaveTimeZone() {
JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_millis"));
try {
diff --git a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java
index 7e3dbdd5b94df..8c5c502388fc1 100644
--- a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java
+++ b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.common.rounding;
+import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
@@ -42,6 +43,7 @@ public void testSerialization() throws Exception {
rounding = org.elasticsearch.common.Rounding.builder(timeValue()).timeZone(ZoneOffset.UTC).build();
}
BytesStreamOutput output = new BytesStreamOutput();
+ output.setVersion(Version.V_6_4_0);
rounding.writeTo(output);
Rounding roundingJoda = Rounding.Streams.read(output.bytes().streamInput());
diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java
index 6a263b7db4215..3ce4da4111552 100644
--- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java
+++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java
@@ -19,12 +19,13 @@
package org.elasticsearch.common.time;
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.test.ESTestCase;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
-import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
@@ -79,12 +80,12 @@ public void testEpochMillisParser() {
}
}
- public void testEpochMilliParser() {
+ public void testInvalidEpochMilliParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_millis");
- DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid"));
- assertThat(e.getMessage(), containsString("invalid number"));
+ ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> formatter.parse("invalid"));
+ assertThat(e.getMessage(), is("invalid number [invalid]"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("123.1234567"));
+ e = expectThrows(ElasticsearchParseException.class, () -> formatter.parse("123.1234567"));
assertThat(e.getMessage(), containsString("too much granularity after dot [123.1234567]"));
}
@@ -110,13 +111,13 @@ public void testEpochSecondParser() {
assertThat(Instant.from(formatter.parse("-1234.567")).toEpochMilli(), is(-1234567L));
assertThat(Instant.from(formatter.parse("-1234")).getNano(), is(0));
- DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1234567890"));
+ ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> formatter.parse("1234.1234567890"));
assertThat(e.getMessage(), is("too much granularity after dot [1234.1234567890]"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.123456789013221"));
+ e = expectThrows(ElasticsearchParseException.class, () -> formatter.parse("1234.123456789013221"));
assertThat(e.getMessage(), is("too much granularity after dot [1234.123456789013221]"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc"));
+ e = expectThrows(ElasticsearchParseException.class, () -> formatter.parse("abc"));
assertThat(e.getMessage(), is("invalid number [abc]"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc"));
+ e = expectThrows(ElasticsearchParseException.class, () -> formatter.parse("1234.abc"));
assertThat(e.getMessage(), is("invalid number [1234.abc]"));
}
@@ -192,4 +193,35 @@ public void testEqualsAndHashcode() {
assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis")));
assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis")));
}
+
+ public void testThatRootObjectParsingIsStrict() {
+ String[] datesThatWork = new String[] { "2014/10/10", "2014/10/10 12:12:12", "2014-05-05", "2014-05-05T12:12:12.123Z" };
+ String[] datesThatShouldNotWork = new String[]{ "5-05-05", "2014-5-05", "2014-05-5",
+ "2014-05-05T1:12:12.123Z", "2014-05-05T12:1:12.123Z", "2014-05-05T12:12:1.123Z",
+ "4/10/10", "2014/1/10", "2014/10/1",
+ "2014/10/10 1:12:12", "2014/10/10 12:1:12", "2014/10/10 12:12:1"
+ };
+
+ // good case
+ for (String date : datesThatWork) {
+ boolean dateParsingSuccessful = false;
+ for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
+ try {
+ dateTimeFormatter.parse(date);
+ dateParsingSuccessful = true;
+ break;
+ } catch (Exception e) {}
+ }
+ if (!dateParsingSuccessful) {
+ fail("Parsing for date " + date + " in root object mapper failed, but shouldn't");
+ }
+ }
+
+ // bad case
+ for (String date : datesThatShouldNotWork) {
+ for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
+ expectThrows(Exception.class, () -> dateTimeFormatter.parse(date));
+ }
+ }
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java
index a543af0445db1..f2ddf86503864 100644
--- a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java
@@ -125,7 +125,7 @@ public void testMultipleAdjustments() {
}
public void testNow() {
- final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null);
+ final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli();
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
@@ -142,11 +142,11 @@ public void testRoundingPreservesEpochAsBaseDate() {
JavaDateMathParser parser = new JavaDateMathParser(formatter);
ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20"));
assertThat(zonedDateTime.getYear(), is(1970));
- long millisStart = zonedDateTime.toInstant().toEpochMilli();
+ Instant millisStart = zonedDateTime.toInstant();
assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, (ZoneId) null));
// due to rounding up, we have to add the number of milliseconds here manually
long millisEnd = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")).toInstant().toEpochMilli() + 999;
- assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null));
+ assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli());
}
// Implicit rounding happening when parts of the date are not specified
@@ -165,8 +165,8 @@ public void testImplicitRounding() {
// implicit rounding with explicit timezone in the date format
DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX");
- JavaDateMathParser parser = new JavaDateMathParser(formatter);
- long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
+ DateMathParser parser = new JavaDateMathParser(formatter);
+ Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
@@ -240,7 +240,7 @@ public void testTimestamps() {
// also check other time units
JavaDateMathParser parser = new JavaDateMathParser(DateFormatters.forPattern("epoch_second||dateOptionalTime"));
- long datetime = parser.parse("1418248078", () -> 0);
+ long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year
@@ -252,12 +252,8 @@ public void testTimestamps() {
}
void assertParseException(String msg, String date, String exc) {
- try {
- parser.parse(date, () -> 0);
- fail("Date: " + date + "\n" + msg);
- } catch (ElasticsearchParseException e) {
- assertThat(ExceptionsHelper.detailedMessage(e), containsString(exc));
- }
+ ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parser.parse(date, () -> 0));
+ assertThat(msg, ExceptionsHelper.detailedMessage(e), containsString(exc));
}
public void testIllegalMathFormat() {
@@ -269,8 +265,8 @@ public void testIllegalMathFormat() {
}
public void testIllegalDateFormat() {
- assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "failed to parse date field");
- assertParseException("Expected bad date format exception", "123bogus", "could not be parsed");
+ assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "could not parse input");
+ assertParseException("Expected bad date format exception", "123bogus", "could not parse input [123bogus]");
}
public void testOnlyCallsNowIfNecessary() {
@@ -285,17 +281,22 @@ public void testOnlyCallsNowIfNecessary() {
assertTrue(called.get());
}
+ public void testSupportsScientificNotation() {
+ long result = parser.parse("1.0e3", () -> 42).toEpochMilli();
+ assertThat(result, is(1000L));
+ }
+
private void assertDateMathEquals(String toTest, String expected) {
assertDateMathEquals(toTest, expected, 0, false, null);
}
private void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, ZoneId timeZone) {
- long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
+ long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli();
assertDateEquals(gotMillis, toTest, expected);
}
private void assertDateEquals(long gotMillis, String original, String expected) {
- long expectedMillis = parser.parse(expected, () -> 0);
+ long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
if (gotMillis != expectedMillis) {
ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(gotMillis), ZoneOffset.UTC);
fail("Date math not equal\n" +
diff --git a/server/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java b/server/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java
index 257ebef9a9477..0e700c92e2160 100644
--- a/server/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java
+++ b/server/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java
@@ -22,7 +22,6 @@
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -715,40 +714,6 @@ public void testThatDefaultFormatterChecksForCorrectYearLength() throws Exceptio
assertDateFormatParsingThrowingException("strictYearMonthDay", "2014-05-5");
}
- public void testThatRootObjectParsingIsStrict() throws Exception {
- String[] datesThatWork = new String[] { "2014/10/10", "2014/10/10 12:12:12", "2014-05-05", "2014-05-05T12:12:12.123Z" };
- String[] datesThatShouldNotWork = new String[]{ "5-05-05", "2014-5-05", "2014-05-5",
- "2014-05-05T1:12:12.123Z", "2014-05-05T12:1:12.123Z", "2014-05-05T12:12:1.123Z",
- "4/10/10", "2014/1/10", "2014/10/1",
- "2014/10/10 1:12:12", "2014/10/10 12:1:12", "2014/10/10 12:12:1"
- };
-
- // good case
- for (String date : datesThatWork) {
- boolean dateParsingSuccessful = false;
- for (FormatDateTimeFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
- try {
- dateTimeFormatter.parser().parseMillis(date);
- dateParsingSuccessful = true;
- break;
- } catch (Exception e) {}
- }
- if (!dateParsingSuccessful) {
- fail("Parsing for date " + date + " in root object mapper failed, but shouldnt");
- }
- }
-
- // bad case
- for (String date : datesThatShouldNotWork) {
- for (FormatDateTimeFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
- try {
- dateTimeFormatter.parser().parseMillis(date);
- fail(String.format(Locale.ROOT, "Expected exception when parsing date %s in root mapper", date));
- } catch (Exception e) {}
- }
- }
- }
-
private void assertValidDateFormatParsing(String pattern, String dateToParse) {
assertValidDateFormatParsing(pattern, dateToParse, dateToParse);
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
index 97921f57ca592..801efa3c08dd5 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
@@ -21,23 +21,25 @@
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Collection;
-import java.util.Locale;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
@@ -174,7 +176,7 @@ public void testIgnoreMalformed() throws Exception {
.endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
- assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\""));
+ assertThat(e.getCause().getMessage(), containsString("could not parse input [2016-03-99]"));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "date")
@@ -218,36 +220,13 @@ public void testChangeFormat() throws IOException {
assertEquals(1457654400000L, pointField.numericValue().longValue());
}
- public void testFloatEpochFormat() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "date")
- .field("format", "epoch_millis").endObject().endObject()
- .endObject().endObject());
-
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
-
- assertEquals(mapping, mapper.mappingSource().toString());
-
- double epochFloatMillisFromEpoch = (randomDouble() * 2 - 1) * 1000000;
- String epochFloatValue = String.format(Locale.US, "%f", epochFloatMillisFromEpoch);
-
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", epochFloatValue)
- .endObject()),
- XContentType.JSON));
-
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(2, fields.length);
- IndexableField pointField = fields[0];
- assertEquals((long)epochFloatMillisFromEpoch, pointField.numericValue().longValue());
- }
-
public void testChangeLocale() throws IOException {
+ assumeTrue("need java 9 for testing", JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject()
- .endObject().endObject());
+ .startObject("properties").startObject("field").field("type", "date")
+ .field("format", "E, d MMM yyyy HH:mm:ss Z")
+ .field("locale", "de")
+ .endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@@ -256,7 +235,7 @@ public void testChangeLocale() throws IOException {
mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
- .field("field", 1457654400)
+ .field("field", "Mi., 06 Dez. 2000 02:55:00 -0800")
.endObject()),
XContentType.JSON));
}
@@ -341,12 +320,8 @@ public void testEmptyName() throws IOException {
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
- /**
- * Test that time zones are correctly parsed by the {@link DateFieldMapper}.
- * There is a known bug with Joda 2.9.4 reported in https://github.com/JodaOrg/joda-time/issues/373.
- */
public void testTimeZoneParsing() throws Exception {
- final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'");
+ final String timeZonePattern = "yyyy-MM-dd" + randomFrom("XXX", "[XXX]", "'['XXX']'");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
@@ -361,20 +336,22 @@ public void testTimeZoneParsing() throws Exception {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
- final DateTimeZone randomTimeZone = randomBoolean() ? DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone();
- final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone);
+ DateFormatter formatter = DateFormatters.forPattern(timeZonePattern);
+ final ZoneId randomTimeZone = randomBoolean() ? ZoneId.of(randomFrom("UTC", "CET")) : randomZone();
+ final ZonedDateTime randomDate = ZonedDateTime.of(2016, 3, 11, 0, 0, 0, 0, randomTimeZone);
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
- .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate))
+ .field("field", formatter.format(randomDate))
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
- assertEquals(randomDate.withZone(DateTimeZone.UTC).getMillis(), fields[0].numericValue().longValue());
+ long millis = randomDate.withZoneSameInstant(ZoneOffset.UTC).toInstant().toEpochMilli();
+ assertEquals(millis, fields[0].numericValue().longValue());
}
public void testMergeDate() throws IOException {
@@ -430,6 +407,6 @@ public void testIllegalFormatField() throws Exception {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+ assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
}
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
index 3a185620f7b7b..9856560cc8a13 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
@@ -31,8 +31,8 @@
import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.IndexSettings;
@@ -45,6 +45,7 @@
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneOffset;
import java.util.Locale;
public class DateFieldTypeTests extends FieldTypeTestCase {
@@ -61,13 +62,13 @@ public void setupProperties() {
addModifier(new Modifier("format", false) {
@Override
public void modify(MappedFieldType ft) {
- ((DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT));
+ ((DateFieldType) ft).setDateTimeFormatter(DateFormatters.forPattern("basic_week_date"));
}
});
addModifier(new Modifier("locale", false) {
@Override
public void modify(MappedFieldType ft) {
- ((DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
+ ((DateFieldType) ft).setDateTimeFormatter(DateFormatters.forPattern("strict_date_optional_time").withLocale(Locale.CANADA));
}
});
nowInMillis = randomNonNegativeLong();
@@ -110,8 +111,11 @@ private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
public void testIsFieldWithinQuery() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
- long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12").getMillis();
- long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03").getMillis();
+
+ long instant1 =
+ DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12")).toInstant().toEpochMilli();
+ long instant2 =
+ DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2016-04-03")).toInstant().toEpochMilli();
Document doc = new Document();
LongPoint field = new LongPoint("my_date", instant1);
doc.add(field);
@@ -138,25 +142,27 @@ public void testIsFieldWithinQuery() throws IOException {
public void testValueFormat() {
MappedFieldType ft = createDefaultFieldType();
- long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12T14:10:55").getMillis();
+ long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55"))
+ .toInstant().toEpochMilli();
+
assertEquals("2015-10-12T14:10:55.000Z",
- ft.docValueFormat(null, DateTimeZone.UTC).format(instant));
+ ft.docValueFormat(null, ZoneOffset.UTC).format(instant));
assertEquals("2015-10-12T15:10:55.000+01:00",
- ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant));
+ ft.docValueFormat(null, ZoneOffset.ofHours(1)).format(instant));
assertEquals("2015",
- createDefaultFieldType().docValueFormat("YYYY", DateTimeZone.UTC).format(instant));
+ createDefaultFieldType().docValueFormat("YYYY", ZoneOffset.UTC).format(instant));
assertEquals(instant,
- ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null));
+ ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null));
assertEquals(instant + 999,
- ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null));
- assertEquals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-13").getMillis() - 1,
- ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null));
+ ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null));
+ long i = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-13")).toInstant().toEpochMilli();
+ assertEquals(i - 1, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null));
}
public void testValueForSearch() {
MappedFieldType ft = createDefaultFieldType();
String date = "2015-10-12T12:09:55.000Z";
- long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
+ long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
assertEquals(date, ft.valueForDisplay(instant));
}
@@ -170,7 +176,7 @@ public void testTermQuery() {
MappedFieldType ft = createDefaultFieldType();
ft.setName("field");
String date = "2015-10-12T14:10:55";
- long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
+ long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
ft.setIndexOptions(IndexOptions.DOCS);
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant + 999),
@@ -193,8 +199,9 @@ public void testRangeQuery() throws IOException {
ft.setName("field");
String date1 = "2015-10-12T14:10:55";
String date2 = "2016-04-28T11:33:52";
- long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date1).getMillis();
- long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date2).getMillis() + 999;
+ long instant1 = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
+ long instant2 =
+ DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() + 999;
ft.setIndexOptions(IndexOptions.DOCS);
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 26c814f4dcdc3..5546d5f4d4804 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -42,6 +42,7 @@
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
+import java.time.Instant;
import java.util.Collection;
import java.util.Collections;
@@ -455,7 +456,7 @@ public void testReuseExistingMappings() throws IOException, Exception {
.field("my_field3", 44)
.field("my_field4", 45)
.field("my_field5", 46)
- .field("my_field6", 47)
+ .field("my_field6", Instant.now().toEpochMilli())
.field("my_field7", true)
.endObject());
Mapper myField1Mapper = null;
@@ -709,11 +710,11 @@ public void testDateDetectionInheritsFormat() throws Exception {
DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().getMapper("date2");
DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().getMapper("date3");
// inherited from dynamic date format
- assertEquals("yyyy-MM-dd", dateMapper1.fieldType().dateTimeFormatter().format());
+ assertEquals("yyyy-MM-dd", dateMapper1.fieldType().dateTimeFormatter().pattern());
// inherited from dynamic date format since the mapping in the template did not specify a format
- assertEquals("yyyy-MM-dd", dateMapper2.fieldType().dateTimeFormatter().format());
+ assertEquals("yyyy-MM-dd", dateMapper2.fieldType().dateTimeFormatter().pattern());
// not inherited from the dynamic date format since the template defined an explicit format
- assertEquals("yyyy-MM-dd||epoch_millis", dateMapper3.fieldType().dateTimeFormatter().format());
+ assertEquals("yyyy-MM-dd||epoch_millis", dateMapper3.fieldType().dateTimeFormatter().pattern());
}
public void testDynamicTemplateOrder() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
index 62c764e8060af..2a9c41ba7aa38 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
@@ -60,8 +60,6 @@ public void testMatchTypeOnly() throws Exception {
assertThat(mapperService.fullName("l"), notNullValue());
assertNotSame(IndexOptions.NONE, mapperService.fullName("l").indexOptions());
-
-
}
public void testSimple() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
index 1f8b0b58af813..1511031eb9279 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
@@ -458,7 +458,7 @@ public void testIllegalFormatField() throws Exception {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+ assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
}
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java
index 34e7081d51d5d..699f85f1b12b1 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java
@@ -104,11 +104,12 @@ public void testDateRangeQuery() throws Exception {
DateMathParser parser = type.dateMathParser;
Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(createShardContext());
Query range = LongRange.newIntersectsQuery(DATE_RANGE_FIELD_NAME,
- new long[]{ parser.parse("2010-01-01", () -> 0)}, new long[]{ parser.parse("2018-01-01", () -> 0)});
+ new long[]{ parser.parse("2010-01-01", () -> 0).toEpochMilli()},
+ new long[]{ parser.parse("2018-01-01", () -> 0).toEpochMilli()});
Query dv = RangeFieldMapper.RangeType.DATE.dvRangeQuery(DATE_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
- parser.parse("2010-01-01", () -> 0),
- parser.parse("2018-01-01", () -> 0), true, true);
+ parser.parse("2010-01-01", () -> 0).toEpochMilli(),
+ parser.parse("2018-01-01", () -> 0).toEpochMilli(), true, true);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
index 6ecd61275fe96..fe95ab2d40615 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
@@ -34,10 +34,10 @@
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.ShapeRelation;
-import org.elasticsearch.common.joda.FormatDateTimeFormatter;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.RangeFieldMapper.RangeFieldType;
import org.elasticsearch.index.mapper.RangeFieldMapper.RangeType;
@@ -49,6 +49,8 @@
import java.net.InetAddress;
import java.util.Locale;
+import static org.hamcrest.Matchers.containsString;
+
public class RangeFieldTypeTests extends FieldTypeTestCase {
RangeType type;
protected static String FIELDNAME = "field";
@@ -63,13 +65,13 @@ public void setupProperties() {
addModifier(new Modifier("format", true) {
@Override
public void modify(MappedFieldType ft) {
- ((RangeFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT));
+ ((RangeFieldType) ft).setDateTimeFormatter(DateFormatters.forPattern("basic_week_date"));
}
});
addModifier(new Modifier("locale", true) {
@Override
public void modify(MappedFieldType ft) {
- ((RangeFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
+ ((RangeFieldType) ft).setDateTimeFormatter(DateFormatters.forPattern("date_optional_time").withLocale(Locale.CANADA));
}
});
}
@@ -112,19 +114,18 @@ public void testDateRangeQueryUsingMappingFormat() {
fieldType.setHasDocValues(false);
ShapeRelation relation = randomFrom(ShapeRelation.values());
- // dates will break the default format
+ // dates will break the default format, month/day of month is turned around in the format
final String from = "2016-15-06T15:29:50+08:00";
final String to = "2016-16-06T15:29:50+08:00";
ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> fieldType.rangeQuery(from, to, true, true, relation, null, null, context));
- assertEquals("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]",
- ex.getMessage());
+ assertThat(ex.getMessage(), containsString("could not parse input [2016-15-06T15:29:50+08:00]"));
// setting mapping format which is compatible with those dates
- final FormatDateTimeFormatter formatter = Joda.forPattern("yyyy-dd-MM'T'HH:mm:ssZZ");
- assertEquals(1465975790000L, formatter.parser().parseMillis(from));
- assertEquals(1466062190000L, formatter.parser().parseMillis(to));
+ final DateFormatter formatter = DateFormatters.forPattern("yyyy-dd-MM'T'HH:mm:ssZZZZZ");
+ assertEquals(1465975790000L, DateFormatters.toZonedDateTime(formatter.parse(from)).toInstant().toEpochMilli());
+ assertEquals(1466062190000L, DateFormatters.toZonedDateTime(formatter.parse(to)).toInstant().toEpochMilli());
fieldType.setDateTimeFormatter(formatter);
final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, null, context);
diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index 9ea98cebe711b..4314265a9f103 100644
--- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -60,9 +60,10 @@
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.DateTimeException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -161,7 +162,7 @@ protected QueryStringQueryBuilder doCreateTestQueryBuilder() {
queryStringQueryBuilder.useDisMax(randomBoolean());
}
if (randomBoolean()) {
- queryStringQueryBuilder.timeZone(randomDateTimeZone().getID());
+ queryStringQueryBuilder.timeZone(randomZone().getId());
}
if (randomBoolean()) {
queryStringQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean());
@@ -195,7 +196,7 @@ public QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance)
String quoteFieldSuffix = instance.quoteFieldSuffix();
Float tieBreaker = instance.tieBreaker();
String minimumShouldMatch = instance.minimumShouldMatch();
- String timeZone = instance.timeZone() == null ? null : instance.timeZone().getID();
+ String timeZone = instance.timeZone() == null ? null : instance.timeZone().getId();
boolean autoGenerateSynonymsPhraseQuery = instance.autoGenerateSynonymsPhraseQuery();
boolean fuzzyTranspositions = instance.fuzzyTranspositions();
@@ -303,12 +304,12 @@ public QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance)
break;
case 20:
if (timeZone == null) {
- timeZone = randomDateTimeZone().getID();
+ timeZone = randomZone().getId();
} else {
if (randomBoolean()) {
timeZone = null;
} else {
- timeZone = randomValueOtherThan(timeZone, () -> randomDateTimeZone().getID());
+ timeZone = randomValueOtherThan(timeZone, () -> randomZone().getId());
}
}
break;
@@ -811,7 +812,7 @@ public void testTimezone() throws Exception {
QueryBuilder queryBuilder = parseQuery(queryAsString);
assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class));
QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder;
- assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris")));
+ assertThat(queryStringQueryBuilder.timeZone(), equalTo(ZoneId.of("Europe/Paris")));
String invalidQueryAsString = "{\n" +
" \"query_string\":{\n" +
@@ -819,7 +820,7 @@ public void testTimezone() throws Exception {
" \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
" }\n" +
"}";
- expectThrows(IllegalArgumentException.class, () -> parseQuery(invalidQueryAsString));
+ expectThrows(DateTimeException.class, () -> parseQuery(invalidQueryAsString));
}
public void testToQueryBooleanQueryMultipleBoosts() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
index 28349994c63e3..ffcc3f8bba4e2 100644
--- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
@@ -36,6 +36,8 @@
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.lucene.BytesRefs;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@@ -44,10 +46,12 @@
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
@@ -72,18 +76,25 @@ protected RangeQueryBuilder doCreateTestQueryBuilder() {
break;
case 1:
// use mapped date field, using date string representation
+ Instant now = Instant.now();
+ ZonedDateTime start = now.minusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC);
+ ZonedDateTime end = now.plusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC);
query = new RangeQueryBuilder(randomFrom(
DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, DATE_ALIAS_FIELD_NAME));
- query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
- query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
+ query.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(start));
+ query.to(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(end));
// Create timestamp option only then we have a date mapper,
// otherwise we could trigger exception.
if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) {
if (randomBoolean()) {
- query.timeZone(randomDateTimeZone().getID());
+ query.timeZone(randomZone().getId());
}
if (randomBoolean()) {
- query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
+ String format = "yyyy-MM-dd'T'HH:mm:ss";
+ query.format(format);
+ DateFormatter formatter = DateFormatters.forPattern(format);
+ query.from(formatter.format(start));
+ query.to(formatter.format(end));
}
}
break;
@@ -166,7 +177,7 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query,
if (mappedFieldType instanceof DateFieldMapper.DateFieldType) {
fromInMillis = queryBuilder.from() == null ? null :
((DateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.from(),
- queryBuilder.includeLower(),
+ !queryBuilder.includeLower(),
queryBuilder.getDateTimeZone(),
queryBuilder.getForceDateParser(), context.getQueryShardContext());
toInMillis = queryBuilder.to() == null ? null :
@@ -444,7 +455,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC
DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
query.from(queryFromValue);
query.to(queryToValue);
- query.timeZone(randomDateTimeZone().getID());
+ query.timeZone(randomZone().getId());
query.format("yyyy-MM-dd");
QueryShardContext queryShardContext = createShardContext();
QueryBuilder rewritten = query.rewrite(queryShardContext);
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
index 809d647527402..2f900f9c1676b 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
@@ -26,6 +26,8 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.cache.request.RequestCacheStats;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@@ -34,8 +36,8 @@
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.junit.annotations.TestLogging;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
@@ -68,7 +70,7 @@ public void testCacheAggs() throws Exception {
// which used to not work well with the query cache because of the handles stream output
// see #9500
final SearchResponse r1 = client.prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH)
- .addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00")).minDocCount(0)
+ .addAggregation(dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0)
.dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r1);
@@ -80,7 +82,7 @@ public void testCacheAggs() throws Exception {
for (int i = 0; i < 10; ++i) {
final SearchResponse r2 = client.prepareSearch("index").setSize(0)
.setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(dateHistogram("histo").field("f")
- .timeZone(DateTimeZone.forID("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
+ .timeZone(ZoneId.of("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r2);
Histogram h1 = r1.getAggregations().get("histo");
@@ -246,15 +248,16 @@ public void testQueryRewriteDatesWithNow() throws Exception {
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
- indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
- client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
- client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
- client.prepareIndex("index-2", "type", "4").setSource("d", now.minusDays(3)),
- client.prepareIndex("index-2", "type", "5").setSource("d", now.minusDays(4)),
- client.prepareIndex("index-2", "type", "6").setSource("d", now.minusDays(5)),
- client.prepareIndex("index-3", "type", "7").setSource("d", now.minusDays(6)),
- client.prepareIndex("index-3", "type", "8").setSource("d", now.minusDays(7)),
- client.prepareIndex("index-3", "type", "9").setSource("d", now.minusDays(8)));
+ DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time");
+ indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now)),
+ client.prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1))),
+ client.prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2))),
+ client.prepareIndex("index-2", "type", "4").setSource("d", formatter.format(now.minusDays(3))),
+ client.prepareIndex("index-2", "type", "5").setSource("d", formatter.format(now.minusDays(4))),
+ client.prepareIndex("index-2", "type", "6").setSource("d", formatter.format(now.minusDays(5))),
+ client.prepareIndex("index-3", "type", "7").setSource("d", formatter.format(now.minusDays(6))),
+ client.prepareIndex("index-3", "type", "8").setSource("d", formatter.format(now.minusDays(7))),
+ client.prepareIndex("index-3", "type", "9").setSource("d", formatter.format(now.minusDays(8))));
ensureSearchable("index-1", "index-2", "index-3");
assertCacheState(client, "index-1", 0, 0);
assertCacheState(client, "index-2", 0, 0);
diff --git a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
index 0190627947448..e2b137e9506e7 100644
--- a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
+++ b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
@@ -26,11 +26,12 @@
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
@@ -60,14 +61,15 @@ public void testSerialization() throws Exception {
assertEquals(DocValueFormat.Decimal.class, vf.getClass());
assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern);
- DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
+ DateFormatter formatter = DateFormatters.forPattern("epoch_second");
+ DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1));
out = new BytesStreamOutput();
out.writeNamedWriteable(dateFormat);
in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
vf = in.readNamedWriteable(DocValueFormat.class);
assertEquals(DocValueFormat.DateTime.class, vf.getClass());
- assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.format());
- assertEquals(DateTimeZone.forOffsetHours(1), ((DocValueFormat.DateTime) vf).timeZone);
+ assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.pattern());
+ assertEquals(ZoneOffset.ofHours(1), ((DocValueFormat.DateTime) vf).timeZone);
out = new BytesStreamOutput();
out.writeNamedWriteable(DocValueFormat.GEOHASH);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
index 3a10edf183376..a54f30ffac0d1 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
@@ -36,7 +36,7 @@ protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() {
builder.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
- builder.timeZone(randomDateTimeZone());
+ builder.timeZone(randomZone());
}
return builder;
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index 0b4659d7e7008..afc8d2f535b3d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -23,8 +23,9 @@
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DateFieldMapper;
@@ -44,12 +45,14 @@
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
import org.junit.After;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -80,21 +83,21 @@
@ESIntegTestCase.SuiteScopeTestCase
public class DateHistogramIT extends ESIntegTestCase {
- static Map<DateTime, Map<String, Object>> expectedMultiSortBuckets;
+ static Map<ZonedDateTime, Map<String, Object>> expectedMultiSortBuckets;
- private DateTime date(int month, int day) {
- return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
+ private ZonedDateTime date(int month, int day) {
+ return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
}
- private DateTime date(String date) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date);
+ private ZonedDateTime date(String date) {
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
- private static String format(DateTime date, String pattern) {
- return DateTimeFormat.forPattern(pattern).print(date);
+ private static String format(ZonedDateTime date, String pattern) {
+ return DateFormatters.forPattern(pattern).format(date);
}
- private IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
+ private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
return client().prepareIndex(idx, "type").setSource(jsonBuilder()
.startObject()
.timeField("date", date)
@@ -139,7 +142,7 @@ public void setupSuiteScopeCluster() throws Exception {
ensureSearchable();
}
- private void addExpectedBucket(DateTime key, long docCount, double avg, double sum) {
+ private void addExpectedBucket(ZonedDateTime key, long docCount, double avg, double sum) {
Map<String, Object> bucketProps = new HashMap<>();
bucketProps.put("_count", docCount);
bucketProps.put("avg_l", avg);
@@ -193,12 +196,12 @@ public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
}
- private static String getBucketKeyAsString(DateTime key) {
- return getBucketKeyAsString(key, DateTimeZone.UTC);
+ private static String getBucketKeyAsString(ZonedDateTime key) {
+ return getBucketKeyAsString(key, ZoneOffset.UTC);
}
- private static String getBucketKeyAsString(DateTime key, DateTimeZone tz) {
- return Joda.forPattern(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()).printer().withZone(tz).print(key);
+ private static String getBucketKeyAsString(ZonedDateTime key, ZoneId tz) {
+ return DateFormatters.forPattern("strict_date_optional_time").withZone(tz).format(key);
}
public void testSingleValuedField() throws Exception {
@@ -214,33 +217,34 @@ public void testSingleValuedField() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testSingleValuedFieldWithTimeZone() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(DateTimeZone.forID("+01:00"))).execute()
+ .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
+ .timeZone(ZoneId.of("+01:00"))).execute()
.actionGet();
- DateTimeZone tz = DateTimeZone.forID("+01:00");
+ ZoneId tz = ZoneId.of("+01:00");
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@@ -249,46 +253,46 @@ public void testSingleValuedFieldWithTimeZone() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
- DateTime key = new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(4);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(5);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
}
@@ -298,7 +302,7 @@ public void testSingleValued_timeZone_epoch() throws Exception {
if (randomBoolean()) {
format = format + "||date_optional_time";
}
- DateTimeZone tz = DateTimeZone.forID("+01:00");
+ ZoneId tz = ZoneId.of("+01:00");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("date")
.dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
@@ -313,21 +317,21 @@ public void testSingleValued_timeZone_epoch() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
- List<DateTime> expectedKeys = new ArrayList<>();
- expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC));
+ List<ZonedDateTime> expectedKeys = new ArrayList<>();
+ expectedKeys.add(ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC));
- Iterator<DateTime> keyIterator = expectedKeys.iterator();
+ Iterator<ZonedDateTime> keyIterator = expectedKeys.iterator();
for (Histogram.Bucket bucket : buckets) {
assertThat(bucket, notNullValue());
- DateTime expectedKey = keyIterator.next();
- assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider)));
- assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey));
+ ZonedDateTime expectedKey = keyIterator.next();
+ assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.toInstant().toEpochMilli() / millisDivider)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(expectedKey));
assertThat(bucket.getDocCount(), equalTo(1L));
}
}
@@ -350,7 +354,7 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
int i = 0;
for (Histogram.Bucket bucket : buckets) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@@ -372,7 +376,7 @@ public void testSingleValuedFieldOrderedByKeyDesc() throws Exception {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -394,7 +398,7 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception {
int i = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@@ -416,7 +420,7 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -439,42 +443,42 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count");
Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value");
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(1.0));
- assertThat((DateTime) propertiesKeys[0], equalTo(key));
+ assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key));
assertThat((long) propertiesDocCounts[0], equalTo(1L));
assertThat((double) propertiesCounts[0], equalTo(1.0));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(5.0));
- assertThat((DateTime) propertiesKeys[1], equalTo(key));
+ assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key));
assertThat((long) propertiesDocCounts[1], equalTo(2L));
assertThat((double) propertiesCounts[1], equalTo(5.0));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(15.0));
- assertThat((DateTime) propertiesKeys[2], equalTo(key));
+ assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key));
assertThat((long) propertiesDocCounts[2], equalTo(3L));
assertThat((double) propertiesCounts[2], equalTo(15.0));
}
@@ -497,7 +501,7 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception {
int i = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@@ -520,7 +524,7 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -543,7 +547,7 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -620,25 +624,25 @@ public void testSingleValuedFieldWithValueScript() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -664,32 +668,32 @@ public void testMultiValuedField() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -758,32 +762,32 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
- key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 5, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -812,25 +816,25 @@ public void testScriptSingleValue() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -851,32 +855,32 @@ public void testScriptMultiValued() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -917,25 +921,25 @@ public void testPartiallyUnmapped() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -966,7 +970,7 @@ public void testEmptyAggregation() throws Exception {
public void testSingleValueWithTimeZone() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
- DateTime date = date("2014-03-11T00:00:00+00:00");
+ ZonedDateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().timeField("date", date).endObject());
date = date.plusHours(1);
@@ -977,9 +981,9 @@ public void testSingleValueWithTimeZone() throws Exception {
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
- .timeZone(DateTimeZone.forID("-02:00"))
+ .timeZone(ZoneId.of("-02:00"))
.dateHistogramInterval(DateHistogramInterval.DAY)
- .format("yyyy-MM-dd:HH-mm-ssZZ"))
+ .format("yyyy-MM-dd:HH-mm-ssZZZZZ"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5L));
@@ -1004,8 +1008,9 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
// we're testing on days, so the base must be rounded to a day
int interval = randomIntBetween(1, 2); // in days
long intervalMillis = interval * 24 * 60 * 60 * 1000;
- DateTime base = new DateTime(DateTimeZone.UTC).dayOfMonth().roundFloorCopy();
- DateTime baseKey = new DateTime(intervalMillis * (base.getMillis() / intervalMillis), DateTimeZone.UTC);
+ ZonedDateTime base = ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1);
+ ZonedDateTime baseKey = Instant.ofEpochMilli(intervalMillis * (base.toInstant().toEpochMilli() / intervalMillis))
+ .atZone(ZoneOffset.UTC);
prepareCreate("idx2")
.setSettings(
@@ -1022,7 +1027,7 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
} else {
int docCount = randomIntBetween(1, 3);
for (int j = 0; j < docCount; j++) {
- DateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
+ ZonedDateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
builders.add(indexDoc("idx2", date, j));
}
docCounts[i] = docCount;
@@ -1031,19 +1036,19 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
indexRandom(true, builders);
ensureSearchable("idx2");
- DateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);
+ ZonedDateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);
// randomizing the number of buckets on the min bound
// (can sometimes fall within the data range, but more frequently will fall before the data range)
int addedBucketsLeft = randomIntBetween(0, numOfBuckets);
- DateTime boundsMinKey;
+ ZonedDateTime boundsMinKey;
if (frequently()) {
boundsMinKey = baseKey.minusDays(addedBucketsLeft * interval);
} else {
boundsMinKey = baseKey.plusDays(addedBucketsLeft * interval);
addedBucketsLeft = 0;
}
- DateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));
+ ZonedDateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));
// randomizing the number of buckets on the max bound
// (can sometimes fall within the data range, but more frequently will fall after the data range)
@@ -1053,8 +1058,8 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
addedBucketsRight = 0;
boundsMaxKeyDelta = -boundsMaxKeyDelta;
}
- DateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
- DateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));
+ ZonedDateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
+ ZonedDateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));
// it could be that the random bounds.min we chose ended up greater than
// bounds.max - this should
@@ -1099,11 +1104,11 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(bucketsCount));
- DateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
+ ZonedDateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
for (int i = 0; i < bucketsCount; i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getKeyAsString(), equalTo(format(key, pattern)));
assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i]));
key = key.plusDays(interval);
@@ -1120,15 +1125,15 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
.setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
.execute().actionGet();
- DateMathParser parser = Joda.getStrictStandardDateFormatter().toDateMathParser();
+ DateMathParser parser = DateFormatters.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser();
// we pick a random timezone offset of +12/-12 hours and insert two documents
// one at 00:00 in that time zone and one at 12:00
List<IndexRequestBuilder> builders = new ArrayList<>();
int timeZoneHourOffset = randomIntBetween(-12, 12);
- DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
- DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
- DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
+ ZoneId timezone = ZoneOffset.ofHours(timeZoneHourOffset);
+ ZonedDateTime timeZoneStartToday = parser.parse("now/d", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
+ ZonedDateTime timeZoneNoonToday = parser.parse("now/d+12h", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
builders.add(indexDoc(index, timeZoneStartToday, 1));
builders.add(indexDoc(index, timeZoneNoonToday, 2));
indexRandom(true, builders);
@@ -1138,7 +1143,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
// retrieve those docs with the same time zone and extended bounds
response = client()
.prepareSearch(index)
- .setQuery(QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID()))
+ .setQuery(QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()))
.addAggregation(
dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1)).timeZone(timezone).minDocCount(0)
.extendedBounds(new ExtendedBounds("now/d", "now/d+23h"))
@@ -1156,7 +1161,8 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
for (int i = 0; i < buckets.size(); i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
- assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(), equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
+ ZonedDateTime zonedDateTime = timeZoneStartToday.plus(i * 60 * 60 * 1000, ChronoUnit.MILLIS);
+ assertThat("InternalBucket " + i + " had wrong key", (ZonedDateTime) bucket.getKey(), equalTo(zonedDateTime));
if (i == 0 || i == 12) {
assertThat(bucket.getDocCount(), equalTo(1L));
} else {
@@ -1177,10 +1183,11 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception {
.execute().actionGet();
List<IndexRequestBuilder> builders = new ArrayList<>();
- builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 1));
- builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 2));
- builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 3));
- builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 4));
+ DateFormatter formatter = DateFormatters.forPattern("date_optional_time");
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 1));
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 2));
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 3));
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 4));
indexRandom(true, builders);
ensureSearchable(index);
@@ -1242,22 +1249,22 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(1));
- DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2014, 3, 10, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
}
public void testIssue6965() {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("+01:00")).dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0))
+ .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00")).dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0))
.execute().actionGet();
assertSearchResponse(response);
- DateTimeZone tz = DateTimeZone.forID("+01:00");
+ ZoneId tz = ZoneId.of("+01:00");
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
@@ -1265,25 +1272,25 @@ public void testIssue6965() {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2011, 12, 31, 23, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2011, 12, 31, 23, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 1, 31, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 1, 31, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 2, 29, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 29, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -1293,7 +1300,8 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc
client().prepareIndex("test9491", "type").setSource("d", "2014-11-08T13:00:00Z"));
ensureSearchable("test9491");
SearchResponse response = client().prepareSearch("test9491")
- .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.YEAR).timeZone(DateTimeZone.forID("Asia/Jerusalem")))
+ .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.YEAR)
+ .timeZone(ZoneId.of("Asia/Jerusalem")).format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX"))
.execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@@ -1310,8 +1318,9 @@ public void testIssue8209() throws InterruptedException, ExecutionException {
client().prepareIndex("test8209", "type").setSource("d", "2014-04-30T00:00:00Z"));
ensureSearchable("test8209");
SearchResponse response = client().prepareSearch("test8209")
- .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("CET"))
- .minDocCount(0))
+ .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.MONTH)
+ .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
+ .timeZone(ZoneId.of("CET")).minDocCount(0))
.execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@@ -1355,7 +1364,7 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce
SearchResponse response = client().prepareSearch(indexDateUnmapped)
.addAggregation(
- dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("YYYY-MM")
+ dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("yyyy-MM")
.minDocCount(0).extendedBounds(new ExtendedBounds("2018-01", "2018-01")))
.execute().actionGet();
assertSearchResponse(response);
@@ -1377,7 +1386,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000"));
ensureSearchable(index);
SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d")
- .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin"))).execute().actionGet();
+ .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))).execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));
@@ -1385,7 +1394,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L));
response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d")
- .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin")).format("yyyy-MM-dd"))
+ .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin")).format("yyyy-MM-dd"))
.execute().actionGet();
assertSearchResponse(response);
histo = response.getAggregations().get("histo");
@@ -1406,7 +1415,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
public void testDSTEndTransition() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setQuery(new MatchNoneQueryBuilder())
- .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("Europe/Oslo"))
+ .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo"))
.dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds(
new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
.execute().actionGet();
@@ -1414,9 +1423,12 @@ public void testDSTEndTransition() throws Exception {
Histogram histo = response.getAggregations().get("histo");
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertThat(((DateTime) buckets.get(1).getKey()).getMillis() - ((DateTime) buckets.get(0).getKey()).getMillis(), equalTo(3600000L));
- assertThat(((DateTime) buckets.get(2).getKey()).getMillis() - ((DateTime) buckets.get(1).getKey()).getMillis(), equalTo(3600000L));
- assertThat(((DateTime) buckets.get(3).getKey()).getMillis() - ((DateTime) buckets.get(2).getKey()).getMillis(), equalTo(3600000L));
+ assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() -
+ ((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
+ assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() -
+ ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
+ assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() -
+ ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
}
/**
@@ -1427,8 +1439,10 @@ public void testDontCacheScripts() throws Exception {
assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=date")
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
.get());
- indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date(1, 1)),
- client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date(2, 1)));
+ String date = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(1, 1));
+ String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1));
+ indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date),
+ client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date2));
// Make sure we are starting with a clear cache
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
@@ -1498,7 +1512,7 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound
}
private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
- DateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(DateTime[]::new);
+ ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
SearchResponse response = client()
.prepareSearch("sort_idx")
.setTypes("type")
@@ -1528,7 +1542,7 @@ private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
}
}
- private DateTime key(Histogram.Bucket bucket) {
- return (DateTime) bucket.getKey();
+ private ZonedDateTime key(Histogram.Bucket bucket) {
+ return (ZonedDateTime) bucket.getKey();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index f6ad9b17a4514..74622d13d3cbf 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -20,16 +20,18 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.List;
import java.util.concurrent.ExecutionException;
@@ -49,9 +51,10 @@
public class DateHistogramOffsetIT extends ESIntegTestCase {
private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss";
+ private static final DateFormatter FORMATTER = DateFormatters.forPattern(DATE_FORMAT);
- private DateTime date(String date) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date);
+ private ZonedDateTime date(String date) {
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
@Before
@@ -64,7 +67,7 @@ public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
}
- private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException, InterruptedException, ExecutionException {
+ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException, InterruptedException, ExecutionException {
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().timeField("date", date).endObject());
@@ -91,8 +94,8 @@ public void testSingleValueWithPositiveOffset() throws Exception {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2L);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3L);
+ checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 2, 0, 0, 0, ZoneOffset.UTC), 2L);
+ checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 2, 0, 0, 0, ZoneOffset.UTC), 3L);
}
public void testSingleValueWithNegativeOffset() throws Exception {
@@ -113,8 +116,8 @@ public void testSingleValueWithNegativeOffset() throws Exception {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2L);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3L);
+ checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 9, 22, 0, 0, 0, ZoneOffset.UTC), 2L);
+ checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 10, 22, 0, 0, 0, ZoneOffset.UTC), 3L);
}
/**
@@ -140,11 +143,11 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(5));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 6, 0, DateTimeZone.UTC), 6L);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 6, 0, DateTimeZone.UTC), 6L);
- checkBucketFor(buckets.get(2), new DateTime(2014, 3, 12, 6, 0, DateTimeZone.UTC), 0L);
- checkBucketFor(buckets.get(3), new DateTime(2014, 3, 13, 6, 0, DateTimeZone.UTC), 6L);
- checkBucketFor(buckets.get(4), new DateTime(2014, 3, 14, 6, 0, DateTimeZone.UTC), 6L);
+ checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
+ checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
+ checkBucketFor(buckets.get(2), ZonedDateTime.of(2014, 3, 12, 6, 0, 0, 0, ZoneOffset.UTC), 0L);
+ checkBucketFor(buckets.get(3), ZonedDateTime.of(2014, 3, 13, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
+ checkBucketFor(buckets.get(4), ZonedDateTime.of(2014, 3, 14, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
}
/**
@@ -152,10 +155,10 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception {
* @param key the expected key
* @param expectedSize the expected size of the bucket
*/
- private static void checkBucketFor(Histogram.Bucket bucket, DateTime key, long expectedSize) {
+ private static void checkBucketFor(Histogram.Bucket bucket, ZonedDateTime key, long expectedSize) {
assertThat(bucket, notNullValue());
- assertThat(bucket.getKeyAsString(), equalTo(key.toString(DATE_FORMAT)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(bucket.getKeyAsString(), equalTo(FORMATTER.format(key)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(expectedSize));
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index c076fa827d072..dc52cb636268c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.aggregations.bucket;
-import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
@@ -34,9 +33,10 @@
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -52,6 +52,7 @@
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
@@ -70,12 +71,12 @@ private static IndexRequestBuilder indexDoc(int month, int day, int value) throw
.endObject());
}
- private static DateTime date(int month, int day) {
- return date(month, day, DateTimeZone.UTC);
+ private static ZonedDateTime date(int month, int day) {
+ return date(month, day, ZoneOffset.UTC);
}
- private static DateTime date(int month, int day, DateTimeZone timezone) {
- return new DateTime(2012, month, day, 0, 0, timezone);
+ private static ZonedDateTime date(int month, int day, ZoneId timezone) {
+ return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, timezone);
}
private static int numDocs;
@@ -128,7 +129,7 @@ public void testDateMath() throws Exception {
.prepareSearch("idx")
.addAggregation(
rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y")
- .addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).execute().actionGet();
+ .addUnboundedFrom("last year", "now-1y").timeZone(ZoneId.of("Etc/GMT+5"))).execute().actionGet();
assertSearchResponse(response);
@@ -176,8 +177,8 @@ public void testSingleValueField() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -185,8 +186,8 @@ public void testSingleValueField() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -194,8 +195,8 @@ public void testSingleValueField() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -222,8 +223,8 @@ public void testSingleValueFieldWithStringDates() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -231,8 +232,8 @@ public void testSingleValueFieldWithStringDates() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -240,8 +241,8 @@ public void testSingleValueFieldWithStringDates() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -269,8 +270,8 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -278,8 +279,8 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15-2012-03-15"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -287,19 +288,18 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
}
public void testSingleValueFieldWithDateMath() throws Exception {
- DateTimeZone timezone = randomDateTimeZone();
- int timeZoneOffset = timezone.getOffset(date(2, 15));
- // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format
- String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ");
- String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ");
+ // TODO: restore randomZone() once the suffix computation below handles zones whose
+ ZoneId timezone = ZoneId.of("Asia/Urumqi"); // Feb/Mar offsets differ (DST); a fixed non-DST zone keeps a single suffix valid
+ int timeZoneOffset = timezone.getRules().getOffset(date(2, 15).toInstant()).getTotalSeconds();
+ String suffix = timeZoneOffset == 0 ? "Z" : timezone.getId();
long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L;
SearchResponse response = client().prepareSearch("idx")
@@ -321,29 +321,29 @@ public void testSingleValueFieldWithDateMath() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
+ assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + suffix));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
assertThat(bucket.getFromAsString(), nullValue());
- assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
+ assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + suffix));
assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix +
- "-2012-03-15T00:00:00.000" + mar15Suffix));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
- assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
- assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
+ assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + suffix +
+ "-2012-03-15T00:00:00.000" + suffix));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
+ assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + suffix));
+ assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + suffix));
assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
- assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
+ assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + suffix + "-*"));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
+ assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + suffix));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount));
}
@@ -369,8 +369,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r1"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -378,8 +378,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r2"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -387,8 +387,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r3"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -429,8 +429,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r1"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -444,8 +444,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r2"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -459,8 +459,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r3"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -502,8 +502,8 @@ public void testMultiValuedField() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -511,8 +511,8 @@ public void testMultiValuedField() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(3L));
@@ -520,8 +520,8 @@ public void testMultiValuedField() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
@@ -558,8 +558,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(1L));
@@ -567,8 +567,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -576,8 +576,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 1L));
@@ -617,8 +617,8 @@ public void testScriptSingleValue() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -626,8 +626,8 @@ public void testScriptSingleValue() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -635,8 +635,8 @@ public void testScriptSingleValue() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -676,8 +676,8 @@ public void testScriptMultiValued() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -685,8 +685,8 @@ public void testScriptMultiValued() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(3L));
@@ -694,8 +694,8 @@ public void testScriptMultiValued() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
@@ -724,8 +724,8 @@ public void testUnmapped() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -733,8 +733,8 @@ public void testUnmapped() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -742,8 +742,8 @@ public void testUnmapped() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -770,8 +770,8 @@ public void testUnmappedWithStringDates() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -779,8 +779,8 @@ public void testUnmappedWithStringDates() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -788,8 +788,8 @@ public void testUnmappedWithStringDates() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -816,8 +816,8 @@ public void testPartiallyUnmapped() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -825,8 +825,8 @@ public void testPartiallyUnmapped() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -834,8 +834,8 @@ public void testPartiallyUnmapped() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -860,8 +860,8 @@ public void testEmptyAggregation() throws Exception {
assertThat(dateRange.getName(), equalTo("date_range"));
assertThat(buckets.size(), is(1));
assertThat((String) buckets.get(0).getKey(), equalTo("0-1"));
- assertThat(((DateTime) buckets.get(0).getFrom()).getMillis(), equalTo(0L));
- assertThat(((DateTime) buckets.get(0).getTo()).getMillis(), equalTo(1L));
+ assertThat(((ZonedDateTime) buckets.get(0).getFrom()).toInstant().toEpochMilli(), equalTo(0L));
+ assertThat(((ZonedDateTime) buckets.get(0).getTo()).toInstant().toEpochMilli(), equalTo(1L));
assertThat(buckets.get(0).getDocCount(), equalTo(0L));
assertThat(buckets.get(0).getAggregations().asList().isEmpty(), is(true));
}
@@ -904,7 +904,8 @@ public void testDontCacheScripts() throws Exception {
params.put("fieldname", "date");
SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params))
- .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)))
+ .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)))
.get();
assertSearchResponse(r);
@@ -916,7 +917,8 @@ public void testDontCacheScripts() throws Exception {
// To make sure that the cache is working test that a request not using
// a script is cached
r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date")
- .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)))
+ .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)))
.get();
assertSearchResponse(r);
@@ -973,8 +975,8 @@ public void testRangeWithFormatStringValue() throws Exception {
Exception e = expectThrows(Exception.class, () -> client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000)).get());
Throwable cause = e.getCause();
- assertThat(cause, instanceOf(ElasticsearchParseException.class));
- assertEquals("failed to parse date field [1000000] with format [strict_hour_minute_second]", cause.getMessage());
+ assertThat(cause.getMessage(),
+ containsString("could not parse input [1000000] with date formatter [strict_hour_minute_second]"));
}
/**
@@ -985,9 +987,9 @@ public void testRangeWithFormatNumericValue() throws Exception {
String indexName = "dateformat_numeric_test_idx";
assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second"));
indexRandom(true,
- client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1000).endObject()),
+ client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()),
client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()),
- client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3000).endObject()));
+ client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()));
// using no format should work when to/from is compatible with format in
// mapping
@@ -1026,8 +1028,8 @@ public void testRangeWithFormatNumericValue() throws Exception {
.addAggregation(dateRange("date_range").field("date").addRange("1000.123", "3000.8").addRange("3000.8", "4000.3")).get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
- assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
- assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ assertBucket(buckets.get(0), 2L, "1000.123-3000.8", 1000123L, 3000800L);
+ assertBucket(buckets.get(1), 1L, "3000.8-4000.3", 3000800L, 4000300L);
// using different format should work when to/from is compatible with
// format in aggregation
@@ -1062,8 +1064,8 @@ private static List checkBuckets(Range dateRange, String expectedA
private static void assertBucket(Bucket bucket, long bucketSize, String expectedKey, long expectedFrom, long expectedTo) {
assertThat(bucket.getDocCount(), equalTo(bucketSize));
assertThat((String) bucket.getKey(), equalTo(expectedKey));
- assertThat(((DateTime) bucket.getFrom()).getMillis(), equalTo(expectedFrom));
- assertThat(((DateTime) bucket.getTo()).getMillis(), equalTo(expectedTo));
+ assertThat(((ZonedDateTime) bucket.getFrom()).toInstant().toEpochMilli(), equalTo(expectedFrom));
+ assertThat(((ZonedDateTime) bucket.getTo()).toInstant().toEpochMilli(), equalTo(expectedTo));
assertThat(bucket.getAggregations().asList().isEmpty(), is(true));
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
index 1c198fd3ca5d6..34164bc28967c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
@@ -65,7 +65,7 @@ protected DateRangeAggregationBuilder createTestAggregatorBuilder() {
factory.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
- factory.timeZone(randomDateTimeZone());
+ factory.timeZone(randomZone());
}
return factory;
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
index ac985660399d7..d31f7a89b462e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
@@ -42,7 +42,7 @@ private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() {
histo.interval(randomNonNegativeLong());
}
if (randomBoolean()) {
- histo.timeZone(randomDateTimeZone());
+ histo.timeZone(randomZone());
}
if (randomBoolean()) {
histo.missingBucket(true);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
index f2cb91673ce10..f04e0a1326280 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
@@ -39,6 +39,7 @@
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
@@ -57,12 +58,12 @@
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.sort.SortOrder;
-import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.net.InetAddress;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -80,6 +81,7 @@
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
public class CompositeAggregatorTests extends AggregatorTestCase {
private static MappedFieldType[] FIELD_TYPES;
@@ -1155,8 +1157,7 @@ public void testThatDateHistogramFailsFormatAfter() throws IOException {
},
(result) -> {}
));
- assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class));
- assertThat(exc.getCause().getMessage(), containsString("Parse failure"));
+ assertThat(exc.getMessage(), is("could not parse input [1474329600000] with date formatter [yyyy-MM-dd]"));
}
public void testWithDateHistogramAndTimeZone() throws IOException {
@@ -1176,7 +1177,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
.field("date")
.dateHistogramInterval(DateHistogramInterval.days(1))
- .timeZone(DateTimeZone.forOffsetHours(1));
+ .timeZone(ZoneOffset.ofHours(1));
return new CompositeAggregationBuilder("name", Collections.singletonList(histo));
},
(result) -> {
@@ -1196,7 +1197,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
.field("date")
.dateHistogramInterval(DateHistogramInterval.days(1))
- .timeZone(DateTimeZone.forOffsetHours(1));
+ .timeZone(ZoneOffset.ofHours(1));
return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
.aggregateAfter(createAfterKey("date", 1474326000000L));
@@ -1835,6 +1836,6 @@ private static Map<String, List<Object>> createDocument(Object... fields) {
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
index 022f5e6abc13c..3d831d78bc387 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
@@ -21,7 +21,7 @@
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
@@ -29,10 +29,10 @@
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
-import org.joda.time.DateTimeZone;
import org.junit.After;
import java.io.IOException;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@@ -58,7 +58,7 @@ private static DocValueFormat randomDocValueFormat(boolean isLong) {
if (isLong) {
// we use specific format only for date histogram on a long/date field
if (randomBoolean()) {
- return new DocValueFormat.DateTime(Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
+ return new DocValueFormat.DateTime(DateFormatters.forPattern("epoch_second"), ZoneOffset.ofHours(1));
} else {
return DocValueFormat.RAW;
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
index 2d5109405dc1c..4cfdd107ee807 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
@@ -33,6 +33,7 @@
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -40,11 +41,13 @@
import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
import org.elasticsearch.search.aggregations.metrics.Stats;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.junit.Assert;
import java.io.IOException;
+import java.time.LocalDate;
+import java.time.YearMonth;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -58,17 +61,17 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
private static final String DATE_FIELD = "date";
private static final String INSTANT_FIELD = "instant";
- private static final List<DateTime> DATES_WITH_TIME = Arrays.asList(
- new DateTime(2010, 3, 12, 1, 7, 45, DateTimeZone.UTC),
- new DateTime(2010, 4, 27, 3, 43, 34, DateTimeZone.UTC),
- new DateTime(2012, 5, 18, 4, 11, 0, DateTimeZone.UTC),
- new DateTime(2013, 5, 29, 5, 11, 31, DateTimeZone.UTC),
- new DateTime(2013, 10, 31, 8, 24, 5, DateTimeZone.UTC),
- new DateTime(2015, 2, 13, 13, 9, 32, DateTimeZone.UTC),
- new DateTime(2015, 6, 24, 13, 47, 43, DateTimeZone.UTC),
- new DateTime(2015, 11, 13, 16, 14, 34, DateTimeZone.UTC),
- new DateTime(2016, 3, 4, 17, 9, 50, DateTimeZone.UTC),
- new DateTime(2017, 12, 12, 22, 55, 46, DateTimeZone.UTC));
+ private static final List<ZonedDateTime> DATES_WITH_TIME = Arrays.asList(
+ ZonedDateTime.of(2010, 3, 12, 1, 7, 45, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2010, 4, 27, 3, 43, 34, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2012, 5, 18, 4, 11, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 5, 29, 5, 11, 31, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 10, 31, 8, 24, 5, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2015, 2, 13, 13, 9, 32, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2015, 6, 24, 13, 47, 43, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2016, 3, 4, 17, 9, 50, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC));
private static final Query DEFAULT_QUERY = new MatchAllDocsQuery();
@@ -165,7 +168,7 @@ public void testSubAggregations() throws IOException {
}
public void testNoDocs() throws IOException {
- final List<DateTime> dates = Collections.emptyList();
+ final List<ZonedDateTime> dates = Collections.emptyList();
final Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD);
testSearchCase(DEFAULT_QUERY, dates, aggregation,
@@ -184,8 +187,10 @@ public void testAggregateWrongField() throws IOException {
}
public void testIntervalYear() throws IOException {
- final long start = new DateTime(DateTimeZone.UTC).withDate(2015, 1, 1).getMillis();
- final long end = new DateTime(DateTimeZone.UTC).withDate(2017, 12, 31).getMillis();
+
+
+ final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
+ final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end);
testSearchCase(rangeQuery, DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
@@ -202,8 +207,8 @@ public void testIntervalYear() throws IOException {
testSearchAndReduceCase(rangeQuery, DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
histogram -> {
- final DateTime startDate = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(startDate, 3);
expectedDocCount.put(startDate.plusYears(1), 1);
expectedDocCount.put(startDate.plusYears(2), 1);
@@ -216,13 +221,13 @@ public void testIntervalYear() throws IOException {
}
public void testIntervalMonth() throws IOException {
- final List<DateTime> datesForMonthInterval = Arrays.asList(
- new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 3, 4, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 3, 5, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 3, 6, 0, 0, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForMonthInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 3, 4, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 3, 5, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -236,7 +241,7 @@ public void testIntervalMonth() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForMonthInterval.get(0).withDayOfMonth(1), 1);
expectedDocCount.put(datesForMonthInterval.get(1).withDayOfMonth(1), 2);
expectedDocCount.put(datesForMonthInterval.get(3).withDayOfMonth(1), 3);
@@ -259,15 +264,15 @@ public void testWithLargeNumberOfBuckets() {
}
public void testIntervalDay() throws IOException {
- final List<DateTime> datesForDayInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC));
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC));
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForDayInterval.get(0), 1);
expectedDocCount.put(datesForDayInterval.get(1), 2);
expectedDocCount.put(datesForDayInterval.get(3), 3);
@@ -292,16 +297,16 @@ public void testIntervalDay() throws IOException {
}
public void testIntervalDayWithTZ() throws IOException {
- final List<DateTime> datesForDayInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForDayInterval,
- aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> {
+ aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-01-31T23:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T23:00:00.000-01:00", 2);
@@ -313,7 +318,7 @@ public void testIntervalDayWithTZ() throws IOException {
assertEquals(expectedDocCount.getOrDefault(bucket.getKeyAsString(), 0).longValue(), bucket.getDocCount()));
});
testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval,
- aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> {
+ aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-01-31T00:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2);
@@ -327,17 +332,17 @@ public void testIntervalDayWithTZ() throws IOException {
}
public void testIntervalHour() throws IOException {
- final List<DateTime> datesForHourInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
histogram -> {
@@ -353,13 +358,13 @@ public void testIntervalHour() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
- expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 2);
- expectedDocCount.put(datesForHourInterval.get(2).withMinuteOfHour(0), 1);
- expectedDocCount.put(datesForHourInterval.get(3).withMinuteOfHour(0), 1);
- expectedDocCount.put(datesForHourInterval.get(4).withMinuteOfHour(0), 2);
- expectedDocCount.put(datesForHourInterval.get(6).withMinuteOfHour(0), 1);
- expectedDocCount.put(datesForHourInterval.get(7).withMinuteOfHour(0), 3);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
+ expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 2);
+ expectedDocCount.put(datesForHourInterval.get(2).withMinute(0), 1);
+ expectedDocCount.put(datesForHourInterval.get(3).withMinute(0), 1);
+ expectedDocCount.put(datesForHourInterval.get(4).withMinute(0), 2);
+ expectedDocCount.put(datesForHourInterval.get(6).withMinute(0), 1);
+ expectedDocCount.put(datesForHourInterval.get(7).withMinute(0), 3);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(8, buckets.size());
buckets.forEach(bucket ->
@@ -369,10 +374,10 @@ public void testIntervalHour() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
- expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 3);
- expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinuteOfHour(0), 3);
- expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinuteOfHour(0), 4);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
+ expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 3);
+ expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinute(0), 3);
+ expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinute(0), 4);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(expectedDocCount.size(), buckets.size());
buckets.forEach(bucket ->
@@ -382,22 +387,23 @@ public void testIntervalHour() throws IOException {
}
public void testIntervalHourWithTZ() throws IOException {
- final List<DateTime> datesForHourInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForHourInterval,
- aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)),
+ aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
histogram -> {
final List<String> dateStrings = datesForHourInterval.stream()
- .map(dateTime -> dateTime.withZone(DateTimeZone.forOffsetHours(-1)).toString()).collect(Collectors.toList());
+ .map(dateTime -> DateFormatters.forPattern("strict_date_time")
+ .format(dateTime.withZoneSameInstant(ZoneOffset.ofHours(-1)))).collect(Collectors.toList());
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(datesForHourInterval.size(), buckets.size());
for (int i = 0; i < buckets.size(); i++) {
@@ -408,7 +414,7 @@ public void testIntervalHourWithTZ() throws IOException {
}
);
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
- aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)),
+ aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-02-01T08:00:00.000-01:00", 2);
@@ -427,10 +433,10 @@ public void testIntervalHourWithTZ() throws IOException {
public void testRandomSecondIntervals() throws IOException {
final int length = 120;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusSeconds(i);
+ final ZonedDateTime date = startDate.plusSeconds(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -456,10 +462,10 @@ public void testRandomSecondIntervals() throws IOException {
public void testRandomMinuteIntervals() throws IOException {
final int length = 120;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusMinutes(i);
+ final ZonedDateTime date = startDate.plusMinutes(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -485,10 +491,10 @@ public void testRandomMinuteIntervals() throws IOException {
public void testRandomHourIntervals() throws IOException {
final int length = 72;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusHours(i);
+ final ZonedDateTime date = startDate.plusHours(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -513,10 +519,10 @@ public void testRandomHourIntervals() throws IOException {
public void testRandomDayIntervals() throws IOException {
final int length = 140;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusDays(i);
+ final ZonedDateTime date = startDate.plusDays(i);
dataset.add(date);
}
final int randomChoice = randomIntBetween(1, 3);
@@ -552,17 +558,17 @@ public void testRandomDayIntervals() throws IOException {
final int randomIndex = randomInt(2);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusMonths(randomIndex), bucket.getKey());
- assertEquals(startDate.plusMonths(randomIndex).dayOfMonth().getMaximumValue(), bucket.getDocCount());
+ assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount());
});
}
}
public void testRandomMonthIntervals() throws IOException {
final int length = 60;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusMonths(i);
+ final ZonedDateTime date = startDate.plusMonths(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -586,10 +592,10 @@ public void testRandomMonthIntervals() throws IOException {
public void testRandomYearIntervals() throws IOException {
final int length = 300;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusYears(i);
+ final ZonedDateTime date = startDate.plusYears(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -615,12 +621,12 @@ public void testRandomYearIntervals() throws IOException {
}
public void testIntervalMinute() throws IOException {
- final List<DateTime> datesForMinuteInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 9, 2, 35, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 2, 59, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 15, 37, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 16, 4, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 16, 42, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForMinuteInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 35, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 59, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 15, 37, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 16, 4, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
@@ -637,10 +643,10 @@ public void testIntervalMinute() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval,
aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
- expectedDocCount.put(datesForMinuteInterval.get(0).withSecondOfMinute(0), 2);
- expectedDocCount.put(datesForMinuteInterval.get(2).withSecondOfMinute(0), 1);
- expectedDocCount.put(datesForMinuteInterval.get(3).withSecondOfMinute(0), 2);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
+ expectedDocCount.put(datesForMinuteInterval.get(0).withSecond(0), 2);
+ expectedDocCount.put(datesForMinuteInterval.get(2).withSecond(0), 1);
+ expectedDocCount.put(datesForMinuteInterval.get(3).withSecond(0), 2);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(15, buckets.size());
buckets.forEach(bucket ->
@@ -650,15 +656,15 @@ public void testIntervalMinute() throws IOException {
}
public void testIntervalSecond() throws IOException {
- final List<DateTime> datesForSecondInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 0, 0, 5, 15, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 7, 299, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 7, 74, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 11, 688, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 11, 210, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 11, 380, DateTimeZone.UTC));
- final DateTime startDate = datesForSecondInterval.get(0).withMillisOfSecond(0);
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final List<ZonedDateTime> datesForSecondInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 5, 15_000_000, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 299_000_000, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 74_000_000, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 688_000_000, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 210_000_000, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380_000_000, ZoneOffset.UTC));
+ final ZonedDateTime startDate = datesForSecondInterval.get(0).withNano(0);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(startDate, 1);
expectedDocCount.put(startDate.plusSeconds(2), 2);
expectedDocCount.put(startDate.plusSeconds(6), 3);
@@ -681,19 +687,19 @@ public void testIntervalSecond() throws IOException {
);
}
- private void testSearchCase(final Query query, final List<DateTime> dataset,
+ private void testSearchCase(final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
executeTestCase(false, query, dataset, configure, verify);
}
- private void testSearchAndReduceCase(final Query query, final List<DateTime> dataset,
+ private void testSearchAndReduceCase(final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
executeTestCase(true, query, dataset, configure, verify);
}
- private void testBothCases(final Query query, final List<DateTime> dataset,
+ private void testBothCases(final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
executeTestCase(false, query, dataset, configure, verify);
@@ -714,18 +720,18 @@ protected IndexSettings createIndexSettings() {
);
}
- private void executeTestCase(final boolean reduced, final Query query, final List<DateTime> dataset,
+ private void executeTestCase(final boolean reduced, final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
final Document document = new Document();
- for (final DateTime date : dataset) {
+ for (final ZonedDateTime date : dataset) {
if (frequently()) {
indexWriter.commit();
}
- final long instant = date.getMillis();
+ final long instant = date.toInstant().toEpochMilli();
document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
document.add(new LongPoint(INSTANT_FIELD, instant));
indexWriter.addDocument(document);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
index b29f7032562ba..c3e7e012d3e42 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
@@ -30,6 +30,7 @@
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
@@ -455,6 +456,6 @@ private void executeTestCase(boolean reduced, Query query, List dataset,
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java
index 5148b0b85754f..90c0c282d71ff 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java
@@ -31,9 +31,10 @@
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.BucketOrder;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
@@ -166,15 +167,15 @@ public void testRewriteTimeZone() throws IOException {
assertNull(builder.rewriteTimeZone(shardContextThatCrosses));
// fixed timeZone => no rewrite
- DateTimeZone tz = DateTimeZone.forOffsetHours(1);
+ ZoneId tz = ZoneOffset.ofHours(1);
builder.timeZone(tz);
assertSame(tz, builder.rewriteTimeZone(shardContextThatDoesntCross));
assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses));
// daylight-saving-times => rewrite if doesn't cross
- tz = DateTimeZone.forID("Europe/Paris");
+ tz = ZoneId.of("Europe/Paris");
builder.timeZone(tz);
- assertEquals(DateTimeZone.forOffsetHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
+ assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses));
// Rounded values are no longer all within the same transitions => no rewrite
@@ -187,7 +188,7 @@ public void testRewriteTimeZone() throws IOException {
builder.timeZone(tz);
builder.interval(1000L * 60 * 60 * 24); // ~ 1 day
- assertEquals(DateTimeZone.forOffsetHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
+ assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses));
// Because the interval is large, rounded values are not
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java
index 86ddd4843a75b..f5581d1661c3d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java
@@ -27,6 +27,8 @@
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -37,10 +39,10 @@
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
import org.joda.time.Instant;
import java.io.IOException;
+import java.time.ZoneOffset;
import static java.lang.Math.max;
import static java.lang.Math.min;
@@ -64,17 +66,19 @@ public static ExtendedBounds randomExtendedBounds() {
* Construct a random {@link ExtendedBounds} in pre-parsed form.
*/
public static ExtendedBounds randomParsedExtendedBounds() {
+ long maxDateValue = 253402300799999L; // end of year 9999
+ long minDateValue = -377705116800000L; // beginning of year -9999
if (randomBoolean()) {
// Construct with one missing bound
if (randomBoolean()) {
- return new ExtendedBounds(null, randomLong());
+ return new ExtendedBounds(null, maxDateValue);
}
- return new ExtendedBounds(randomLong(), null);
+ return new ExtendedBounds(minDateValue, null);
}
- long a = randomLong();
+ long a = randomLongBetween(minDateValue, maxDateValue);
long b;
do {
- b = randomLong();
+ b = randomLongBetween(minDateValue, maxDateValue);
} while (a == b);
long min = min(a, b);
long max = max(a, b);
@@ -101,8 +105,8 @@ public void testParseAndValidate() {
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null,
null, xContentRegistry(), writableRegistry(), null, null, () -> now, null);
when(context.getQueryShardContext()).thenReturn(qsc);
- FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime");
- DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC);
+ DateFormatter formatter = DateFormatters.forPattern("dateOptionalTime");
+ DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC);
ExtendedBounds expected = randomParsedExtendedBounds();
ExtendedBounds parsed = unparsed(expected).parseAndValidate("test", context, format);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
index dd3425c20f43c..fe5c967f54be8 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
@@ -19,8 +19,8 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
@@ -28,12 +28,12 @@
import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram.BucketInfo;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.OffsetDateTime;
+import java.time.ZoneId;
import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -89,16 +89,16 @@ protected InternalAutoDateHistogram createTestInstance(String name,
*/
public void testGetAppropriateRoundingUsesCorrectIntervals() {
RoundingInfo[] roundings = new RoundingInfo[6];
- DateTimeZone timeZone = DateTimeZone.UTC;
+ ZoneId timeZone = ZoneOffset.UTC;
// Since we pass 0 as the starting index to getAppropriateRounding, we'll also use
// an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval()
// will be larger than the estimate.
- roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
- 1000L, "s", 1000);
- roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
- 60 * 1000L, "m", 1, 5, 10, 30);
- roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
- 60 * 60 * 1000L, "h", 1, 3, 12);
+ roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone),
+ 1000L, "s",1000);
+ roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone),
+ 60 * 1000L, "m",1, 5, 10, 30);
+ roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone),
+ 60 * 60 * 1000L, "h",1, 3, 12);
OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC);
// We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function
@@ -117,7 +117,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
if (roundedBucketKey >= keyForBucket
&& roundedBucketKey < keyForBucket + intervalInMillis) {
@@ -194,7 +194,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
Map<Long, Long> actualCounts = new TreeMap<>();
for (Histogram.Bucket bucket : reduced.getBuckets()) {
- actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+ actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(),
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
assertEquals(expectedCounts, actualCounts);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
index b2b7079815ea9..f0f5e650d4ea4 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
@@ -23,11 +23,11 @@
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
+import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -76,13 +76,13 @@ protected void assertReduced(InternalDateHistogram reduced, List<InternalDateHistogram> inputs) {
Map<Long, Long> expectedCounts = new TreeMap<>();
for (Histogram histogram : inputs) {
for (Histogram.Bucket bucket : histogram.getBuckets()) {
- expectedCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+ expectedCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(),
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
}
Map<Long, Long> actualCounts = new TreeMap<>();
for (Histogram.Bucket bucket : reduced.getBuckets()) {
- actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+ actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(),
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
assertEquals(expectedCounts, actualCounts);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java
index 3836f0cc2ae14..47a8bd53fa1bc 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java
@@ -40,9 +40,9 @@
import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregator;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collections;
import java.util.function.Consumer;
@@ -248,7 +248,7 @@ public void testWeightSetTimezone() throws IOException {
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder()
.setFieldName("weight_field")
- .setTimeZone(DateTimeZone.UTC)
+ .setTimeZone(ZoneOffset.UTC)
.build();
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
.value(valueConfig)
@@ -271,7 +271,7 @@ public void testWeightSetTimezone() throws IOException {
public void testValueSetTimezone() throws IOException {
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder()
.setFieldName("value_field")
- .setTimeZone(DateTimeZone.UTC)
+ .setTimeZone(ZoneOffset.UTC)
.build();
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java
index c0dd46011755b..26e90a457c2e0 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java
@@ -28,6 +28,7 @@
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -141,8 +142,7 @@ public void testSameAggNames() throws IOException {
}
}
-
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java
index bc3610fca8e86..4d246947c3d41 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java
@@ -31,9 +31,9 @@
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
import java.io.IOException;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -131,10 +131,10 @@ public void testEmptyBucketSort() {
assertThat(histogram, notNullValue());
// These become our baseline
List<? extends Histogram.Bucket> timeBuckets = histogram.getBuckets();
- DateTime previousKey = (DateTime) timeBuckets.get(0).getKey();
+ ZonedDateTime previousKey = (ZonedDateTime) timeBuckets.get(0).getKey();
for (Histogram.Bucket timeBucket : timeBuckets) {
- assertThat(previousKey, lessThanOrEqualTo((DateTime) timeBucket.getKey()));
- previousKey = (DateTime) timeBucket.getKey();
+ assertThat(previousKey, lessThanOrEqualTo((ZonedDateTime) timeBucket.getKey()));
+ previousKey = (ZonedDateTime) timeBucket.getKey();
}
// Now let's test using size
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java
index 961db6931fb6a..0dfb8de801f22 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java
@@ -31,6 +31,7 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -307,6 +308,6 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
index 95710ead1a4e1..dfc273f366866 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
@@ -21,6 +21,8 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
@@ -31,12 +33,14 @@
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matcher;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
import org.junit.After;
import java.io.IOException;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -60,19 +64,19 @@ public class DateDerivativeIT extends ESIntegTestCase {
private static final String IDX_DST_END = "idx_dst_end";
private static final String IDX_DST_KATHMANDU = "idx_dst_kathmandu";
- private DateTime date(int month, int day) {
- return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
+ private ZonedDateTime date(int month, int day) {
+ return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
}
- private DateTime date(String date) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date);
+ private ZonedDateTime date(String date) {
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
- private static String format(DateTime date, String pattern) {
- return DateTimeFormat.forPattern(pattern).print(date);
+ private static String format(ZonedDateTime date, String pattern) {
+ return DateFormatters.forPattern(pattern).format(date);
}
- private static IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
+ private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
return client().prepareIndex(idx, "type").setSource(
jsonBuilder().startObject().timeField("date", date).field("value", value).endObject());
}
@@ -124,27 +128,27 @@ public void testSingleValuedField() throws Exception {
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat((ZonedDateTime) bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat((ZonedDateTime) bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), equalTo(1d));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat((ZonedDateTime) bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -167,28 +171,28 @@ public void testSingleValuedFieldNormalised() throws Exception {
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat(bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
Derivative docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat(bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), closeTo(1d, 0.00001));
assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 31d, 0.00001));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat(bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -203,11 +207,14 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep
createIndex(IDX_DST_START);
List<IndexRequestBuilder> builders = new ArrayList<>();
- DateTimeZone timezone = DateTimeZone.forID("CET");
- addNTimes(1, IDX_DST_START, new DateTime("2012-03-24T01:00:00", timezone), builders);
- addNTimes(2, IDX_DST_START, new DateTime("2012-03-25T01:00:00", timezone), builders); // day with dst shift, only 23h long
- addNTimes(3, IDX_DST_START, new DateTime("2012-03-26T01:00:00", timezone), builders);
- addNTimes(4, IDX_DST_START, new DateTime("2012-03-27T01:00:00", timezone), builders);
+ ZoneId timezone = ZoneId.of("CET");
+ DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone);
+ // epoch millis: 1332547200000
+ addNTimes(1, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-24T01:00:00")), builders);
+ // day with dst shift, only 23h long
+ addNTimes(2, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-25T01:00:00")), builders);
+ addNTimes(3, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-26T01:00:00")), builders);
+ addNTimes(4, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-27T01:00:00")), builders);
indexRandom(true, builders);
ensureSearchable();
@@ -227,11 +234,23 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertBucket(buckets.get(0), new DateTime("2012-03-24", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
- assertBucket(buckets.get(1), new DateTime("2012-03-25", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d);
+ DateFormatter dateFormatter = DateFormatters.forPattern("yyyy-MM-dd");
+ ZonedDateTime expectedKeyFirstBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-24")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null);
+
+ ZonedDateTime expectedKeySecondBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-25")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(1), expectedKeySecondBucket,2L, notNullValue(), 1d, 1d / 24d);
+
// the following is normalized using a 23h bucket width
- assertBucket(buckets.get(2), new DateTime("2012-03-26", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 23d);
- assertBucket(buckets.get(3), new DateTime("2012-03-27", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d);
+ ZonedDateTime expectedKeyThirdBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-26")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 23d);
+
+ ZonedDateTime expectedKeyFourthBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d);
}
/**
@@ -239,13 +258,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep
*/
public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Exception {
createIndex(IDX_DST_END);
- DateTimeZone timezone = DateTimeZone.forID("CET");
+ ZoneId timezone = ZoneId.of("CET");
List<IndexRequestBuilder> builders = new ArrayList<>();
- addNTimes(1, IDX_DST_END, new DateTime("2012-10-27T01:00:00", timezone), builders);
- addNTimes(2, IDX_DST_END, new DateTime("2012-10-28T01:00:00", timezone), builders); // day with dst shift -1h, 25h long
- addNTimes(3, IDX_DST_END, new DateTime("2012-10-29T01:00:00", timezone), builders);
- addNTimes(4, IDX_DST_END, new DateTime("2012-10-30T01:00:00", timezone), builders);
+ DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone);
+ addNTimes(1, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-27T01:00:00")), builders);
+ // day with dst shift -1h, 25h long
+ addNTimes(2, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-28T01:00:00")), builders);
+ addNTimes(3, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-29T01:00:00")), builders);
+ addNTimes(4, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-30T01:00:00")), builders);
indexRandom(true, builders);
ensureSearchable();
@@ -265,27 +286,43 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertBucket(buckets.get(0), new DateTime("2012-10-27", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
- assertBucket(buckets.get(1), new DateTime("2012-10-28", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d);
+ DateFormatter dateFormatter = DateFormatters.forPattern("yyyy-MM-dd").withZone(ZoneOffset.UTC);
+
+ ZonedDateTime expectedKeyFirstBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null);
+
+ ZonedDateTime expectedKeySecondBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-28")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d);
+
// the following is normalized using a 25h bucket width
- assertBucket(buckets.get(2), new DateTime("2012-10-29", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 25d);
- assertBucket(buckets.get(3), new DateTime("2012-10-30", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d);
+ ZonedDateTime expectedKeyThirdBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-29")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 25d);
+
+ ZonedDateTime expectedKeyFourthBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-30")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d);
}
/**
* also check for time zone shifts that are not one hour, e.g.
* "Asia/Kathmandu, 1 Jan 1986 - Time Zone Change (IST → NPT), at 00:00:00 clocks were turned forward 00:15 minutes
*/
+ // This test fails because we cannot parse negative epoch milliseconds yet... but perhaps we don't have to if we use instants in the
+ // rangefield method?
public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exception {
createIndex(IDX_DST_KATHMANDU);
- DateTimeZone timezone = DateTimeZone.forID("Asia/Kathmandu");
+ ZoneId timezone = ZoneId.of("Asia/Kathmandu");
List<IndexRequestBuilder> builders = new ArrayList<>();
- addNTimes(1, IDX_DST_KATHMANDU, new DateTime("1985-12-31T22:30:00", timezone), builders);
+ DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone);
+ addNTimes(1, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1985-12-31T22:30:00")), builders);
// the shift happens during the next bucket, which includes the 45min that do not start on the full hour
- addNTimes(2, IDX_DST_KATHMANDU, new DateTime("1985-12-31T23:30:00", timezone), builders);
- addNTimes(3, IDX_DST_KATHMANDU, new DateTime("1986-01-01T01:30:00", timezone), builders);
- addNTimes(4, IDX_DST_KATHMANDU, new DateTime("1986-01-01T02:30:00", timezone), builders);
+ addNTimes(2, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1985-12-31T23:30:00")), builders);
+ addNTimes(3, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1986-01-01T01:30:00")), builders);
+ addNTimes(4, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1986-01-01T02:30:00")), builders);
indexRandom(true, builders);
ensureSearchable();
@@ -305,27 +342,36 @@ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exce
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertBucket(buckets.get(0), new DateTime("1985-12-31T22:00:00", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null,
- null);
- assertBucket(buckets.get(1), new DateTime("1985-12-31T23:00:00", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d,
- 1d / 60d);
+ DateFormatter dateFormatter = DateFormatters.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC);
+
+ ZonedDateTime expectedKeyFirstBucket =
+ LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null,null);
+
+ ZonedDateTime expectedKeySecondBucket =
+ LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d,1d / 60d);
+
// the following is normalized using a 105min bucket width
- assertBucket(buckets.get(2), new DateTime("1986-01-01T01:00:00", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d,
- 1d / 105d);
- assertBucket(buckets.get(3), new DateTime("1986-01-01T02:00:00", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d,
- 1d / 60d);
+ ZonedDateTime expectedKeyThirdBucket =
+ LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d,1d / 105d);
+
+ ZonedDateTime expectedKeyFourthBucket =
+ LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d,1d / 60d);
}
- private static void addNTimes(int amount, String index, DateTime dateTime, List<IndexRequestBuilder> builders) throws Exception {
+ private static void addNTimes(int amount, String index, ZonedDateTime dateTime, List<IndexRequestBuilder> builders) throws Exception {
for (int i = 0; i < amount; i++) {
builders.add(indexDoc(index, dateTime, 1));
}
}
- private static void assertBucket(Histogram.Bucket bucket, DateTime expectedKey, long expectedDocCount,
+ private static void assertBucket(Histogram.Bucket bucket, ZonedDateTime expectedKey, long expectedDocCount,
Matcher
* @param field the name of the date field to use for the date histogram (required)
* @param interval the interval to use for the date histogram (required)
@@ -229,23 +228,14 @@ public static DateHistogramGroupConfig fromXContent(final XContentParser parser)
}
private static Rounding createRounding(final String expr, final String timeZone) {
- DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr);
+ Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr);
final Rounding.Builder rounding;
if (timeUnit != null) {
rounding = new Rounding.Builder(timeUnit);
} else {
rounding = new Rounding.Builder(TimeValue.parseTimeValue(expr, "createRounding"));
}
- rounding.timeZone(toDateTimeZone(timeZone));
+ rounding.timeZone(ZoneId.of(timeZone));
return rounding.build();
}
-
- private static DateTimeZone toDateTimeZone(final String timezone) {
- try {
- return DateTimeZone.forOffsetHours(Integer.parseInt(timezone));
- } catch (NumberFormatException e) {
- return DateTimeZone.forID(timezone);
- }
- }
-
}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java
index 991f9ba332394..3f4e9b10f8de7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java
@@ -10,11 +10,11 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
+import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.DateFieldMapper;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -25,7 +25,7 @@
import java.util.concurrent.TimeUnit;
public class WatcherDateTimeUtils {
- public static final FormatDateTimeFormatter dateTimeFormatter = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER;
+ public static final FormatDateTimeFormatter dateTimeFormatter = Joda.forPattern("strict_date_optional_time||epoch_millis");
public static final DateMathParser dateMathParser = dateTimeFormatter.toDateMathParser();
private WatcherDateTimeUtils() {
@@ -87,7 +87,7 @@ public static DateTime parseDateMathOrNull(String fieldName, XContentParser pars
}
public static DateTime parseDateMath(String valueString, DateTimeZone timeZone, final Clock clock) {
- return new DateTime(dateMathParser.parse(valueString, clock::millis), timeZone);
+ return new DateTime(dateMathParser.parse(valueString, clock::millis).toEpochMilli(), timeZone);
}
public static DateTime parseDate(String fieldName, XContentParser parser, DateTimeZone timeZone) throws IOException {
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java
index 6812aca474749..2219a78055544 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java
@@ -15,9 +15,9 @@
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneOffset;
public class RewriteCachingDirectoryReaderTests extends ESTestCase {
@@ -92,15 +92,15 @@ public void testIsWithinQuery() throws IOException {
dateFieldType.setName("test");
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> 0);
MappedFieldType.Relation relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 0, 10,
- true, true, DateTimeZone.UTC, null, context);
+ true, true, ZoneOffset.UTC, null, context);
assertEquals(relation, MappedFieldType.Relation.WITHIN);
relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 3, 11,
- true, true, DateTimeZone.UTC, null, context);
+ true, true, ZoneOffset.UTC, null, context);
assertEquals(relation, MappedFieldType.Relation.INTERSECTS);
relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 10, 11,
- false, true, DateTimeZone.UTC, null, context);
+ false, true, ZoneOffset.UTC, null, context);
assertEquals(relation, MappedFieldType.Relation.DISJOINT);
}
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java
index 0113634a8824f..d810f490a52a0 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java
@@ -54,11 +54,11 @@ public class TestUtils {
private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer();
public static String dateMathString(String time, final long now) {
- return dateTimeFormatter.print(dateMathParser.parse(time, () -> now));
+ return dateTimeFormatter.print(dateMathParser.parse(time, () -> now).toEpochMilli());
}
public static long dateMath(String time, final long now) {
- return dateMathParser.parse(time, () -> now);
+ return dateMathParser.parse(time, () -> now).toEpochMilli();
}
public static LicenseSpec generateRandomLicenseSpec(int version) {
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
index fe7c5b1a1d104..3a52ff5649cae 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.core.ml.datafeed;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
-
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
@@ -36,13 +35,12 @@
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig.Mode;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.TimeZone;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -330,7 +328,7 @@ public void testBuild_GivenHistogramWithDefaultInterval() {
public void testBuild_GivenDateHistogramWithInvalidTimeZone() {
MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time")
- .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime);
+ .interval(300000L).timeZone(ZoneId.of("CET")).subAggregation(maxTime);
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> createDatafeedWithDateHistogram(dateHistogram));
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java
index 7770def0fae9a..2148929a9ac68 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java
@@ -14,9 +14,8 @@
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
-import java.util.TimeZone;
+import java.time.ZoneId;
import static org.hamcrest.Matchers.equalTo;
@@ -73,7 +72,7 @@ public void testGetHistogramAggregation_MissingHistogramAgg() {
public void testGetHistogramIntervalMillis_GivenDateHistogramWithInvalidTimeZone() {
MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time")
- .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime);
+ .interval(300000L).timeZone(ZoneId.of("CET")).subAggregation(maxTime);
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> ExtractorUtils.getHistogramIntervalMillis(dateHistogram));
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
index d892eb550a17a..605ea6e901a90 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
@@ -28,7 +28,7 @@
import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween;
import static com.carrotsearch.randomizedtesting.generators.RandomPicks.randomFrom;
import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween;
-import static org.elasticsearch.test.ESTestCase.randomDateTimeZone;
+import static org.elasticsearch.test.ESTestCase.randomZone;
public class ConfigTestHelpers {
@@ -71,7 +71,7 @@ public static DateHistogramGroupConfig randomDateHistogramGroupConfig(final Rand
final String field = randomField(random);
final DateHistogramInterval interval = randomInterval();
final DateHistogramInterval delay = random.nextBoolean() ? randomInterval() : null;
- final String timezone = random.nextBoolean() ? randomDateTimeZone().toString() : null;
+ final String timezone = random.nextBoolean() ? randomZone().getId() : null;
return new DateHistogramGroupConfig(field, interval, delay, timezone);
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java
index 415e1a00a60cf..95df682ff5e14 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java
@@ -14,9 +14,9 @@
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -155,28 +155,28 @@ public void testBwcSerialization() throws IOException {
DateHistogramInterval interval = new DateHistogramInterval(in);
String field = in.readString();
DateHistogramInterval delay = in.readOptionalWriteable(DateHistogramInterval::new);
- DateTimeZone timeZone = in.readTimeZone();
+ ZoneId timeZone = in.readZoneId();
- assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getID()));
+ assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getId()));
}
for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
final String field = ConfigTestHelpers.randomField(random());
final DateHistogramInterval interval = ConfigTestHelpers.randomInterval();
final DateHistogramInterval delay = randomBoolean() ? ConfigTestHelpers.randomInterval() : null;
- final DateTimeZone timezone = randomDateTimeZone();
+ final ZoneId timezone = randomZone();
// previous way to serialize a DateHistogramGroupConfig
final BytesStreamOutput out = new BytesStreamOutput();
interval.writeTo(out);
out.writeString(field);
out.writeOptionalWriteable(delay);
- out.writeTimeZone(timezone);
+ out.writeZoneId(timezone);
final StreamInput in = out.bytes().streamInput();
DateHistogramGroupConfig deserialized = new DateHistogramGroupConfig(in);
- assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getID()), deserialized);
+ assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getId()), deserialized);
}
}
}
diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java
index bc847e1a07d58..3428b1e984243 100644
--- a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java
+++ b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java
@@ -13,8 +13,9 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.ml.MachineLearning;
-import org.joda.time.DateTime;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@@ -266,7 +267,7 @@ public void testHRDSplit() throws Exception {
"\"time\": { \"type\": \"date\" } } }");
// Index some data
- DateTime baseTime = new DateTime().minusYears(1);
+ ZonedDateTime baseTime = ZonedDateTime.now().minusYears(1);
TestConfiguration test = tests.get(randomInt(tests.size()-1));
// domainSplit() tests had subdomain, testHighestRegisteredDomainCases() did not, so we need a special case for sub
@@ -276,18 +277,24 @@ public void testHRDSplit() throws Exception {
for (int i = 0; i < 100; i++) {
- DateTime time = baseTime.plusHours(i);
+ ZonedDateTime time = baseTime.plusHours(i);
if (i == 64) {
// Anomaly has 100 docs, but we don't care about the value
for (int j = 0; j < 100; j++) {
- Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO() + "_" + j);
- createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO() + "\"}");
+ String endpoint = "/painless/test/" + time.format(DateTimeFormatter.ISO_DATE_TIME) + "_" + j;
+ Request createDocRequest = new Request("PUT", endpoint);
+ String entity = "{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.format(DateTimeFormatter.ISO_DATE_TIME) +
+ "\"}";
+ createDocRequest.setJsonEntity(entity);
client().performRequest(createDocRequest);
}
} else {
// Non-anomalous values will be what's seen when the anomaly is reported
- Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO());
- createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO() + "\"}");
+ String endpoint = "/painless/test/" + time.format(DateTimeFormatter.ISO_DATE_TIME);
+ Request createDocRequest = new Request("PUT", endpoint);
+ String entity =
+ "{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.format(DateTimeFormatter.ISO_DATE_TIME) + "\"}";
+ createDocRequest.setJsonEntity(entity);
client().performRequest(createDocRequest);
}
}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java
index 190933b1e9316..9cd6cb7e06355 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java
@@ -16,9 +16,11 @@
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.chrono.IsoChronology;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.ScheduledFuture;
@@ -70,9 +72,11 @@ public MlDailyMaintenanceService(ClusterName clusterName, ThreadPool threadPool,
private static TimeValue delayToNextTime(ClusterName clusterName) {
Random random = new Random(clusterName.hashCode());
int minutesOffset = random.ints(0, MAX_TIME_OFFSET_MINUTES).findFirst().getAsInt();
- DateTime now = DateTime.now(ISOChronology.getInstance());
- DateTime next = now.plusDays(1).withTimeAtStartOfDay().plusMinutes(30).plusMinutes(minutesOffset);
- return TimeValue.timeValueMillis(next.getMillis() - now.getMillis());
+
+ Instant instant = ZonedDateTime.now().toInstant();
+ ZonedDateTime now = IsoChronology.INSTANCE.zonedDateTime(instant, ZoneId.systemDefault());
+ ZonedDateTime next = now.plusDays(1).toLocalDate().atStartOfDay(now.getZone()).plusMinutes(30).plusMinutes(minutesOffset);
+ return TimeValue.timeValueMillis(next.toInstant().toEpochMilli() - now.toInstant().toEpochMilli());
}
public void start() {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java
index 952e1c1f27e5a..d986fb736bebe 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java
@@ -32,6 +32,8 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.Date;
import java.util.List;
import java.util.Objects;
@@ -108,8 +110,9 @@ Long runLookBack(long startTime, Long endTime) throws Exception {
}
String msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_FROM_TO,
- DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackStartTimeMs),
- endTime == null ? "real-time" : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackEnd),
+ DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(lookbackStartTimeMs).atZone(ZoneOffset.UTC)),
+ endTime == null ? "real-time" :
+ DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(lookbackEnd).atZone(ZoneOffset.UTC)),
TimeValue.timeValueMillis(frequencyMs).getStringRep());
auditor.info(jobId, msg);
LOGGER.info("[{}] {}", jobId, msg);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java
index 86fe439ac16cb..92c157b8be699 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java
@@ -20,9 +20,9 @@
import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData;
import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.utils.Intervals;
-import org.joda.time.DateTime;
import java.util.Collections;
+import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -131,8 +131,8 @@ private Map checkCurrentBucketEventCount(long start, long end) {
}
private static long toHistogramKeyToEpoch(Object key) {
- if (key instanceof DateTime) {
- return ((DateTime)key).getMillis();
+ if (key instanceof ZonedDateTime) {
+ return ((ZonedDateTime)key).toInstant().toEpochMilli();
} else if (key instanceof Double) {
return ((Double)key).longValue();
} else if (key instanceof Long){
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java
index c934653a6268e..1b9fe37f54c62 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java
@@ -20,10 +20,10 @@
import org.elasticsearch.search.aggregations.metrics.Percentiles;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
-import org.joda.time.DateTime;
import java.io.IOException;
import java.io.OutputStream;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@@ -175,15 +175,15 @@ private void processDateHistogram(Histogram agg) throws IOException {
}
/*
- * Date Histograms have a {@link DateTime} object as the key,
+ * Date Histograms have a {@link ZonedDateTime} object as the key,
* Histograms have either a Double or Long.
*/
private long toHistogramKeyToEpoch(Object key) {
- if (key instanceof DateTime) {
- return ((DateTime)key).getMillis();
+ if (key instanceof ZonedDateTime) {
+ return ((ZonedDateTime)key).toInstant().toEpochMilli();
} else if (key instanceof Double) {
return ((Double)key).longValue();
- } else if (key instanceof Long){
+ } else if (key instanceof Long) {
return (Long)key;
} else {
throw new IllegalStateException("Histogram key [" + key + "] cannot be converted to a timestamp");
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java
index 232cd53a359ce..4223bff49825e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java
@@ -8,7 +8,6 @@
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
-import org.joda.time.base.BaseDateTime;
import java.util.List;
import java.util.Map;
@@ -112,8 +111,6 @@ public Object[] value(SearchHit hit) {
}
if (value[0] instanceof String) { // doc_value field with the epoch_millis format
value[0] = Long.parseLong((String) value[0]);
- } else if (value[0] instanceof BaseDateTime) { // script field
- value[0] = ((BaseDateTime) value[0]).getMillis();
} else if (value[0] instanceof Long == false) { // pre-6.0 field
throw new IllegalStateException("Unexpected value for a time field: " + value[0].getClass());
}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
index 204ae42720433..dd9a6229ec887 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
@@ -14,8 +14,8 @@
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.results.OverallBucket;
import org.elasticsearch.xpack.core.ml.job.results.Result;
-import org.joda.time.DateTime;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -64,8 +64,8 @@ public List computeOverallBuckets(Histogram histogram) {
}
private static Date getHistogramBucketTimestamp(Histogram.Bucket bucket) {
- DateTime bucketTimestamp = (DateTime) bucket.getKey();
- return new Date(bucketTimestamp.getMillis());
+ ZonedDateTime bucketTimestamp = (ZonedDateTime) bucket.getKey();
+ return new Date(bucketTimestamp.toInstant().toEpochMilli());
}
static class TopNScores extends PriorityQueue {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java
index 8364e015a3456..942304475e1f3 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java
@@ -15,9 +15,11 @@
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.ml.utils.VolatileCursorIterator;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.chrono.IsoChronology;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -70,7 +72,9 @@ protected static Iterator createVolatileCursorIterator(List items) {
}
private long calcCutoffEpochMs(long retentionDays) {
- long nowEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis();
+ Instant instant = ZonedDateTime.now().toInstant();
+ long nowEpochMs = IsoChronology.INSTANCE.zonedDateTime(instant, ZoneId.systemDefault())
+ .toInstant().toEpochMilli();
return nowEpochMs - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis();
}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java
index 981d257afa1a0..c662f0059aed0 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java
@@ -35,11 +35,13 @@
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.ml.MachineLearning;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
import java.io.InputStream;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.chrono.IsoChronology;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
@@ -66,7 +68,13 @@ public class ExpiredForecastsRemover implements MlDataRemover {
public ExpiredForecastsRemover(Client client, ThreadPool threadPool) {
this.client = Objects.requireNonNull(client);
this.threadPool = Objects.requireNonNull(threadPool);
- this.cutoffEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis();
+ this.cutoffEpochMs = getNowEpochMs();
+ }
+
+ private long getNowEpochMs() {
+ Instant instant = ZonedDateTime.now().toInstant();
+ return IsoChronology.INSTANCE.zonedDateTime(instant, ZoneId.systemDefault())
+ .toInstant().toEpochMilli();
}
@Override
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java
index 1e5e6fa652db1..ea026b7e397a5 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java
@@ -8,9 +8,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField;
import org.elasticsearch.xpack.ml.test.SearchHitBuilder;
-import org.joda.time.DateTime;
import java.util.Arrays;
@@ -98,13 +96,6 @@ public void testNewTimeFieldGivenSource() {
expectThrows(IllegalArgumentException.class, () -> ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.SOURCE));
}
- public void testValueGivenTimeField() {
- final long millis = randomLong();
- final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build();
- final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE);
- assertThat(timeField.value(hit), equalTo(new Object[] { millis }));
- }
-
public void testValueGivenStringTimeField() {
final long millis = randomLong();
final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build();
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java
index 5e388afad282a..07cb645bcce8c 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java
@@ -17,7 +17,6 @@
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.ml.test.SearchHitBuilder;
-import org.joda.time.DateTime;
import java.util.Arrays;
import java.util.Collections;
@@ -64,13 +63,6 @@ public void testAllTypesOfFields() {
assertThat(extractedFields.getSourceFields(), equalTo(new String[] {"src1", "src2"}));
}
- public void testTimeFieldValue() {
- long millis = randomLong();
- SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build();
- TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField));
- assertThat(extractedFields.timeFieldValue(hit), equalTo(millis));
- }
-
public void testStringTimeFieldValue() {
long millis = randomLong();
SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build();
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java
index 368758654cb9b..647835bf9311e 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java
@@ -87,7 +87,8 @@ public static MonitoringBulkDoc randomMonitoringBulkDoc(final Random random,
final MonitoredSystem system,
final String type) throws IOException {
final String id = random.nextBoolean() ? RandomStrings.randomAsciiLettersOfLength(random, 5) : null;
- final long timestamp = RandomNumbers.randomLongBetween(random, 0L, Long.MAX_VALUE);
+ // ending date is the last second of 9999, should be sufficient
+ final long timestamp = RandomNumbers.randomLongBetween(random, 0L, 253402300799000L);
final long interval = RandomNumbers.randomLongBetween(random, 0L, Long.MAX_VALUE);
return new MonitoringBulkDoc(system, type, id, timestamp, interval, source, xContentType);
}
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java
index 7bc035f7ae236..8dd34e0bef4c5 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java
@@ -61,7 +61,7 @@
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE,
numDataNodes = 1, numClientNodes = 0, transportClientRatio = 0.0, supportsDedicatedMasters = false)
public class LocalExporterIntegTests extends LocalExporterIntegTestCase {
- private final String indexTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM", null);
+ private final String indexTimeFormat = randomFrom("yy", "yyyy", "yyyy.MM", "yyyy-MM", "MM.yyyy", "MM", null);
private void stopMonitoring() {
// Now disabling the monitoring service, so that no more collection are started
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java
index 232034177e87b..59141d2a83aeb 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java
@@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.rollup;
-import org.elasticsearch.common.rounding.DateTimeUnit;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
@@ -16,7 +16,6 @@
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
-import org.joda.time.DateTimeZone;
import java.util.ArrayList;
import java.util.Comparator;
@@ -98,7 +97,7 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List<
DateHistogramInterval interval = new DateHistogramInterval((String)agg.get(RollupField.INTERVAL));
String thisTimezone = (String)agg.get(DateHistogramGroupConfig.TIME_ZONE);
- String sourceTimeZone = source.timeZone() == null ? DateTimeZone.UTC.toString() : source.timeZone().toString();
+ String sourceTimeZone = source.timeZone() == null ? "UTC" : source.timeZone().toString();
// Ensure we are working on the same timezone
if (thisTimezone.equalsIgnoreCase(sourceTimeZone) == false) {
@@ -152,10 +151,10 @@ static boolean validateCalendarInterval(DateHistogramInterval requestInterval,
// The request must be gte the config. The CALENDAR_ORDERING map values are integers representing
// relative orders between the calendar units
- DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString());
- long requestOrder = requestUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
- DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString());
- long configOrder = configUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+ Rounding.DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString());
+ long requestOrder = requestUnit.getField().getBaseUnit().getDuration().toMillis();
+ Rounding.DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString());
+ long configOrder = configUnit.getField().getBaseUnit().getDuration().toMillis();
// All calendar units are multiples naturally, so we just care about gte
return requestOrder >= configOrder;
@@ -387,8 +386,8 @@ private static Comparator getComparator() {
static long getMillisFixedOrCalendar(String value) {
DateHistogramInterval interval = new DateHistogramInterval(value);
if (isCalendarInterval(interval)) {
- DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString());
- return intervalUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+ Rounding.DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString());
+ return intervalUnit.getField().getBaseUnit().getDuration().toMillis();
} else {
return TimeValue.parseTimeValue(value, "date_histo.comparator.interval").getMillis();
}
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
index ee29e56a33169..1d5f9093a29df 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
@@ -28,9 +28,9 @@
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.indexing.IterationResult;
-import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
@@ -42,6 +42,7 @@
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -214,7 +215,7 @@ public static List> createValueSourceBuilders(fi
final DateHistogramValuesSourceBuilder dateHistogramBuilder = new DateHistogramValuesSourceBuilder(dateHistogramName);
dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval());
dateHistogramBuilder.field(dateHistogramField);
- dateHistogramBuilder.timeZone(toDateTimeZone(dateHistogram.getTimeZone()));
+ dateHistogramBuilder.timeZone(ZoneId.of(dateHistogram.getTimeZone()));
return Collections.singletonList(dateHistogramBuilder);
}
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java
index 95161e0d149dc..d05a78e121296 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java
@@ -25,6 +25,7 @@
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@@ -122,14 +123,14 @@ public void testIncompatibleFixedCalendarInterval() {
}
public void testBadTimeZone() {
- final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "EST"));
+ final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "CET"));
final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null);
RollupJobCaps cap = new RollupJobCaps(job);
Set caps = singletonSet(cap);
DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo")
.dateHistogramInterval(new DateHistogramInterval("1h"))
- .timeZone(DateTimeZone.UTC);
+ .timeZone(ZoneOffset.UTC);
RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps));
assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " +
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
index 3dc91ede1bd2c..0032b5a88a563 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
@@ -147,7 +147,7 @@ public void testRangeWrongTZ() {
Set caps = new HashSet<>();
caps.add(cap);
Exception e = expectThrows(IllegalArgumentException.class,
- () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("EST"), caps));
+ () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("CET"), caps));
assertThat(e.getMessage(), equalTo("Field [foo] in [range] query was found in rollup indices, but requested timezone is not " +
"compatible. Options include: [UTC]"));
}
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
index 86891eda669fa..d34e5fd80b611 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
@@ -15,6 +15,7 @@
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.joda.time.DateTimeZone;
+import java.time.zone.ZoneRulesException;
import java.util.HashMap;
import java.util.Map;
@@ -84,9 +85,9 @@ public void testDefaultTimeZone() {
}
public void testUnkownTimeZone() {
- Exception e = expectThrows(IllegalArgumentException.class,
+ Exception e = expectThrows(ZoneRulesException.class,
() -> new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, "FOO"));
- assertThat(e.getMessage(), equalTo("The datetime zone id 'FOO' is not recognised"));
+ assertThat(e.getMessage(), equalTo("Unknown time-zone ID: FOO"));
}
public void testEmptyHistoField() {
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
index f33c1d4e008ba..ea099399bc4c1 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
@@ -29,8 +29,8 @@
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponseSections;
import org.elasticsearch.action.search.ShardSearchFailure;
-import org.elasticsearch.common.joda.Joda;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.Rounding;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.ContentPath;
@@ -58,12 +58,14 @@
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
@@ -449,7 +451,7 @@ static Map asMap(Object... fields) {
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
/**
@@ -488,7 +490,8 @@ private void executeTestCase(List