Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
setup:
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This could be in the same file as the rest of the range agg tests, but I'm refactoring that file in a different branch, and wanted to save myself a merge conflict.

# Create a test index whose date field format carries an explicit zone-offset section (ZZZZZ).
- do:
indices.create:
index: test
body:
settings:
number_of_replicas: 0
mappings:
properties:
mydate:
type: date
format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"

# Wait for the index (0 replicas) to be fully allocated before indexing.
- do:
cluster.health:
wait_for_status: green

# Single document at 2021-08-12T01:00:00 with a +02:00 offset — inside the ranges queried below.
- do:
index:
index: test
id: 1
body: { "mydate": "2021-08-12T01:00:00.000000000+02:00" }

# Make the document visible to search.
- do:
indices.refresh: {}

---
"respect offsets in range bounds":
- skip:
version: " - 7.99.99"
reason: "Fixed in 7.16 (backport pending)"
# Range bounds carry an explicit +02:00 offset; the date_range agg must honor that
# offset when resolving the bounds instead of dropping it.
- do:
search:
rest_total_hits_as_int: true
body: {
"query": {
"match_all": {}
},
"aggregations": {
"myagg": {
"date_range": {
"field": "mydate",
"ranges": [
{
"from": "2021-08-12T00:00:00.000000000+02:00",
"to": "2021-08-12T02:00:00.000000000+02:00"
}
]
}
}
}
}
- match: { hits.total: 1 }
- length: { aggregations.myagg.buckets: 1 }
# 2021-08-12T00:00:00+02:00 == 2021-08-11T22:00:00Z == epoch millis 1628719200000.
- match: { aggregations.myagg.buckets.0.from_as_string: "2021-08-11T22:00:00.000000000Z" }
- match: { aggregations.myagg.buckets.0.from: 1628719200000 }
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I wonder if we could add a human-readable date in a comment to help with readability?

# 2021-08-12T02:00:00+02:00 == 2021-08-12T00:00:00Z == epoch millis 1628726400000.
- match: { aggregations.myagg.buckets.0.to_as_string: "2021-08-12T00:00:00.000000000Z" }
- match: { aggregations.myagg.buckets.0.to: 1628726400000 }
- match: { aggregations.myagg.buckets.0.doc_count: 1 }

---
"offsets and timezones play nicely together":
- skip:
version: " - 7.99.99"
reason: "Fixed in 7.16 (backport pending)"
# Same offset-carrying bounds as the previous test, but with an explicit time_zone
# set on the aggregation itself.
- do:
search:
rest_total_hits_as_int: true
body: {
"query": {
"match_all": {}
},
"aggregations": {
"myagg": {
"date_range": {
"time_zone": "America/New_York",
"field": "mydate",
"ranges": [
{
"from": "2021-08-12T00:00:00.000000000+02:00",
"to": "2021-08-12T02:00:00.000000000+02:00"
}
]
}
}
}
}
- match: { hits.total: 1 }
- length: { aggregations.myagg.buckets: 1 }
# The epoch millis are identical to the previous test (the +02:00 offset in the bounds
# is honored), while the *_as_string values are rendered in America/New_York (-04:00).
- match: { aggregations.myagg.buckets.0.from_as_string: "2021-08-11T18:00:00.000000000-04:00" }
- match: { aggregations.myagg.buckets.0.from: 1628719200000 }
- match: { aggregations.myagg.buckets.0.to_as_string: "2021-08-11T20:00:00.000000000-04:00" }
- match: { aggregations.myagg.buckets.0.to: 1628726400000 }
- match: { aggregations.myagg.buckets.0.doc_count: 1 }

Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,9 @@ private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNo
return DateFormatters.from(formatter.parse(value)).toInstant();
} else {
TemporalAccessor accessor = formatter.parse(value);
ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor);
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The docs describe the zone() query as "A lenient query for the ZoneId, falling back to the ZoneOffset", and how do we feel about leniency?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While this would fix the use case where a mapping expects an offset, won't the problem re-surface when a mapping is using a ZoneId?

 public void testParseOffset() {
        // Format string from #76415
        // NOTE(review): despite the method name, this pattern ends in VV (zone id),
        // not the ZZZZZ offset pattern from the issue — it is the zone-parsing
        // variant of the original test (see discussion above the snippet).
        DocValueFormat.DateTime parsesZone = new DocValueFormat.DateTime(
            DateFormatter.forPattern("uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSVV"),
            ZoneOffset.UTC,
            Resolution.MILLISECONDS
        );
        // 2021-08-12T00:00:00 at +02:00, as epoch millis.
        long expected = 1628719200000L;
        ZonedDateTime sample = ZonedDateTime.of(2021, 8, 12, 0, 0, 0, 0, ZoneId.ofOffset("", ZoneOffset.ofHours(2)));
        assertEquals("GUARD: wrong initial millis", expected, sample.toEpochSecond() * 1000);
        // Disabled: the formatter above is anchored to ZoneOffset.UTC, so format(expected)
        // presumably would not render "+02:00" — confirm before re-enabling.
        //assertEquals("GUARD: wrong initial string", "2021-08-12T00:00:00.000000000+02:00", parsesZone.format(expected));
        long actualMillis = parsesZone.parseLong(
            "2021-08-12T00:00:00.000000000CET",
            false,
            () -> { throw new UnsupportedOperationException("don't use now"); }
        );
        assertEquals(expected, actualMillis);
    }

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

actually I just tested this and it would work..

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That's a good test though, we should add it to the suite. Do you mind if I just copy it in?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't mind it at all :) it is a copy of your test with just patterns changed :D
The problem is that it is prone to DST changes: `ZoneId.ofOffset("", ZoneOffset.ofHours(2))` and `2021-08-12T00:00:00.000000000+02:00` will have to account for this.

// Use the offset if provided, otherwise fall back to the zone, or null.
ZoneOffset offset = TemporalQueries.offset().queryFrom(accessor);
ZoneId zoneId = offset == null ? TemporalQueries.zoneId().queryFrom(accessor) : ZoneId.ofOffset("", offset);
if (zoneId != null) {
timeZone = zoneId;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;

Expand Down Expand Up @@ -345,4 +346,45 @@ public void testDateTimeWithTimezone() {
tokyo.parseLong(tokyo.format(millis), false, () -> { throw new UnsupportedOperationException("don't use now"); })
);
}

/**
 * Regression test for https://github.com/elastic/elasticsearch/issues/76415:
 * parsing a value that carries an explicit UTC offset (pattern section ZZZZZ)
 * must resolve to the instant implied by that offset rather than falling back
 * to the format's configured UTC zone.
 */
public void testParseOffset() {
    // 2021-08-12T00:00:00 at +02:00, as epoch millis.
    final long expected = 1628719200000L;
    // Guard: confirm the literal used below really denotes this instant.
    final ZonedDateTime sample = ZonedDateTime.of(2021, 8, 12, 0, 0, 0, 0, ZoneId.ofOffset("", ZoneOffset.ofHours(2)));
    assertEquals("GUARD: wrong initial millis", expected, sample.toEpochSecond() * 1000);
    final DocValueFormat.DateTime parsesZone = new DocValueFormat.DateTime(
        DateFormatter.forPattern("uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"),
        ZoneOffset.UTC,
        Resolution.MILLISECONDS
    );
    final long actualMillis = parsesZone.parseLong(
        "2021-08-12T00:00:00.000000000+02:00",
        false,
        () -> { throw new UnsupportedOperationException("don't use now"); }
    );
    assertEquals(expected, actualMillis);
}

/**
* Make sure fixing 76415 doesn't break parsing zone strings
* (the pattern here ends in VV, a zone id, and the input uses "CET").
*/
public void testParseZone() {
DocValueFormat.DateTime parsesZone = new DocValueFormat.DateTime(
DateFormatter.forPattern("uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSVV"),
ZoneOffset.UTC,
Resolution.MILLISECONDS
);
// 2021-08-12T00:00:00 at +02:00, as epoch millis.
long expected = 1628719200000L;
ZonedDateTime sample = ZonedDateTime.of(2021, 8, 12, 0, 0, 0, 0, ZoneId.ofOffset("", ZoneOffset.ofHours(2)));
assertEquals("GUARD: wrong initial millis", expected, sample.toEpochSecond() * 1000);
// Disabled: the formatter is anchored to ZoneOffset.UTC, so format(expected)
// presumably would not render "+02:00" — confirm before re-enabling.
//assertEquals("GUARD: wrong initial string", "2021-08-12T00:00:00.000000000+02:00", parsesZone.format(expected));
// NOTE(review): this relies on the "CET" zone observing +02:00 (summer time) on
// 2021-08-12 — prone to surprises if the date or zone is changed; see discussion above.
long actualMillis = parsesZone.parseLong(
"2021-08-12T00:00:00.000000000CET",
false,
() -> { throw new UnsupportedOperationException("don't use now"); }
);
assertEquals(expected, actualMillis);
}
}