PR #81272 (merged): Add date fields to the scripting fields api — changes from all commits
docs/changelog/81272.yaml (5 additions, 0 deletions)
@@ -0,0 +1,5 @@
+pr: 81272
+summary: Add date fields to the scripting fields api
+area: Infra/Scripting
+type: enhancement
+issues: []
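In user-facing terms, the enhancement lets Painless scripts read `date` and `date_nanos` fields through the scripting fields API (`field('date').get(defaultValue)`, or the `$('date', defaultValue)` shorthand) rather than only through `doc['date'].value`; the whitelist entries and YAML tests below exercise both forms against single- and multi-valued fields.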
Painless class whitelist
@@ -68,6 +68,16 @@ class org.elasticsearch.script.field.ShortDocValuesField @dynamic_type {
   short get(int, int)
 }
 
+class org.elasticsearch.script.field.DateMillisDocValuesField @dynamic_type {
+  ZonedDateTime get(ZonedDateTime)
+  ZonedDateTime get(int, ZonedDateTime)
+}
+
+class org.elasticsearch.script.field.DateNanosDocValuesField @dynamic_type {
+  ZonedDateTime get(ZonedDateTime)
+  ZonedDateTime get(int, ZonedDateTime)
+}
+
 class org.elasticsearch.script.field.KeywordDocValuesField @dynamic_type {
   String get(String)
   String get(int, String)
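The two overloads on each new class mirror the pattern of the other `*DocValuesField` entries in this whitelist: `get(default)` returns the document's first value, or the supplied default when the document has none, and `get(index, default)` addresses multi-valued fields by position. Both behaviors are asserted by the YAML tests below. A minimal runnable Java sketch of that contract; this is a toy stand-in over a `List`, not the Elasticsearch implementation, which reads Lucene doc values:

```java
import java.time.ZonedDateTime;
import java.util.List;

public class DefaultingGetDemo {
    // Toy stand-in for the defaulting contract of get(def) and get(int, def).
    static ZonedDateTime get(List<ZonedDateTime> values, int index, ZonedDateTime defaultValue) {
        return index < values.size() ? values.get(index) : defaultValue;
    }

    public static void main(String[] args) {
        ZonedDateTime fallback = ZonedDateTime.parse("2018-01-01T12:11:12Z");

        // Document without the field: the caller's default comes back.
        System.out.println(get(List.of(), 0, fallback)); // 2018-01-01T12:11:12Z

        // Multi-valued field: get(1, null) selects the second value.
        List<ZonedDateTime> multi = List.of(
            ZonedDateTime.parse("2017-01-01T12:11:12Z"),
            ZonedDateTime.parse("2018-01-01T12:11:12Z")
        );
        System.out.println(get(multi, 1, null)); // 2018-01-01T12:11:12Z
    }
}
```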
Painless doc-values YAML REST tests
@@ -11,6 +11,8 @@ setup:
           type: boolean
         date:
           type: date
+        nanos:
+          type: date_nanos
         geo_point:
           type: geo_point
         ip:
@@ -49,6 +51,7 @@ setup:
         rank: 1
         boolean: true
         date: 2017-01-01T12:11:12
+        nanos: 2015-01-01T12:10:30.123456789Z
         geo_point: 41.12,-71.34
         ip: 192.168.0.1
         keyword: not split at all
@@ -76,6 +79,8 @@ setup:
       body:
         rank: 3
         boolean: [true, false, true]
+        date: [2017-01-01T12:11:12, 2018-01-01T12:11:12]
+        nanos: [2015-01-01T12:10:30.123456789Z, 2015-01-01T12:10:30.987654321Z]
         keyword: ["one string", "another string"]
         long: [1152921504606846976, 576460752303423488]
         integer: [5, 17, 29]
@@ -228,6 +233,193 @@ setup:
                 source: "doc.date.value"
   - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
 
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "field('date').get(null)"
+  - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "/* avoid yaml stash */ $('date', null)"
+  - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 2 } }
+          script_fields:
+            field:
+              script:
+                source: "field('date').get(ZonedDateTime.parse('2018-01-01T12:11:12.000Z'))"
+  - match: { hits.hits.0.fields.field.0: '2018-01-01T12:11:12.000Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 2 } }
+          script_fields:
+            field:
+              script:
+                source: "/* avoid yaml stash */ $('date', ZonedDateTime.parse('2018-01-01T12:11:12.000Z'))"
+  - match: { hits.hits.0.fields.field.0: '2018-01-01T12:11:12.000Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "doc['nanos'].value"
+  - match: { hits.hits.0.fields.field.0: '2015-01-01T12:10:30.123456789Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "field('nanos').get(null)"
+  - match: { hits.hits.0.fields.field.0: '2015-01-01T12:10:30.123456789Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "/* avoid yaml stash */ $('nanos', null)"
+  - match: { hits.hits.0.fields.field.0: '2015-01-01T12:10:30.123456789Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 2 } }
+          script_fields:
+            field:
+              script:
+                source: "field('nanos').get(ZonedDateTime.parse('2016-01-01T12:10:30.123Z'))"
+  - match: { hits.hits.0.fields.field.0: '2016-01-01T12:10:30.123Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 2 } }
+          script_fields:
+            field:
+              script:
+                source: "/* avoid yaml stash */ $('nanos', ZonedDateTime.parse('2016-01-01T12:10:30.123Z'))"
+  - match: { hits.hits.0.fields.field.0: '2016-01-01T12:10:30.123Z' }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "doc['nanos'].value.getNano()"
+  - match: { hits.hits.0.fields.field.0: 123456789 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "field('nanos').get(null).getNano()"
+  - match: { hits.hits.0.fields.field.0: 123456789 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 1 } }
+          script_fields:
+            field:
+              script:
+                source: "/* avoid yaml stash */ $('nanos', null).getNano()"
+  - match: { hits.hits.0.fields.field.0: 123456789 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 2 } }
+          script_fields:
+            field:
+              script:
+                source: "field('nanos').get(ZonedDateTime.parse('2016-01-01T12:10:30.123Z')).getNano()"
+  - match: { hits.hits.0.fields.field.0: 123000000 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 3 } }
+          script_fields:
+            field:
+              script:
+                source: "field('date').get(1, null)"
+  - match: { hits.hits.0.fields.field.0: "2018-01-01T12:11:12.000Z" }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 3 } }
+          script_fields:
+            field:
+              script:
+                source: "field('nanos').get(1, null)"
+  - match: { hits.hits.0.fields.field.0: "2015-01-01T12:10:30.987654321Z" }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 3 } }
+          script_fields:
+            field:
+              script:
+                source: "List times = new ArrayList(); for (ZonedDateTime zdt : field('date')) times.add(zdt); times"
+  - match: { hits.hits.0.fields.field: ["2017-01-01T12:11:12.000Z", "2018-01-01T12:11:12.000Z"] }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query: { term: { _id: 3 } }
+          script_fields:
+            field:
+              script:
+                source: "List times = new ArrayList(); for (ZonedDateTime zdt : field('nanos')) times.add(zdt); times"
+  - match: { hits.hits.0.fields.field: ["2015-01-01T12:10:30.123456789Z", "2015-01-01T12:10:30.987654321Z"] }
+
 ---
 "geo_point":
   - do:
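Two details in these tests are easy to miss. First, `$('date', d)` is shorthand for `field('date').get(d)`, which is why each `$(...)` test is paired with a `field(...).get(...)` test asserting the same value; the `/* avoid yaml stash */` comment exists because the test runner would otherwise treat a script source beginning with `$` as a stashed YAML variable. Second, the `_id: 2` document (its indexing request sits outside these hunks) evidently has no `date` or `nanos` values, so those searches exercise the default path: `get` returns exactly the supplied `ZonedDateTime`, and `getNano()` on the `2016-01-01T12:10:30.123Z` default is 123000000 because only millisecond precision was supplied.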
ScriptDocValues.java (org.elasticsearch.index.fielddata)
@@ -15,13 +15,10 @@
 import org.elasticsearch.common.geo.GeoBoundingBox;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
-import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.geometry.utils.Geohash;
 import org.elasticsearch.script.field.DocValuesField;
 
 import java.io.IOException;
-import java.time.Instant;
-import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.util.AbstractList;
 import java.util.Comparator;
@@ -161,63 +158,6 @@ public int size() {
         }
     }
 
-    public static class DatesSupplier implements Supplier<ZonedDateTime> {
-
-        private final SortedNumericDocValues in;
-        private final boolean isNanos;
-
-        /**
-         * Values wrapped in {@link java.time.ZonedDateTime} objects.
-         */
-        private ZonedDateTime[] dates;
-        private int count;
-
-        public DatesSupplier(SortedNumericDocValues in, boolean isNanos) {
-            this.in = in;
-            this.isNanos = isNanos;
-        }
-
-        @Override
-        public ZonedDateTime getInternal(int index) {
-            return dates[index];
-        }
-
-        @Override
-        public int size() {
-            return count;
-        }
-
-        @Override
-        public void setNextDocId(int docId) throws IOException {
-            if (in.advanceExact(docId)) {
-                count = in.docValueCount();
-            } else {
-                count = 0;
-            }
-            refreshArray();
-        }
-
-        /**
-         * Refresh the backing array. Package private so it can be called when {@link Longs} loads dates.
-         */
-        private void refreshArray() throws IOException {
-            if (count == 0) {
-                return;
-            }
-            if (dates == null || count > dates.length) {
-                // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size.
-                dates = new ZonedDateTime[count];
-            }
-            for (int i = 0; i < count; ++i) {
-                if (isNanos) {
-                    dates[i] = ZonedDateTime.ofInstant(DateUtils.toInstant(in.nextValue()), ZoneOffset.UTC);
-                } else {
-                    dates[i] = ZonedDateTime.ofInstant(Instant.ofEpochMilli(in.nextValue()), ZoneOffset.UTC);
-                }
-            }
-        }
-    }
-
     public static class Dates extends ScriptDocValues<ZonedDateTime> {
 
         public Dates(Supplier<ZonedDateTime> supplier) {
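The deleted `DatesSupplier` carried the only resolution-specific logic in this file: converting stored epoch milliseconds or epoch nanoseconds into `ZonedDateTime` at UTC. Presumably the equivalent conversion now lives inside the new `DateMillisDocValuesField` and `DateNanosDocValuesField` classes, whose sources are not part of this capture. A runnable sketch of the two conversions, using plain `java.time` in place of Elasticsearch's `DateUtils.toInstant`:

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class DateResolutionDemo {
    public static void main(String[] args) {
        long millis = 1483272672000L;        // doc value for 2017-01-01T12:11:12Z (date)
        long nanos = 1420114230123456789L;   // doc value for 2015-01-01T12:10:30.123456789Z (date_nanos)

        // MILLISECONDS resolution: Instant.ofEpochMilli, as in the removed code.
        ZonedDateTime fromMillis = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC);

        // NANOSECONDS resolution: the removed code used DateUtils.toInstant(nanos);
        // Instant.ofEpochSecond(0, nanos) is the plain-java.time equivalent.
        ZonedDateTime fromNanos = ZonedDateTime.ofInstant(Instant.ofEpochSecond(0, nanos), ZoneOffset.UTC);

        System.out.println(fromMillis);          // 2017-01-01T12:11:12Z
        System.out.println(fromNanos);           // 2015-01-01T12:10:30.123456789Z
        System.out.println(fromNanos.getNano()); // 123456789, as asserted in the YAML tests
    }
}
```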
DateFieldMapper.java (org.elasticsearch.index.mapper)
@@ -33,16 +33,15 @@
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
-import org.elasticsearch.index.fielddata.ScriptDocValues.Dates;
-import org.elasticsearch.index.fielddata.ScriptDocValues.DatesSupplier;
 import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData;
 import org.elasticsearch.index.query.DateRangeIncludingNowQuery;
 import org.elasticsearch.index.query.QueryRewriteContext;
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.script.DateFieldScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptCompiler;
-import org.elasticsearch.script.field.DelegateDocValuesField;
+import org.elasticsearch.script.field.DateMillisDocValuesField;
+import org.elasticsearch.script.field.DateNanosDocValuesField;
 import org.elasticsearch.script.field.ToScriptField;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.lookup.FieldValues;
@@ -81,7 +80,7 @@ public final class DateFieldMapper extends FieldMapper {
     private static final DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser();
 
     public enum Resolution {
-        MILLISECONDS(CONTENT_TYPE, NumericType.DATE, (dv, n) -> new DelegateDocValuesField(new Dates(new DatesSupplier(dv, false)), n)) {
+        MILLISECONDS(CONTENT_TYPE, NumericType.DATE, DateMillisDocValuesField::new) {
             @Override
             public long convert(Instant instant) {
                 return instant.toEpochMilli();
@@ -112,11 +111,7 @@ protected Query distanceFeatureQuery(String field, float boost, long origin, TimeValue pivot) {
                 return LongPoint.newDistanceFeatureQuery(field, boost, origin, pivot.getMillis());
             }
         },
-        NANOSECONDS(
-            DATE_NANOS_CONTENT_TYPE,
-            NumericType.DATE_NANOSECONDS,
-            (dv, n) -> new DelegateDocValuesField(new Dates(new DatesSupplier(dv, true)), n)
-        ) {
+        NANOSECONDS(DATE_NANOS_CONTENT_TYPE, NumericType.DATE_NANOSECONDS, DateNanosDocValuesField::new) {
             @Override
             public long convert(Instant instant) {
                 return toLong(instant);
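The lambdas collapse to constructor references because the factory parameter and the new classes' constructors now line up. The `ToScriptField` interface itself is not shown in this diff; from the old lambda shape `(dv, n) -> ...` and `DateMillisDocValuesField::new`, its single method presumably looks roughly like the hedged sketch below:

```java
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.script.field.DocValuesField;

// Hypothetical reconstruction of the factory shape implied by this call site;
// the real org.elasticsearch.script.field.ToScriptField may differ in method
// name and generics.
@FunctionalInterface
interface ToScriptFieldSketch {
    DocValuesField getScriptField(SortedNumericDocValues docValues, String fieldName);
}
```

Under that shape, `MILLISECONDS(..., DateMillisDocValuesField::new)` implies the new classes expose a `(SortedNumericDocValues, String)` constructor matching the old lambda's `(dv, n)` parameters.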