Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,20 +30,32 @@
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.metrics.Percentile;
import org.elasticsearch.search.aggregations.metrics.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;


public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase {

@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
    // Percentile-ranks aggregation over the given field, pinned to the HDR histogram implementation.
    double[] ranks = new double[] { 0.1, 0.5, 12 };
    PercentileRanksAggregationBuilder builder = new PercentileRanksAggregationBuilder("hdr_ranks", ranks);
    builder.field(fieldName);
    builder.percentilesConfig(new PercentilesConfig.Hdr());
    return builder;
}

@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
    // Percentile ranks only make sense over numeric values.
    List<ValuesSourceType> supported = List.of(CoreValuesSourceType.NUMERIC);
    return supported;
}

public void testEmpty() throws IOException {
PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5})
.field("field")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,14 @@
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.List;
import java.util.function.Consumer;

import static java.util.Arrays.asList;
Expand All @@ -46,6 +50,18 @@

public class HDRPercentilesAggregatorTests extends AggregatorTestCase {

@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
    // Percentiles aggregation over the given field, pinned to the HDR histogram implementation.
    PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("hdr_percentiles");
    builder.field(fieldName);
    builder.percentilesConfig(new PercentilesConfig.Hdr());
    return builder;
}

@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
    // Percentiles only make sense over numeric values.
    List<ValuesSourceType> supported = List.of(CoreValuesSourceType.NUMERIC);
    return supported;
}

public void testNoDocs() throws IOException {
testCase(new MatchAllDocsQuery(), iw -> {
// Intentionally not writing any docs
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,20 +30,32 @@
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.metrics.Percentile;
import org.elasticsearch.search.aggregations.metrics.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;


public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase {

@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
    // Percentile-ranks aggregation over the given field, pinned to the t-digest implementation.
    double[] ranks = new double[] { 0.1, 0.5, 12 };
    PercentileRanksAggregationBuilder builder = new PercentileRanksAggregationBuilder("tdigest_ranks", ranks);
    builder.field(fieldName);
    builder.percentilesConfig(new PercentilesConfig.TDigest());
    return builder;
}

@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
    // Percentile ranks only make sense over numeric values.
    List<ValuesSourceType> supported = List.of(CoreValuesSourceType.NUMERIC);
    return supported;
}

public void testEmpty() throws IOException {
PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5})
.field("field")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,14 @@
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.List;
import java.util.function.Consumer;

import static java.util.Arrays.asList;
Expand All @@ -46,6 +50,18 @@

public class TDigestPercentilesAggregatorTests extends AggregatorTestCase {

@Override
protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
    // Percentiles aggregation over the given field, pinned to the t-digest implementation.
    // NOTE(review): "tdist_percentiles" looks like a typo for "tdigest_percentiles", but it is a
    // runtime aggregation name and is reproduced unchanged here.
    PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("tdist_percentiles");
    builder.field(fieldName);
    builder.percentilesConfig(new PercentilesConfig.TDigest());
    return builder;
}

@Override
protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
    // Percentiles only make sense over numeric values.
    List<ValuesSourceType> supported = List.of(CoreValuesSourceType.NUMERIC);
    return supported;
}

public void testNoDocs() throws IOException {
testCase(new MatchAllDocsQuery(), iw -> {
// Intentionally not writing any docs
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations;

import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
Expand All @@ -41,6 +42,7 @@
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.breaker.CircuitBreaker;
Expand Down Expand Up @@ -74,6 +76,7 @@
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ObjectMapper.Nested;
import org.elasticsearch.index.mapper.RangeFieldMapper;
Expand Down Expand Up @@ -605,7 +608,7 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy
*
* Exception types/messages are not currently checked, just presence/absence of an exception.
*/
public void testSupportedFieldTypes() throws IOException {
public final void testSupportedFieldTypes() throws IOException {
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
String fieldName = "typeTestFieldName";
Expand Down Expand Up @@ -675,67 +678,78 @@ public void testSupportedFieldTypes() throws IOException {
*/
private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException {

if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.NUMERIC)) {
String typeName = fieldType.typeName();
ValuesSourceType vst = fieldType.getValuesSourceType();

if (vst.equals(CoreValuesSourceType.NUMERIC)) {
// TODO note: once VS refactor adds DATE/BOOLEAN, this conditional will go away
if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE)
|| fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
if (typeName.equals(DateFieldMapper.CONTENT_TYPE) || typeName.equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
} else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) {
} else if (typeName.equals(BooleanFieldMapper.CONTENT_TYPE)) {
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomBoolean() ? 0 : 1)));
} else if (typeName.equals(NumberFieldMapper.NumberType.DOUBLE.typeName())) {
long encoded = NumericUtils.doubleToSortableLong(Math.abs(randomDouble()));
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded)));
} else if (typeName.equals(NumberFieldMapper.NumberType.FLOAT.typeName())) {
long encoded = NumericUtils.floatToSortableInt(Math.abs(randomFloat()));
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded)));
} else if (typeName.equals(NumberFieldMapper.NumberType.HALF_FLOAT.typeName())) {
long encoded = HalfFloatPoint.halfFloatToSortableShort(Math.abs(randomFloat()));
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded)));
} else {
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomLong())));
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
}
} else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.BYTES)) {
if (fieldType.typeName().equals(BinaryFieldMapper.CONTENT_TYPE)) {
} else if (vst.equals(CoreValuesSourceType.BYTES)) {
if (typeName.equals(BinaryFieldMapper.CONTENT_TYPE)) {
iw.addDocument(singleton(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes)));
} else if (fieldType.typeName().equals(IpFieldMapper.CONTENT_TYPE)) {
} else if (typeName.equals(IpFieldMapper.CONTENT_TYPE)) {
// TODO note: once VS refactor adds IP, this conditional will go away
boolean v4 = randomBoolean();
iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4))))));
} else {
iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef("a"))));
}
} else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.RANGE)) {
} else if (vst.equals(CoreValuesSourceType.RANGE)) {
Object start;
Object end;
RangeType rangeType;

if (fieldType.typeName().equals(RangeType.DOUBLE.typeName())) {
if (typeName.equals(RangeType.DOUBLE.typeName())) {
start = randomDouble();
end = RangeType.DOUBLE.nextUp(start);
rangeType = RangeType.DOUBLE;
} else if (fieldType.typeName().equals(RangeType.FLOAT.typeName())) {
} else if (typeName.equals(RangeType.FLOAT.typeName())) {
start = randomFloat();
end = RangeType.FLOAT.nextUp(start);
rangeType = RangeType.DOUBLE;
} else if (fieldType.typeName().equals(RangeType.IP.typeName())) {
} else if (typeName.equals(RangeType.IP.typeName())) {
boolean v4 = randomBoolean();
start = randomIp(v4);
end = RangeType.IP.nextUp(start);
rangeType = RangeType.IP;
} else if (fieldType.typeName().equals(RangeType.LONG.typeName())) {
} else if (typeName.equals(RangeType.LONG.typeName())) {
start = randomLong();
end = RangeType.LONG.nextUp(start);
rangeType = RangeType.LONG;
} else if (fieldType.typeName().equals(RangeType.INTEGER.typeName())) {
} else if (typeName.equals(RangeType.INTEGER.typeName())) {
start = randomInt();
end = RangeType.INTEGER.nextUp(start);
rangeType = RangeType.INTEGER;
} else if (fieldType.typeName().equals(RangeType.DATE.typeName())) {
} else if (typeName.equals(RangeType.DATE.typeName())) {
start = randomNonNegativeLong();
end = RangeType.DATE.nextUp(start);
rangeType = RangeType.DATE;
} else {
throw new IllegalStateException("Unknown type of range [" + fieldType.typeName() + "]");
throw new IllegalStateException("Unknown type of range [" + typeName + "]");
}

final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, start, end, true, true);
iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range)))));

} else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.GEOPOINT)) {
} else if (vst.equals(CoreValuesSourceType.GEOPOINT)) {
iw.addDocument(singleton(new LatLonDocValuesField(fieldName, randomDouble(), randomDouble())));
} else {
throw new IllegalStateException("Unknown field type [" + fieldType.typeName() + "]");
throw new IllegalStateException("Unknown field type [" + typeName + "]");
}
}

Expand Down