@@ -18,11 +18,15 @@
*/
package org.elasticsearch.search.aggregations;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.CompositeReaderContext;
import org.apache.lucene.index.DirectoryReader;
@@ -41,19 +45,25 @@
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener;
import org.elasticsearch.index.cache.query.DisabledQueryCache;
@@ -74,6 +84,7 @@
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ObjectMapper.Nested;
import org.elasticsearch.index.mapper.RangeFieldMapper;
@@ -105,6 +116,7 @@
import org.junit.After;

import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -116,7 +128,8 @@
import java.util.function.Function;
import java.util.stream.Collectors;

import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
@@ -141,7 +154,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
List<String> blacklist = new ArrayList<>();
blacklist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects
blacklist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet)
blacklist.add(TextFieldMapper.CONTENT_TYPE); // TODO Does not support doc values, but does support FD, needs a lot of mocking
blacklist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested
blacklist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion
blacklist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias
@@ -595,6 +607,18 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldTy
"createAggBuilderForTypeTest() must be implemented as well.");
}

/**
* A method that allows implementors to specifically blacklist particular field types (based on their type names).
* This is needed in some areas where the ValuesSourceType is not granular enough, for example integer values
* vs floating points, or `keyword` bytes vs `binary` bytes (which are not searchable).
*
* This is a blacklist instead of a whitelist because there are vastly more field types than ValuesSourceTypes,
* and these unsupported cases are expected to be exceptional rather than common.
*/
protected List<String> unsupportedMappedFieldTypes() {
return Collections.emptyList();
}
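/*
 * Illustrative sketch (not part of this change): a subclass whose aggregator can
 * consume every supported values source except, say, half_float values might
 * override the hook like this. The half_float choice here is purely an assumed
 * example, not something this PR prescribes:
 *
 *   @Override
 *   protected List<String> unsupportedMappedFieldTypes() {
 *       return Collections.singletonList(NumberFieldMapper.NumberType.HALF_FLOAT.typeName());
 *   }
 */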

/**
* This test will validate that an aggregator succeeds or fails to run against all the field types
* that are registered in {@link IndicesModule} (e.g. all the core field types). An aggregator
@@ -604,11 +628,12 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldTy
*
* Exception types/messages are not currently checked, just presence/absence of an exception.
*/
public void testSupportedFieldTypes() throws IOException {
public final void testSupportedFieldTypes() throws IOException {
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
String fieldName = "typeTestFieldName";
List<ValuesSourceType> supportedVSTypes = getSupportedValuesSourceTypes();
List<String> unsupportedMappedFieldTypes = unsupportedMappedFieldTypes();

if (supportedVSTypes.isEmpty()) {
// If the test says it doesn't support any VStypes, it has not been converted yet so skip
@@ -627,7 +652,11 @@ public void testSupportedFieldTypes() throws IOException {

Map<String, Object> source = new HashMap<>();
source.put("type", mappedType.getKey());
source.put("doc_values", "true");

// Text is the only core field type that does not support doc values; it relies on fielddata instead
if (mappedType.getKey().equals(TextFieldMapper.CONTENT_TYPE) == false) {
source.put("doc_values", "true");
}

Mapper.Builder builder = mappedType.getValue().parse(fieldName, source, new MockParserContext());
FieldMapper mapper = (FieldMapper) builder.build(new BuilderContext(settings, new ContentPath()));
@@ -648,15 +677,16 @@
IndexSearcher indexSearcher = newIndexSearcher(indexReader);
AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, fieldName);

ValuesSourceType vst = fieldType.getValuesSourceType();
// TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized
try {
searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType);
if (supportedVSTypes.contains(fieldType.getValuesSourceType()) == false) {
if (supportedVSTypes.contains(vst) == false || unsupportedMappedFieldTypes.contains(fieldType.typeName())) {
fail("Aggregator [" + aggregationBuilder.getType() + "] should not support field type ["
+ fieldType.typeName() + "] but executing against the field did not throw an excetion");
+ fieldType.typeName() + "] but executing against the field did not throw an exception");
}
} catch (Exception e) {
if (supportedVSTypes.contains(fieldType.getValuesSourceType())) {
if (supportedVSTypes.contains(vst) && unsupportedMappedFieldTypes.contains(fieldType.typeName()) == false) {
fail("Aggregator [" + aggregationBuilder.getType() + "] supports field type ["
+ fieldType.typeName() + "] but executing against the field threw an exception: [" + e.getMessage() + "]");
}
@@ -674,74 +704,118 @@ public void testSupportedFieldTypes() throws IOException {
*/
private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException {

if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.NUMERIC)) {
String typeName = fieldType.typeName();
ValuesSourceType vst = fieldType.getValuesSourceType();
Document doc = new Document();
String json;

if (vst.equals(CoreValuesSourceType.NUMERIC)) {
// TODO note: once VS refactor adds DATE/BOOLEAN, this conditional will go away
if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE)
|| fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
} else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) {
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomBoolean() ? 0 : 1)));
long v;
if (typeName.equals(DateFieldMapper.CONTENT_TYPE) || typeName.equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
// positive integer because very large longs are out of range for date_nanos
v = Math.abs(randomInt());
json = "{ \"" + fieldName + "\" : \"" + v + "\" }";
} else if (typeName.equals(BooleanFieldMapper.CONTENT_TYPE)) {
v = randomBoolean() ? 0 : 1;
json = "{ \"" + fieldName + "\" : \"" + (v == 0 ? "false" : "true") + "\" }";
} else if (typeName.equals(NumberFieldMapper.NumberType.DOUBLE.typeName())) {
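// SortedNumericDocValuesField can only hold longs, so the double is stored via its
// sortable bit pattern, the same encoding the double field mapper uses for doc values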
double d = Math.abs(randomDouble());
v = NumericUtils.doubleToSortableLong(d);
json = "{ \"" + fieldName + "\" : \"" + d + "\" }";
} else if (typeName.equals(NumberFieldMapper.NumberType.FLOAT.typeName())) {
float f = Math.abs(randomFloat());
v = NumericUtils.floatToSortableInt(f);
json = "{ \"" + fieldName + "\" : \"" + f + "\" }";
} else if (typeName.equals(NumberFieldMapper.NumberType.HALF_FLOAT.typeName())) {
// Respect half float range
float f = Math.abs((randomFloat() * 2 - 1) * 70000);
v = HalfFloatPoint.halfFloatToSortableShort(f);
json = "{ \"" + fieldName + "\" : \"" + f + "\" }";
} else {
iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomLong())));
// byte is the narrowest numeric type, so a random byte is a valid value for every numeric field
v = Math.abs(randomByte());
json = "{ \"" + fieldName + "\" : \"" + v + "\" }";
}
} else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.BYTES)) {
if (fieldType.typeName().equals(BinaryFieldMapper.CONTENT_TYPE)) {
iw.addDocument(singleton(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes)));
} else if (fieldType.typeName().equals(IpFieldMapper.CONTENT_TYPE)) {
doc.add(new SortedNumericDocValuesField(fieldName, v));

} else if (vst.equals(CoreValuesSourceType.BYTES)) {
if (typeName.equals(BinaryFieldMapper.CONTENT_TYPE)) {
doc.add(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes));
json = "{ \"" + fieldName + "\" : \"a\" }";
} else if (typeName.equals(IpFieldMapper.CONTENT_TYPE)) {
// TODO note: once VS refactor adds IP, this conditional will go away
boolean v4 = randomBoolean();
iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4))))));
InetAddress ip = randomIp(randomBoolean());
json = "{ \"" + fieldName + "\" : \"" + NetworkAddress.format(ip) + "\" }";
doc.add(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(ip))));
} else {
iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef("a"))));
doc.add(new SortedSetDocValuesField(fieldName, new BytesRef("a")));
json = "{ \"" + fieldName + "\" : \"a\" }";
}
} else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.RANGE)) {
} else if (vst.equals(CoreValuesSourceType.RANGE)) {
Object start;
Object end;
RangeType rangeType;

if (fieldType.typeName().equals(RangeType.DOUBLE.typeName())) {
if (typeName.equals(RangeType.DOUBLE.typeName())) {
start = randomDouble();
end = RangeType.DOUBLE.nextUp(start);
rangeType = RangeType.DOUBLE;
} else if (fieldType.typeName().equals(RangeType.FLOAT.typeName())) {
} else if (typeName.equals(RangeType.FLOAT.typeName())) {
start = randomFloat();
end = RangeType.FLOAT.nextUp(start);
rangeType = RangeType.FLOAT;
} else if (fieldType.typeName().equals(RangeType.IP.typeName())) {
} else if (typeName.equals(RangeType.IP.typeName())) {
boolean v4 = randomBoolean();
start = randomIp(v4);
end = RangeType.IP.nextUp(start);
rangeType = RangeType.IP;
} else if (fieldType.typeName().equals(RangeType.LONG.typeName())) {
} else if (typeName.equals(RangeType.LONG.typeName())) {
start = randomLong();
end = RangeType.LONG.nextUp(start);
rangeType = RangeType.LONG;
} else if (fieldType.typeName().equals(RangeType.INTEGER.typeName())) {
} else if (typeName.equals(RangeType.INTEGER.typeName())) {
start = randomInt();
end = RangeType.INTEGER.nextUp(start);
rangeType = RangeType.INTEGER;
} else if (fieldType.typeName().equals(RangeType.DATE.typeName())) {
} else if (typeName.equals(RangeType.DATE.typeName())) {
start = randomNonNegativeLong();
end = RangeType.DATE.nextUp(start);
rangeType = RangeType.DATE;
} else {
throw new IllegalStateException("Unknown type of range [" + fieldType.typeName() + "]");
throw new IllegalStateException("Unknown type of range [" + typeName + "]");
}

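// Range fields keep all of a document's ranges in a single binary doc value, so the
// one test range is encoded the same way the range field mapper would encode it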
final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, start, end, true, true);
iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range)))));

} else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.GEOPOINT)) {
iw.addDocument(singleton(new LatLonDocValuesField(fieldName, randomDouble(), randomDouble())));
doc.add(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range))));
json = "{ \"" + fieldName + "\" : { \n" +
" \"gte\" : \"" + start + "\",\n" +
" \"lte\" : \"" + end + "\"\n" +
" }}";
} else if (vst.equals(CoreValuesSourceType.GEOPOINT)) {
double lat = randomDouble();
double lon = randomDouble();
doc.add(new LatLonDocValuesField(fieldName, lat, lon));
json = "{ \"" + fieldName + "\" : \"[" + lon + "," + lat + "]\" }";
} else {
throw new IllegalStateException("Unknown field type [" + fieldType.typeName() + "]");
throw new IllegalStateException("Unknown field type [" + typeName + "]");
}

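// Attach the rendered json as the _source stored field so that any code which loads
// the document source (the top_hits aggregation, for example) has something to read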
doc.add(new StoredField("_source", new BytesRef(json)));
iw.addDocument(doc);
}

private class MockParserContext extends Mapper.TypeParser.ParserContext {
MockParserContext() {
super(null, null, null, null, null);
}

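// Provide a default analyzer so that mappers which need analysis at build time
// (notably the text field mapper, no longer blacklisted above) can be constructed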
@Override
public IndexAnalyzers getIndexAnalyzers() {
NamedAnalyzer defaultAnalyzer = new NamedAnalyzer(AnalysisRegistry.DEFAULT_ANALYZER_NAME,
AnalyzerScope.GLOBAL, new StandardAnalyzer());
return new IndexAnalyzers(singletonMap(AnalysisRegistry.DEFAULT_ANALYZER_NAME, defaultAnalyzer), emptyMap(), emptyMap());
}
}

@After