 */
package org.elasticsearch.search.aggregations;

+ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.BinaryDocValuesField;
+ import org.apache.lucene.document.Document;
+ import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
+ import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.CompositeReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
+ import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+ import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
+ import org.elasticsearch.index.analysis.AnalysisRegistry;
+ import org.elasticsearch.index.analysis.AnalyzerScope;
+ import org.elasticsearch.index.analysis.IndexAnalyzers;
+ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener;
import org.elasticsearch.index.cache.query.DisabledQueryCache;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperService;
+ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ObjectMapper.Nested;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.junit.After;

import java.io.IOException;
+ import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.function.Function;
import java.util.stream.Collectors;

- import static java.util.Collections.singleton;
+ import static java.util.Collections.singletonMap;
+ import static java.util.Collections.emptyMap;
import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
@@ -141,7 +154,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
        List<String> blacklist = new ArrayList<>();
        blacklist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects
        blacklist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet)
-         blacklist.add(TextFieldMapper.CONTENT_TYPE); // TODO Does not support doc values, but does support FD, needs a lot of mocking
        blacklist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested
        blacklist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion
        blacklist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias
@@ -595,6 +607,18 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy
            "createAggBuilderForTypeTest() must be implemented as well.");
    }

+     /**
+      * A method that allows implementors to specifically blacklist particular field types (based on their type name).
+      * This is needed in some areas where the ValuesSourceType is not granular enough, for example integer values
+      * vs floating points, or `keyword` bytes vs `binary` bytes (which are not searchable).
+      *
+      * This is a blacklist instead of a whitelist because there are vastly more field types than ValuesSourceTypes,
+      * and it is expected that these unsupported cases are exceptional rather than common.
+      */
+     protected List<String> unsupportedMappedFieldTypes() {
+         return Collections.emptyList();
+     }
+
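    // Illustrative sketch (assumption, not part of this change): a concrete AggregatorTestCase subclass
    // whose ValuesSourceType support is coarser than the field types it can really handle might override
    // the hook above along these lines; the excluded type here is purely a hypothetical example.
    //
    //     @Override
    //     protected List<String> unsupportedMappedFieldTypes() {
    //         return Collections.singletonList(NumberFieldMapper.NumberType.DOUBLE.typeName());
    //     }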
    /**
     * This test will validate that an aggregator succeeds or fails to run against all the field types
     * that are registered in {@link IndicesModule} (e.g. all the core field types). An aggregator
@@ -604,11 +628,12 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy
     *
     * Exception types/messages are not currently checked, just presence/absence of an exception.
     */
-     public void testSupportedFieldTypes() throws IOException {
+     public final void testSupportedFieldTypes() throws IOException {
        MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
        Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
        String fieldName = "typeTestFieldName";
        List<ValuesSourceType> supportedVSTypes = getSupportedValuesSourceTypes();
+         List<String> unsupportedMappedFieldTypes = unsupportedMappedFieldTypes();

        if (supportedVSTypes.isEmpty()) {
            // If the test says it doesn't support any VStypes, it has not been converted yet so skip
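    // Illustrative sketch (assumption, not part of this change): a converted aggregator test opts into
    // this field-type sweep by implementing the two hooks referenced above. A hypothetical numeric-only
    // aggregator test might provide:
    //
    //     @Override
    //     protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
    //         return Collections.singletonList(CoreValuesSourceType.NUMERIC);
    //     }
    //
    //     @Override
    //     protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
    //         return new MinAggregationBuilder("typeTestAgg").field(fieldName);
    //     }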
@@ -627,7 +652,11 @@ public void testSupportedFieldTypes() throws IOException {

            Map<String, Object> source = new HashMap<>();
            source.put("type", mappedType.getKey());
-             source.put("doc_values", "true");
+
+             // Text is the only field type that does not support doc values; it uses fielddata instead
+             if (mappedType.getKey().equals(TextFieldMapper.CONTENT_TYPE) == false) {
+                 source.put("doc_values", "true");
+             }

            Mapper.Builder builder = mappedType.getValue().parse(fieldName, source, new MockParserContext());
            FieldMapper mapper = (FieldMapper) builder.build(new BuilderContext(settings, new ContentPath()));
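            // For reference (assumption, not part of this change): the source map above is the programmatic
            // equivalent of a mapping snippet such as { "type": "keyword", "doc_values": "true" }, while a
            // text field is parsed without the doc_values entry, i.e. { "type": "text" }.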
@@ -648,15 +677,16 @@ public void testSupportedFieldTypes() throws IOException {
                IndexSearcher indexSearcher = newIndexSearcher(indexReader);
                AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, fieldName);

+                 ValuesSourceType vst = fieldType.getValuesSourceType();
                // TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized
                try {
                    searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType);
-                     if (supportedVSTypes.contains(fieldType.getValuesSourceType()) == false) {
+                     if (supportedVSTypes.contains(vst) == false || unsupportedMappedFieldTypes.contains(fieldType.typeName())) {
                        fail("Aggregator [" + aggregationBuilder.getType() + "] should not support field type ["
-                             + fieldType.typeName() + "] but executing against the field did not throw an excetion");
+                             + fieldType.typeName() + "] but executing against the field did not throw an exception");
                    }
                } catch (Exception e) {
-                     if (supportedVSTypes.contains(fieldType.getValuesSourceType())) {
+                     if (supportedVSTypes.contains(vst) && unsupportedMappedFieldTypes.contains(fieldType.typeName()) == false) {
                        fail("Aggregator [" + aggregationBuilder.getType() + "] supports field type ["
                            + fieldType.typeName() + "] but executing against the field threw an exception: [" + e.getMessage() + "]");
                    }
@@ -674,74 +704,118 @@ public void testSupportedFieldTypes() throws IOException {
     */
    private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException {

-         if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.NUMERIC)) {
+         String typeName = fieldType.typeName();
+         ValuesSourceType vst = fieldType.getValuesSourceType();
+         Document doc = new Document();
+         String json;
+
+         if (vst.equals(CoreValuesSourceType.NUMERIC)) {
            // TODO note: once VS refactor adds DATE/BOOLEAN, this conditional will go away
-             if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE)
-                 || fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
-                 iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
-             } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) {
-                 iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomBoolean() ? 0 : 1)));
+             long v;
+             if (typeName.equals(DateFieldMapper.CONTENT_TYPE) || typeName.equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
+                 // positive integer because date_nanos gets unhappy with large longs
+                 v = Math.abs(randomInt());
+                 json = "{ \"" + fieldName + "\" : \"" + v + "\" }";
+             } else if (typeName.equals(BooleanFieldMapper.CONTENT_TYPE)) {
+                 v = randomBoolean() ? 0 : 1;
+                 json = "{ \"" + fieldName + "\" : \"" + (v == 0 ? "false" : "true") + "\" }";
+             } else if (typeName.equals(NumberFieldMapper.NumberType.DOUBLE.typeName())) {
+                 double d = Math.abs(randomDouble());
+                 v = NumericUtils.doubleToSortableLong(d);
+                 json = "{ \"" + fieldName + "\" : \"" + d + "\" }";
+             } else if (typeName.equals(NumberFieldMapper.NumberType.FLOAT.typeName())) {
+                 float f = Math.abs(randomFloat());
+                 v = NumericUtils.floatToSortableInt(f);
+                 json = "{ \"" + fieldName + "\" : \"" + f + "\" }";
+             } else if (typeName.equals(NumberFieldMapper.NumberType.HALF_FLOAT.typeName())) {
+                 // Respect half float range
+                 float f = Math.abs((randomFloat() * 2 - 1) * 70000);
+                 v = HalfFloatPoint.halfFloatToSortableShort(f);
+                 json = "{ \"" + fieldName + "\" : \"" + f + "\" }";
            } else {
-                 iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomLong())));
+                 // smallest numeric is a byte so we select the smallest
+                 v = Math.abs(randomByte());
+                 json = "{ \"" + fieldName + "\" : \"" + v + "\" }";
            }
-         } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.BYTES)) {
-             if (fieldType.typeName().equals(BinaryFieldMapper.CONTENT_TYPE)) {
-                 iw.addDocument(singleton(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes)));
-             } else if (fieldType.typeName().equals(IpFieldMapper.CONTENT_TYPE)) {
+             doc.add(new SortedNumericDocValuesField(fieldName, v));
+
+         } else if (vst.equals(CoreValuesSourceType.BYTES)) {
+             if (typeName.equals(BinaryFieldMapper.CONTENT_TYPE)) {
+                 doc.add(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes));
+                 json = "{ \"" + fieldName + "\" : \"a\" }";
+             } else if (typeName.equals(IpFieldMapper.CONTENT_TYPE)) {
                // TODO note: once VS refactor adds IP, this conditional will go away
-                 boolean v4 = randomBoolean();
-                 iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4))))));
+                 InetAddress ip = randomIp(randomBoolean());
+                 json = "{ \"" + fieldName + "\" : \"" + NetworkAddress.format(ip) + "\" }";
+                 doc.add(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(ip))));
            } else {
-                 iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef("a"))));
+                 doc.add(new SortedSetDocValuesField(fieldName, new BytesRef("a")));
+                 json = "{ \"" + fieldName + "\" : \"a\" }";
            }
-         } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.RANGE)) {
+         } else if (vst.equals(CoreValuesSourceType.RANGE)) {
            Object start;
            Object end;
            RangeType rangeType;

-             if (fieldType.typeName().equals(RangeType.DOUBLE.typeName())) {
+             if (typeName.equals(RangeType.DOUBLE.typeName())) {
                start = randomDouble();
                end = RangeType.DOUBLE.nextUp(start);
                rangeType = RangeType.DOUBLE;
-             } else if (fieldType.typeName().equals(RangeType.FLOAT.typeName())) {
+             } else if (typeName.equals(RangeType.FLOAT.typeName())) {
                start = randomFloat();
                end = RangeType.FLOAT.nextUp(start);
                rangeType = RangeType.DOUBLE;
-             } else if (fieldType.typeName().equals(RangeType.IP.typeName())) {
+             } else if (typeName.equals(RangeType.IP.typeName())) {
                boolean v4 = randomBoolean();
                start = randomIp(v4);
                end = RangeType.IP.nextUp(start);
                rangeType = RangeType.IP;
-             } else if (fieldType.typeName().equals(RangeType.LONG.typeName())) {
+             } else if (typeName.equals(RangeType.LONG.typeName())) {
                start = randomLong();
                end = RangeType.LONG.nextUp(start);
                rangeType = RangeType.LONG;
-             } else if (fieldType.typeName().equals(RangeType.INTEGER.typeName())) {
+             } else if (typeName.equals(RangeType.INTEGER.typeName())) {
                start = randomInt();
                end = RangeType.INTEGER.nextUp(start);
                rangeType = RangeType.INTEGER;
-             } else if (fieldType.typeName().equals(RangeType.DATE.typeName())) {
+             } else if (typeName.equals(RangeType.DATE.typeName())) {
                start = randomNonNegativeLong();
                end = RangeType.DATE.nextUp(start);
                rangeType = RangeType.DATE;
            } else {
-                 throw new IllegalStateException("Unknown type of range [" + fieldType.typeName() + "]");
+                 throw new IllegalStateException("Unknown type of range [" + typeName + "]");
            }

            final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, start, end, true, true);
-             iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range)))));
-
-         } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.GEOPOINT)) {
-             iw.addDocument(singleton(new LatLonDocValuesField(fieldName, randomDouble(), randomDouble())));
+             doc.add(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range))));
+             json = "{ \"" + fieldName + "\" : { \n" +
+                 " \"gte\" : \"" + start + "\",\n" +
+                 " \"lte\" : \"" + end + "\"\n" +
+                 " }}";
+         } else if (vst.equals(CoreValuesSourceType.GEOPOINT)) {
+             double lat = randomDouble();
+             double lon = randomDouble();
+             doc.add(new LatLonDocValuesField(fieldName, lat, lon));
+             json = "{ \"" + fieldName + "\" : \"[" + lon + "," + lat + "]\" }";
        } else {
-             throw new IllegalStateException("Unknown field type [" + fieldType.typeName() + "]");
+             throw new IllegalStateException("Unknown field type [" + typeName + "]");
        }
+
+         doc.add(new StoredField("_source", new BytesRef(json)));
+         iw.addDocument(doc);
    }
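    // Illustrative sketch (assumption, not part of this change): the NUMERIC branch above stores floating
    // point values through Lucene's sortable encodings because SortedNumericDocValuesField only accepts
    // longs; the same helpers reverse the mapping when the values are read back.
    //
    //     double original = 1.5;
    //     long sortable = NumericUtils.doubleToSortableLong(original);    // written as the doc value
    //     double roundTripped = NumericUtils.sortableLongToDouble(sortable); // == 1.5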

    private class MockParserContext extends Mapper.TypeParser.ParserContext {
        MockParserContext() {
            super(null, null, null, null, null);
        }
+
+         @Override
+         public IndexAnalyzers getIndexAnalyzers() {
+             NamedAnalyzer defaultAnalyzer = new NamedAnalyzer(AnalysisRegistry.DEFAULT_ANALYZER_NAME,
+                 AnalyzerScope.GLOBAL, new StandardAnalyzer());
+             return new IndexAnalyzers(singletonMap(AnalysisRegistry.DEFAULT_ANALYZER_NAME, defaultAnalyzer), emptyMap(), emptyMap());
+         }
    }

    @After