Merged
RankFeatureFieldMapper.java
@@ -113,6 +113,19 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
throw new IllegalArgumentException("[rank_feature] fields do not support sorting, scripting or aggregating");
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@jtibshirani (Contributor) commented on Oct 6, 2020:

One other change I noticed -- previously we always passed the result of FieldMapper#parsesArrayValue instead of hard-coding the booleans. Now there is nothing really tying this to parsesArrayValue and it could be easily overlooked. I wonder how we could make this more robust.

An update: I opened #63354 with a small improvement.

@Override
protected Float parseSourceValue(Object value) {
return objectToFloat(value);
}
};
}
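
To make the parsesArrayValue concern in the comment above concrete, here is a minimal self-contained sketch (hypothetical names; not necessarily what #63354 does) of routing the mapper flag and the fetcher argument through one constant so the two cannot drift apart:

// Hypothetical sketch only -- not the change from #63354.
public class ParsesArrayValueSketch {

    // Single source of truth for the field's array handling.
    private static final boolean PARSES_ARRAY_VALUE = false;

    // Stand-in for FieldMapper#parsesArrayValue().
    boolean parsesArrayValue() {
        return PARSES_ARRAY_VALUE;
    }

    // Stand-in for the boolean handed to the SourceValueFetcher constructor.
    boolean fetcherHandlesArrays() {
        return PARSES_ARRAY_VALUE;
    }

    public static void main(String[] args) {
        ParsesArrayValueSketch sketch = new ParsesArrayValueSketch();
        // The invariant the review comment wants kept enforced:
        if (sketch.parsesArrayValue() != sketch.fetcherHandlesArrays()) {
            throw new AssertionError("mapper and fetcher disagree on array handling");
        }
        System.out.println("array-handling flags agree");
    }
}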

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new IllegalArgumentException("Queries on [rank_feature] fields are not supported");
@@ -162,27 +175,14 @@ protected void parseCreateField(ParseContext context) throws IOException {
context.doc().addWithKey(name(), new FeatureField("_feature", name(), value));
}

private Float objectToFloat(Object value) {
private static Float objectToFloat(Object value) {
if (value instanceof Number) {
return ((Number) value).floatValue();
} else {
return Float.parseFloat(value.toString());
}
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Float parseSourceValue(Object value) {
return objectToFloat(value);
}
};
}

@Override
protected String contentType() {
return CONTENT_TYPE;
RankFeatureMetaFieldMapper.java

@@ -21,6 +21,7 @@

import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;

import java.util.Collections;

@@ -50,6 +51,11 @@ public String typeName() {
return CONTENT_TYPE;
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}

@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on [_feature]");
RankFeaturesFieldMapper.java

@@ -88,6 +88,19 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
throw new IllegalArgumentException("[rank_features] fields do not support sorting, scripting or aggregating");
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new IllegalArgumentException("Queries on [rank_features] fields are not supported");
@@ -152,19 +165,6 @@ protected void parseCreateField(ParseContext context) {
throw new AssertionError("parse is implemented directly");
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
return value;
}
};
}

@Override
protected String contentType() {
return CONTENT_TYPE;
ScaledFloatFieldMapper.java

@@ -122,7 +122,7 @@ protected List<Parameter<?>> getParameters() {
@Override
public ScaledFloatFieldMapper build(BuilderContext context) {
ScaledFloatFieldType type = new ScaledFloatFieldType(buildFullName(context), indexed.getValue(), stored.getValue(),
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue());
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue(), nullValue.getValue());
The PR author (Contributor) commented:

These telescoping constructors are getting too complicated now -- one idea I have for a follow-up is to add a FieldCharacteristics wrapper object that takes the (indexed, docValues, stored, meta) tuple, analogous to TextSearchInfo, and give both of these 'holder' objects constructors that take Parameter sets directly, so that you can call FieldCharacteristics.build(index, hasDocValues, stored, meta) in build() and remove some of the ceremony.

return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
}
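
A rough sketch of that follow-up idea (hypothetical code, assuming only the shape described in the comment above -- not committed anywhere): a small holder for the (indexed, hasDocValues, stored, meta) tuple, analogous to TextSearchInfo, that the telescoping constructors could take as a single argument.

import java.util.Map;

// Hypothetical holder class, as proposed in the review comment above.
public final class FieldCharacteristics {
    public final boolean indexed;
    public final boolean hasDocValues;
    public final boolean stored;
    public final Map<String, String> meta;

    private FieldCharacteristics(boolean indexed, boolean hasDocValues,
                                 boolean stored, Map<String, String> meta) {
        this.indexed = indexed;
        this.hasDocValues = hasDocValues;
        this.stored = stored;
        this.meta = meta;
    }

    // The real follow-up would take Parameter<?> objects directly; plain
    // values keep this sketch self-contained.
    // Usage sketch: new ScaledFloatFieldType(name,
    //     FieldCharacteristics.build(indexed, hasDocValues, stored, meta),
    //     scalingFactor, nullValue)
    public static FieldCharacteristics build(boolean indexed, boolean hasDocValues,
                                             boolean stored, Map<String, String> meta) {
        return new FieldCharacteristics(indexed, hasDocValues, stored, meta);
    }
}
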
@@ -132,15 +132,17 @@ public ScaledFloatFieldMapper build(BuilderContext context) {
public static final class ScaledFloatFieldType extends SimpleMappedFieldType {

private final double scalingFactor;
private final Double nullValue;

public ScaledFloatFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
Map<String, String> meta, double scalingFactor) {
Map<String, String> meta, double scalingFactor, Double nullValue) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.scalingFactor = scalingFactor;
this.nullValue = nullValue;
}

public ScaledFloatFieldType(String name, double scalingFactor) {
this(name, true, false, true, Collections.emptyMap(), scalingFactor);
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null);
}

public double getScalingFactor() {
@@ -204,6 +206,30 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
};
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, false) {
@Override
protected Double parseSourceValue(Object value) {
double doubleValue;
if (value.equals("")) {
if (nullValue == null) {
return null;
}
doubleValue = nullValue;
} else {
doubleValue = objectToDouble(value);
}

double scalingFactor = getScalingFactor();
return Math.round(doubleValue * scalingFactor) / scalingFactor;
}
};
}
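
For reference, a standalone sanity check of the quantization in parseSourceValue above (my snippet, not PR code): the fetched value is rounded to the precision implied by the scaling factor.

// Standalone demo of the rounding in parseSourceValue (not PR code).
public class ScaledFloatRoundingDemo {
    public static void main(String[] args) {
        double scalingFactor = 100.0;  // e.g. a mapping with scaling_factor: 100
        double value = 3.1415926;
        // Math.round(3.1415926 * 100) == 314, and 314 / 100.0 == 3.14
        double fetched = Math.round(value * scalingFactor) / scalingFactor;
        System.out.println(fetched);   // prints 3.14
    }
}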

@Override
public Object valueForDisplay(Object value) {
if (value == null) {
@@ -380,31 +406,6 @@ private static double objectToDouble(Object value) {
return doubleValue;
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Double parseSourceValue(Object value) {
double doubleValue;
if (value.equals("")) {
if (nullValue == null) {
return null;
}
doubleValue = nullValue;
} else {
doubleValue = objectToDouble(value);
}

double scalingFactor = fieldType().getScalingFactor();
return Math.round(doubleValue * scalingFactor) / scalingFactor;
}
};
}


private static class ScaledFloatIndexFieldData extends IndexNumericFieldData {

private final IndexNumericFieldData scaledFieldData;
SearchAsYouTypeFieldMapper.java

@@ -263,6 +263,11 @@ private ShingleFieldType shingleFieldForPositions(int positions) {
return shingleFields[Math.min(indexFromShingleSize, shingleFields.length - 1)];
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
if (prefixField == null || prefixField.termLengthWithinBounds(value.length()) == false) {
@@ -369,6 +374,11 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool
.build();
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
public String typeName() {
return "prefix";
@@ -405,11 +415,6 @@ protected void parseCreateField(ParseContext context) {
throw new UnsupportedOperationException();
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {

@@ -451,11 +456,6 @@ protected void mergeOptions(FieldMapper other, List<String> conflicts) {

}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
protected String contentType() {
return "shingle";
@@ -478,6 +478,11 @@ void setPrefixFieldType(PrefixFieldType prefixFieldType) {
this.prefixFieldType = prefixFieldType;
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
public String typeName() {
return CONTENT_TYPE;
@@ -573,11 +578,6 @@ protected void parseCreateField(ParseContext context) throws IOException {
}
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
protected String contentType() {
return CONTENT_TYPE;
TokenCountFieldMapper.java

@@ -23,7 +23,6 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Arrays;
@@ -79,6 +78,8 @@ public TokenCountFieldMapper build(BuilderContext context) {
index.getValue(),
store.getValue(),
hasDocValues.getValue(),
false,
nullValue.getValue(),
meta.getValue());
return new TokenCountFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
@@ -129,20 +130,6 @@ protected void parseCreateField(ParseContext context) throws IOException {
);
}

A contributor commented:

After this change, I think value parsing will always fail -- since it uses NumberFieldType, it will attempt to parse a piece of text as a number.

@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected String parseSourceValue(Object value) {
return value.toString();
}
};
}
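
To make the review comment above concrete (a standalone illustration, not PR code): the _source of a token_count field holds the original text, so a numeric parse of the fetched value throws.

// Standalone illustration of the failure mode flagged above (not PR code).
public class TokenCountParseDemo {
    public static void main(String[] args) {
        String sourceValue = "some analyzed text";  // what _source holds for a token_count field
        try {
            // The numeric parse a NumberFieldType-based fetch would attempt:
            Double.parseDouble(sourceValue);
        } catch (NumberFormatException e) {
            System.out.println("parse failed as expected: " + e.getMessage());
        }
    }
}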

/**
* Count position increments in a token stream. Package private for testing.
* @param analyzer analyzer to create token stream
RankFeatureFieldMapperTests.java

@@ -25,10 +25,7 @@
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;

@@ -144,13 +141,4 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}

public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);

assertEquals(List.of(3.14f), fetchSourceValue(mapper, 3.14));
assertEquals(List.of(42.9f), fetchSourceValue(mapper, "42.9"));
}
}
RankFeatureFieldTypeTests.java

@@ -19,12 +19,27 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

public class RankFeatureFieldTypeTests extends FieldTypeTestCase {

public void testIsNotAggregatable() {
MappedFieldType fieldType = new RankFeatureFieldMapper.RankFeatureFieldType("field", Collections.emptyMap(), true);
assertFalse(fieldType.isAggregatable());
}

public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
MappedFieldType mapper = new RankFeatureFieldMapper.Builder("field").build(context).fieldType();

assertEquals(List.of(3.14f), fetchSourceValue(mapper, 3.14));
assertEquals(List.of(42.9f), fetchSourceValue(mapper, "42.9"));
}
}
ScaledFloatFieldMapperTests.java

@@ -21,11 +21,8 @@

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@@ -285,21 +282,4 @@ public void testRejectIndexOptions() {
containsString("Failed to parse mapping: unknown parameter [index_options] on mapper [field] of type [scaled_float]"));
}

public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

ScaledFloatFieldMapper mapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.build(context);
assertEquals(List.of(3.14), fetchSourceValue(mapper, 3.1415926));
assertEquals(List.of(3.14), fetchSourceValue(mapper, "3.1415"));
assertEquals(List.of(), fetchSourceValue(mapper, ""));

ScaledFloatFieldMapper nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.nullValue(2.71)
.build(context);
assertEquals(List.of(2.71), fetchSourceValue(nullValueMapper, ""));
}
}