Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
import org.apache.lucene.search.suggest.document.SuggestField;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
Expand Down Expand Up @@ -74,7 +73,7 @@
* <li>"min_input_length": 50 (default)</li>
* <li>"contexts" : CONTEXTS</li>
* </ul>
* see {@link ContextMappings#load(Object, Version)} for CONTEXTS<br>
* see {@link ContextMappings#load(Object, org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext)} for CONTEXTS<br>
* see {@link #parse(ParseContext)} for acceptable inputs for indexing<br>
* <p>
* This field type constructs completion queries that are run
Expand Down Expand Up @@ -144,7 +143,7 @@ public static class TypeParser implements Mapper.TypeParser {
builder.maxInputLength(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if (Fields.CONTEXTS.match(fieldName, LoggingDeprecationHandler.INSTANCE)) {
builder.contextMappings(ContextMappings.load(fieldNode, parserContext.indexVersionCreated()));
builder.contextMappings(ContextMappings.load(fieldNode, parserContext));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
iterator.remove();
Expand Down Expand Up @@ -376,7 +375,7 @@ public Builder maxInputLength(int maxInputLength) {

/**
* Add context mapping to this field
* @param contextMappings see {@link ContextMappings#load(Object, Version)}
* @param contextMappings see {@link ContextMappings#load(Object, org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext)}
*/
public Builder contextMappings(ContextMappings contextMappings) {
this.contextMappings = contextMappings;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,11 @@
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.StringFieldType;
Expand Down Expand Up @@ -79,7 +79,7 @@ public String getFieldName() {
/**
* Loads a <code>name</code>d {@link CategoryContextMapping} instance
* from a map.
* see {@link ContextMappings#load(Object, Version)}
* see {@link ContextMappings#load(Object, Mapper.TypeParser.ParserContext)}
*
* Acceptable map param: <code>path</code>
*/
Expand Down Expand Up @@ -134,7 +134,7 @@ public Set<CharSequence> parseContext(ParseContext parseContext, XContentParser
}

@Override
public Set<CharSequence> parseContext(Document document) {
public Set<CharSequence> parseContext(Mapper.TypeParser.ParserContext parserContext, Document document) {
Set<CharSequence> values = null;
if (fieldName != null) {
IndexableField[] fields = document.getFields(fieldName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParseContext;

import java.io.IOException;
Expand Down Expand Up @@ -96,7 +97,7 @@ public String name() {
/**
* Retrieves a set of context from a <code>document</code> at index-time.
*/
protected abstract Set<CharSequence> parseContext(ParseContext.Document document);
protected abstract Set<CharSequence> parseContext(Mapper.TypeParser.ParserContext parserContext, ParseContext.Document document);

/**
* Prototype for the query context
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping.Type;

Expand All @@ -54,8 +55,9 @@ public class ContextMappings implements ToXContent {

private final List<ContextMapping<?>> contextMappings;
private final Map<String, ContextMapping<?>> contextNameMap;
private final Mapper.TypeParser.ParserContext parserContext;

public ContextMappings(List<ContextMapping<?>> contextMappings) {
public ContextMappings(List<ContextMapping<?>> contextMappings, Mapper.TypeParser.ParserContext parserContext) {
if (contextMappings.size() > 255) {
// we can support more, but max of 255 (1 byte) unique context types per suggest field
// seems reasonable?
Expand All @@ -66,6 +68,7 @@ public ContextMappings(List<ContextMapping<?>> contextMappings) {
for (ContextMapping<?> mapping : contextMappings) {
contextNameMap.put(mapping.name(), mapping);
}
this.parserContext = parserContext;
}

/**
Expand Down Expand Up @@ -134,7 +137,7 @@ protected Iterable<CharSequence> contexts() {
scratch.setCharAt(0, (char) typeId);
scratch.setLength(1);
ContextMapping<?> mapping = contextMappings.get(typeId);
Set<CharSequence> contexts = new HashSet<>(mapping.parseContext(document));
Set<CharSequence> contexts = new HashSet<>(mapping.parseContext(parserContext, document));
if (this.contexts.get(mapping.name()) != null) {
contexts.addAll(this.contexts.get(mapping.name()));
}
Expand Down Expand Up @@ -216,23 +219,24 @@ public Map<String, Set<CharSequence>> getNamedContexts(List<CharSequence> contex
* [{"name": .., "type": .., ..}, {..}]
*
*/
public static ContextMappings load(Object configuration, Version indexVersionCreated) throws ElasticsearchParseException {
public static ContextMappings load(Object configuration, Mapper.TypeParser.ParserContext parserContext)
throws ElasticsearchParseException {
final List<ContextMapping<?>> contextMappings;
if (configuration instanceof List) {
contextMappings = new ArrayList<>();
List<Object> configurations = (List<Object>) configuration;
for (Object contextConfig : configurations) {
contextMappings.add(load((Map<String, Object>) contextConfig, indexVersionCreated));
contextMappings.add(load((Map<String, Object>) contextConfig, parserContext.indexVersionCreated()));
}
if (contextMappings.size() == 0) {
throw new ElasticsearchParseException("expected at least one context mapping");
}
} else if (configuration instanceof Map) {
contextMappings = Collections.singletonList(load(((Map<String, Object>) configuration), indexVersionCreated));
contextMappings = Collections.singletonList(load(((Map<String, Object>) configuration), parserContext.indexVersionCreated()));
} else {
throw new ElasticsearchParseException("expected a list or an entry of context mapping");
}
return new ContextMappings(contextMappings);
return new ContextMappings(contextMappings, parserContext);
}

private static ContextMapping<?> load(Map<String, Object> contextConfig, Version indexVersionCreated) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@

package org.elasticsearch.search.suggest.completion.context;

import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.geo.GeoPoint;
Expand All @@ -31,6 +31,7 @@
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;

Expand Down Expand Up @@ -180,35 +181,19 @@ public Set<CharSequence> parseContext(ParseContext parseContext, XContentParser
}

@Override
public Set<CharSequence> parseContext(Document document) {
public Set<CharSequence> parseContext(Mapper.TypeParser.ParserContext parserContext, Document document) {
final Set<CharSequence> geohashes = new HashSet<>();

if (fieldName != null) {
MappedFieldType fieldType = parserContext.mapperService().fullName(fieldName);
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new ElasticsearchParseException("cannot parse geo context field [{}], it must be mapped as a geo_point", fieldName);
}
IndexableField[] fields = document.getFields(fieldName);
GeoPoint spare = new GeoPoint();
if (fields.length == 0) {
IndexableField[] lonFields = document.getFields(fieldName + ".lon");
IndexableField[] latFields = document.getFields(fieldName + ".lat");
if (lonFields.length > 0 && latFields.length > 0) {
for (int i = 0; i < lonFields.length; i++) {
IndexableField lonField = lonFields[i];
IndexableField latField = latFields[i];
assert lonField.fieldType().docValuesType() == latField.fieldType().docValuesType();
// we write doc values fields differently: one field for all values, so we need to only care about indexed fields
if (lonField.fieldType().docValuesType() == DocValuesType.NONE) {
spare.reset(latField.numericValue().doubleValue(), lonField.numericValue().doubleValue());
geohashes.add(stringEncode(spare.getLon(), spare.getLat(), precision));
}
}
}
} else {
for (IndexableField field : fields) {
if (field instanceof StringField) {
spare.resetFromString(field.stringValue());
} else {
// todo return this to .stringValue() once LatLonPoint implements it
spare.resetFromIndexableField(field);
}
for (IndexableField field : fields) {
if (field instanceof LatLonPoint || field instanceof LatLonDocValuesField){
// todo return this to .stringValue() once LatLonPoint implements it
spare.resetFromIndexableField(field);
geohashes.add(spare.geohash());
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ public void modify(MappedFieldType ft) {
@Override
public void modify(MappedFieldType ft) {
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
ContextMappings contextMappings = new ContextMappings(Arrays.asList(ContextBuilder.category("foo").build(), ContextBuilder.geo("geo").build()));
ContextMappings contextMappings = new ContextMappings(
Arrays.asList(ContextBuilder.category("foo").build(), ContextBuilder.geo("geo").build()), null);
cft.setContextMappings(contextMappings);
}
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
Expand Down Expand Up @@ -493,14 +494,19 @@ public void testGeoNeighbours() throws Exception {
}

public void testGeoField() throws Exception {
// Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
// Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder mapping = jsonBuilder();
mapping.startObject();
mapping.startObject(TYPE);
mapping.startObject("properties");
mapping.startObject("pin");
mapping.field("type", "geo_point");
// Enable store and disable indexing sometimes
if (randomBoolean()) {
mapping.field("store", "true");
}
if (randomBoolean()) {
mapping.field("index", "false");
}
mapping.endObject();
mapping.startObject(FIELD);
mapping.field("type", "completion");
Expand Down Expand Up @@ -551,6 +557,97 @@ public void testGeoField() throws Exception {
assertEquals("Hotel Amsterdam in Berlin", searchResponse.getSuggest().getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string());
}

    // Verifies that a "geo" completion context whose "path" points at a field that is NOT
    // mapped as geo_point fails at index time with a clear ElasticsearchParseException,
    // regardless of what the offending field's actual mapped type is.
    public void testMalformedGeoField() throws Exception {
        XContentBuilder mapping = jsonBuilder();
        mapping.startObject();
        mapping.startObject(TYPE);
        mapping.startObject("properties");
        mapping.startObject("pin");
        // Randomly pick a non-geo_point type for the context path field; every branch
        // below must produce the same "must be mapped as a geo_point" error.
        String type = randomFrom("text", "keyword", "long", "object");
        mapping.field("type", type);
        mapping.endObject();
        mapping.startObject(FIELD);
        mapping.field("type", "completion");
        mapping.field("analyzer", "simple");

        // Geo context configured with path=pin — the misconfigured field above.
        mapping.startArray("contexts");
        mapping.startObject();
        mapping.field("name", "st");
        mapping.field("type", "geo");
        mapping.field("path", "pin");
        mapping.field("precision", 5);
        mapping.endObject();
        mapping.endArray();

        mapping.endObject();
        mapping.endObject();
        mapping.endObject();
        mapping.endObject();

        assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping));

        if ("object".equals(type)) {
            // Object-typed "pin": supply sub-fields so the document parses up to the
            // completion field, then expect the geo-context type check to reject it.
            XContentBuilder source1 = jsonBuilder()
                .startObject()
                .startObject("pin")
                .field("type", "shape")
                .endObject()
                .startObject(FIELD)
                .array("input", "Hotel Amsterdam in Berlin")
                .endObject()
                .endObject();
            assertThat(expectThrows(ElasticsearchParseException.class, () ->
                client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet()).getMessage(),
                equalTo("cannot parse geo context field [pin], it must be mapped as a geo_point"));

            // Even an object that *looks* geo-ish (a lone "lat" sub-field) must be rejected.
            XContentBuilder source2 = jsonBuilder()
                .startObject()
                .startObject("pin")
                .field("lat", 10.0)
                .endObject()
                .startObject(FIELD)
                .array("input", "Hotel Berlin in Amsterdam")
                .endObject()
                .endObject();
            assertThat(expectThrows(ElasticsearchParseException.class, () ->
                client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet()).getMessage(),
                equalTo("cannot parse geo context field [pin], it must be mapped as a geo_point"));
        } else if ("long".equals(type)) {
            // Numeric "pin": a plain long value can never satisfy a geo context.
            XContentBuilder source2 = jsonBuilder()
                .startObject()
                .field("pin", 1000)
                .startObject(FIELD)
                .array("input", "Hotel Berlin in Amsterdam")
                .endObject()
                .endObject();
            assertThat(expectThrows(ElasticsearchParseException.class, () ->
                client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet()).getMessage(),
                equalTo("cannot parse geo context field [pin], it must be mapped as a geo_point"));
        } else {
            // text/keyword "pin": strings that would be valid geo_point *values*
            // ("lat, lon" and a geohash) must still be rejected, because the check is
            // on the field's mapped type, not on whether the value happens to parse.
            XContentBuilder source1 = jsonBuilder()
                .startObject()
                .field("pin", "52.529172, 13.407333")
                .startObject(FIELD)
                .array("input", "Hotel Amsterdam in Berlin")
                .endObject()
                .endObject();
            assertThat(expectThrows(ElasticsearchParseException.class, () ->
                client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet()).getMessage(),
                equalTo("cannot parse geo context field [pin], it must be mapped as a geo_point"));

            XContentBuilder source2 = jsonBuilder()
                .startObject()
                .field("pin", "u173zhryfg5n")
                .startObject(FIELD)
                .array("input", "Hotel Berlin in Amsterdam")
                .endObject()
                .endObject();
            assertThat(expectThrows(ElasticsearchParseException.class, () ->
                client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet()).getMessage(),
                equalTo("cannot parse geo context field [pin], it must be mapped as a geo_point"));
        }
    }

public void testSkipDuplicatesWithContexts() throws Exception {
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("type", ContextBuilder.category("type").field("type").build());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -724,7 +724,7 @@ public void testParsingContextFromDocument() throws Exception {
document.add(new Field(keyword.name(), new BytesRef("category1"), keyword));
// Ignore doc values
document.add(new SortedSetDocValuesField(keyword.name(), new BytesRef("category1")));
Set<CharSequence> context = mapping.parseContext(document);
Set<CharSequence> context = mapping.parseContext(null, document);
assertThat(context.size(), equalTo(1));
assertTrue(context.contains("category1"));

Expand All @@ -735,23 +735,23 @@ public void testParsingContextFromDocument() throws Exception {
document.add(new Field(text.name(), "category1", text));
// Ignore stored field
document.add(new StoredField(text.name(), "category1", text));
context = mapping.parseContext(document);
context = mapping.parseContext(null, document);
assertThat(context.size(), equalTo(1));
assertTrue(context.contains("category1"));

document = new ParseContext.Document();
document.add(new SortedSetDocValuesField("category", new BytesRef("category")));
context = mapping.parseContext(document);
context = mapping.parseContext(null, document);
assertThat(context.size(), equalTo(0));

document = new ParseContext.Document();
document.add(new SortedDocValuesField("category", new BytesRef("category")));
context = mapping.parseContext(document);
context = mapping.parseContext(null, document);
assertThat(context.size(), equalTo(0));

final ParseContext.Document doc = new ParseContext.Document();
doc.add(new IntPoint("category", 36));
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> mapping.parseContext(doc));
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> mapping.parseContext(null, doc));
assertThat(exc.getMessage(), containsString("Failed to parse context field [category]"));
}

Expand Down
Loading