From 2a1db1aafc83b80a9df5650cd5195d01d27eab41 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Thu, 16 Jul 2015 15:30:43 -0500 Subject: [PATCH 1/3] Deprecate geo_point validate_* and normalize_* No need to carry 3 validate and 3 normalize options in GeoPointFieldMapping. This is just causing confusion so this change deprecates the validate_lat, validate_lon, normalize_lat, and normalize_lon options and simply uses validate and normalize, respectively. Geo filters have also been updated to include a validate and normalize option to provide consistency with the mapping defaults. Unit tests and documentation have been updated. --- .../mapping/types/geo-point-type.asciidoc | 12 --- .../filters/geo-bounding-box-filter.asciidoc | 27 +++++ .../filters/geo-distance-filter.asciidoc | 23 +++++ .../geo-distance-range-filter.asciidoc | 2 +- .../filters/geo-polygon-filter.asciidoc | 24 +++++ .../index/mapper/geo/GeoPointFieldMapper.java | 99 ++++++++++++------- .../query/GeoBoundingBoxFilterParser.java | 22 ++++- .../index/query/GeoDistanceFilterParser.java | 26 +++-- .../query/GeoDistanceRangeFilterParser.java | 24 +++-- .../index/query/GeoPolygonFilterParser.java | 22 +++-- .../search/sort/GeoDistanceSortParser.java | 22 +++-- .../mapper/geo/GeoPointFieldMapperTests.java | 4 +- .../index/search/geo/GeoUtilsTests.java | 17 ++-- .../search/geo/GeoDistanceTests.java | 63 +++++++----- .../search/geo/GeoPolygonTests.java | 54 ++++++++++ 15 files changed, 324 insertions(+), 117 deletions(-) diff --git a/docs/reference/mapping/types/geo-point-type.asciidoc b/docs/reference/mapping/types/geo-point-type.asciidoc index 2d3f3ad6dd2fa..7ea3c62066969 100644 --- a/docs/reference/mapping/types/geo-point-type.asciidoc +++ b/docs/reference/mapping/types/geo-point-type.asciidoc @@ -143,21 +143,9 @@ longitude (default is `true`). *Note*: Validation only works when normalization has been disabled. This option will be deprecated and removed in upcoming releases. 
-|`validate_lat` |Set to `false` to accept geo points with an invalid -latitude (default is `true`). This option will be deprecated and removed -in upcoming releases. - -|`validate_lon` |Set to `false` to accept geo points with an invalid -longitude (default is `true`). This option will be deprecated and removed -in upcoming releases. - |`normalize` |Set to `true` to normalize latitude and longitude (default is `true`). -|`normalize_lat` |Set to `true` to normalize latitude. - -|`normalize_lon` |Set to `true` to normalize longitude. - |`precision_step` |The precision step (influences the number of terms generated for each number value) for `.lat` and `.lon` fields if `lat_lon` is set to `true`. diff --git a/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc b/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc index 7f16ec562d99a..eee90598a86ef 100644 --- a/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc @@ -44,6 +44,33 @@ Then the following simple query can be executed with a } -------------------------------------------------- +[float] +==== Filter Options + +[cols="<,<",options="header",] +|======================================================================= +|Option |Description +|`_name` |Optional name field to identify the filter + +|`_cache` |Set to `true` to cache the *result* of the filter. +Defaults to `false`. See <> below for further details. + +|`_cache_key` |Associate a unique custom cache key to use instead of +the actual filter. + +|`validate` |Set to `false` to accept geo points with invalid latitude or +longitude (default is `true`). *Note*: Validation only works when +normalization has been disabled. This option will be deprecated and removed +in upcoming releases. + +|`normalize` |Set to `true` to normalize latitude and longitude (default +is `true`). 
+ +|`type` |Set to one of `indexed` or `memory` to defines whether this filter will +be executed in memory or indexed. See <> below for further details +Default is `memory`. +|======================================================================= + [float] ==== Accepted Formats diff --git a/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc b/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc index 76c64b8186882..325f0eff0f59c 100644 --- a/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc @@ -158,6 +158,29 @@ The following are options allowed on the filter: sure the `geo_point` type index lat lon in this case), or `none` which disables bounding box optimization. +`_name`:: + + Optional name field to identify the filter + +`_cache`:: + + Set to `true` to cache the *result* of the filter. + Defaults to `false`. See <> below for further details. + +`_cache_key`:: + + Associate a unique custom cache key to use instead of the actual filter. + +`validate`:: + + Set to `false` to accept geo points with invalid latitude or + longitude (default is `true`). *Note*: Validation only works when + normalization has been disabled. This option will be deprecated and removed + in upcoming releases. + +`normalize`:: + + Set to `true` to normalize latitude and longitude. Defaults to `true`. 
[float] ==== geo_point Type diff --git a/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc b/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc index 6da05947023dc..22da604b129c6 100644 --- a/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc @@ -24,7 +24,7 @@ Filters documents that exists within a range from a specific point: } -------------------------------------------------- -Supports the same point location parameter as the +Supports the same point location parameter and filter options as the <> filter. And also support the common parameters for range (lt, lte, gt, gte, from, to, include_upper and include_lower). diff --git a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc index a4212343eff81..9fb69d633bdf1 100644 --- a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc @@ -26,6 +26,30 @@ points. Here is an example: } -------------------------------------------------- +[float] +==== Filter Options + +[cols="<,<",options="header",] +|======================================================================= +|Option |Description +|`_name` |Optional name field to identify the filter + +|`_cache` |Set to `true` to cache the *result* of the filter. +Defaults to `false`. See <> below. + +|`_cache_key` |Associate a unique custom cache key to use instead of +the actual filter. + +|`validate` |Set to `false` to accept geo points with invalid latitude or +longitude (default is `true`). *Note*: Validation only works when +normalization has been disabled. This option will be deprecated and removed +in upcoming releases. + +|`normalize` |Set to `false` to prevent latitude and longitude from auto converting +to proper lat/lon bounds. +Default is `true'. 
+|======================================================================= + [float] ==== Allowed Formats diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 54e65de5eceb1..3dff3c03b7c9e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalStateException; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; @@ -90,10 +91,13 @@ public static class Defaults { public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION; + public static final boolean BACK_COMPAT = true; public static final boolean NORMALIZE_LAT = true; public static final boolean NORMALIZE_LON = true; public static final boolean VALIDATE_LAT = true; public static final boolean VALIDATE_LON = true; + public static final boolean NORMALIZE = true; + public static final boolean VALIDATE = true; public static final FieldType FIELD_TYPE = new FieldType(StringFieldMapper.Defaults.FIELD_TYPE); @@ -120,10 +124,13 @@ public static class Builder extends AbstractFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { Builder builder = geoPointField(name); + builder.backCompat = parserContext.indexVersionCreated().before(Version.V_1_7_1); parseField(builder, name, node, parserContext); for (Map.Entry entry : node.entrySet()) { String fieldName = Strings.toUnderscoreCase(entry.getKey()); @@ -231,18 +239,24 @@ public static class TypeParser 
implements Mapper.TypeParser { builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString())); } } else if (fieldName.equals("validate")) { - builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); - builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (fieldName.equals("validate_lon")) { + builder.validate = XContentMapValues.nodeBooleanValue(fieldNode); + if (builder.backCompat) { + builder.validateLat = builder.validate; + builder.validateLon = builder.validate; + } + } else if (builder.backCompat && fieldName.equals("validate_lon")) { + builder.validate = false; builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (fieldName.equals("validate_lat")) { + } else if (builder.backCompat && fieldName.equals("validate_lat")) { + builder.validate = false; builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); } else if (fieldName.equals("normalize")) { + builder.normalize = XContentMapValues.nodeBooleanValue(fieldNode); + } else if (builder.backCompat && fieldName.equals("normalize_lat")) { + builder.normalize = false; builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); - builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (fieldName.equals("normalize_lat")) { - builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (fieldName.equals("normalize_lon")) { + } else if (builder.backCompat && fieldName.equals("normalize_lon")) { + builder.normalize = false; builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); } else { parseMultiField(builder, name, parserContext, fieldName, fieldNode); @@ -395,20 +409,22 @@ public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { private final StringFieldMapper geohashMapper; + private boolean backCompat; + private boolean validateLon; private boolean validateLat; + private boolean validate; private final boolean normalizeLon; private 
final boolean normalizeLat; - - public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean docValues, - NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - PostingsFormatProvider postingsFormat, DocValuesFormatProvider docValuesFormat, - SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings, - ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, - boolean validateLon, boolean validateLat, - boolean normalizeLon, boolean normalizeLat, MultiFields multiFields) { + private boolean normalize; + + public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer, + PostingsFormatProvider postingsFormat, DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity, + @Nullable Settings fieldDataSettings, Settings indexSettings, ContentPath.Type pathType, boolean enableLatLon, + boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, DoubleFieldMapper latMapper, + DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, boolean backCompat, boolean validate, boolean validateLat, + boolean validateLon, boolean normalize, boolean normalizeLat, boolean normalizeLon, MultiFields multiFields) { super(names, 1f, fieldType, docValues, null, indexAnalyzer, postingsFormat, docValuesFormat, similarity, null, fieldDataSettings, indexSettings, multiFields, null); this.pathType = pathType; this.enableLatLon = enableLatLon; @@ -421,9 +437,13 @@ public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean this.lonMapper = lonMapper; this.geohashMapper = geohashMapper; + this.backCompat = backCompat; + + this.validate = validate; this.validateLat = validateLat; this.validateLon = validateLon; + this.normalize = 
normalize; this.normalizeLat = normalizeLat; this.normalizeLon = normalizeLon; } @@ -553,17 +573,14 @@ private void parsePointFromString(ParseContext context, GeoPoint sparse, String } private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException { - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); - } - - if (validateLat) { - if (point.lat() > 90.0 || point.lat() < -90.0) { + // if the coordinate is normalized there is no need for a validation check + if (backCompat || normalize) { + GeoUtils.normalizePoint(point, (backCompat) ? normalizeLat : normalize, (backCompat) ? normalizeLon : normalize); + } else if (backCompat || validate) { + if ((validate || (backCompat && validateLat)) && (point.lat() > 90.0 || point.lat() < -90.0)) { throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); } - } - if (validateLon) { - if (point.lon() > 180.0 || point.lon() < -180) { + if ((validate || (backCompat && validateLon)) && (point.lon() > 180.0 || point.lon() < -180)) { throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); } } @@ -628,19 +645,25 @@ public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappi if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix"); } - if (this.normalizeLat != fieldMergeWith.normalizeLat) { + if (!this.backCompat && this.normalize != fieldMergeWith.normalize) { + mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize"); + } + if (this.backCompat && this.normalizeLat != fieldMergeWith.normalizeLat) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lat"); } - if (this.normalizeLon != fieldMergeWith.normalizeLon) { + if (this.backCompat && this.normalizeLon != 
fieldMergeWith.normalizeLon) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lon"); } if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different precision_step"); } - if (this.validateLat != fieldMergeWith.validateLat) { + if (!this.backCompat && this.validate != fieldMergeWith.validate) { + mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate"); + } + if (this.backCompat && this.validateLat != fieldMergeWith.validateLat) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lat"); } - if (this.validateLon != fieldMergeWith.validateLon) { + if (this.backCompat && this.validateLon != fieldMergeWith.validateLon) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lon"); } } @@ -682,7 +705,10 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, if (includeDefaults || precisionStep != null) { builder.field("precision_step", precisionStep); } - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT || validateLon != Defaults.VALIDATE_LON) { + if (!backCompat && (includeDefaults || validate != Defaults.VALIDATE)) { + builder.field("validate", validate); + } + if (backCompat && (includeDefaults || validateLat != Defaults.VALIDATE_LAT || validateLon != Defaults.VALIDATE_LON)) { if (validateLat && validateLon) { builder.field("validate", true); } else if (!validateLat && !validateLon) { @@ -696,7 +722,10 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, } } } - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != Defaults.NORMALIZE_LON) { + if (!backCompat && (includeDefaults || normalize != Defaults.NORMALIZE)) { + builder.field("normalize", normalize); + } + if (backCompat && (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != 
Defaults.NORMALIZE_LON)) { if (normalizeLat && normalizeLon) { builder.field("normalize", true); } else if (!normalizeLat && !normalizeLon) { diff --git a/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java index 80eb326d723e9..89ca30deba647 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java @@ -85,6 +85,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar String filterName = null; String currentFieldName = null; XContentParser.Token token; + boolean validate = true; boolean normalize = true; GeoPoint sparse = new GeoPoint(); @@ -143,6 +144,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cache = parser.booleanValue(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new CacheKeyFilter.Key(parser.text()); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { normalize = parser.booleanValue(); } else if ("type".equals(currentFieldName)) { @@ -157,8 +160,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar final GeoPoint bottomRight = new GeoPoint(bottom, right); if (normalize) { - // Special case: if the difference bettween the left and right is 360 and the right is greater than the left, we are asking for - // the complete longitude range so need to set longitude to the complete longditude range + // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for + // the complete longitude range so need to set longitude to the complete longitude range boolean completeLonRange = ((right - left) % 360 == 0 && right > left); 
GeoUtils.normalizePoint(topLeft, true, !completeLonRange); GeoUtils.normalizePoint(bottomRight, true, !completeLonRange); @@ -166,6 +169,21 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar topLeft.resetLon(-180); bottomRight.resetLon(180); } + } else if (validate) { + if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + topLeft.lat() + "] for " + filterName); + } + if (topLeft.lon() > 180.0 || topLeft.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + topLeft.lon() + "] for " + filterName); + } + if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + bottomRight.lat() + "] for " + + filterName); + } + if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + bottomRight.lon() + "] for " + + filterName); + } } MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); diff --git a/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java index c0aeb9418a51b..ae3db762072e3 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java @@ -76,8 +76,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; String optimizeBbox = "memory"; - boolean normalizeLon = true; - boolean normalizeLat = true; + + boolean validate = true; + boolean normalize = true; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { 
currentFieldName = parser.currentName(); @@ -131,9 +133,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cacheKey = new CacheKeyFilter.Key(parser.text()); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else { point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -141,6 +144,17 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } + if (normalize) { + GeoUtils.normalizePoint(point, normalize, normalize); + } else if (validate) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + point.lon() + "] for " + filterName); + } + } + if (vDistance == null) { throw new QueryParsingException(parseContext.index(), "geo_distance requires 'distance' to be specified"); } else if (vDistance instanceof Number) { @@ -150,10 +164,6 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT); - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); - } - MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); if (smartMappers == null || !smartMappers.hasMapper()) { throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]"); diff --git 
a/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java index b2926307c5ce6..5833cb92960be 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java @@ -78,8 +78,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; String optimizeBbox = "memory"; - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -162,9 +162,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cacheKey = new CacheKeyFilter.Key(parser.text()); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else { point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -172,6 +173,17 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } + if (normalize) { + GeoUtils.normalizePoint(point, normalize, normalize); + } else if (validate) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude 
value [" + point.lon() + "] for " + filterName); + } + } + Double from = null; Double to = null; if (vFrom != null) { @@ -191,10 +203,6 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar to = geoDistance.normalize(to, DistanceUnit.DEFAULT); } - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); - } - MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); if (smartMappers == null || !smartMappers.hasMapper()) { throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]"); diff --git a/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java index 0591c4b6b2665..d4bbadf9bc6e2 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java @@ -74,8 +74,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar List shell = Lists.newArrayList(); - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; String filterName = null; String currentFieldName = null; @@ -109,9 +109,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cache = parser.booleanValue(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new CacheKeyFilter.Key(parser.text()); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else { throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]"); } @@ -135,9 +136,18 @@ 
public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } - if (normalizeLat || normalizeLon) { + if (normalize) { for (GeoPoint point : shell) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); + GeoUtils.normalizePoint(point, normalize, normalize); + } + } else if (validate) { + for (GeoPoint point : shell) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + point.lon() + "] for " + filterName); + } } } diff --git a/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index ead6d6ea7d45c..a62e6c7b0c824 100644 --- a/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -66,8 +66,8 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce MultiValueMode sortMode = null; NestedInnerQueryParseSupport nestedHelper = null; - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; XContentParser.Token token; String currentName = parser.currentName(); @@ -100,9 +100,10 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce unit = DistanceUnit.fromString(parser.text()); } else if (currentName.equals("distance_type") || currentName.equals("distanceType")) { geoDistance = GeoDistance.fromString(parser.text()); + } else if ("validate".equals(currentName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else if 
("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) { sortMode = MultiValueMode.fromString(parser.text()); } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) { @@ -119,9 +120,18 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce } } - if (normalizeLat || normalizeLon) { + if (normalize) { for (GeoPoint point : geoPoints) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); + GeoUtils.normalizePoint(point, normalize, normalize); + } + } else if (validate) { + for (GeoPoint point : geoPoints) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for geo distance sort"); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for geo distance sort"); + } } } diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 48d86a7449aec..2d0b8f6f86d7a 100644 --- a/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -488,9 +488,9 @@ public void testGeoPointMapperMerge() throws Exception { DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(false)); assertThat(mergeResult.hasConflicts(), equalTo(true)); - assertThat(mergeResult.conflicts().length, equalTo(2)); + assertThat(mergeResult.conflicts().length, equalTo(1)); // todo better way of checking conflict? 
- assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.conflicts())))); + assertThat("mapper [point] has different validate", isIn(new ArrayList<>(Arrays.asList(mergeResult.conflicts())))); // correct mapping and ensure no failures stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") diff --git a/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index 58787077565f7..d93d2e8ab386c 100644 --- a/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -339,8 +339,7 @@ public void testNormalizePoint_outsideNormalRange() { @Test public void testNormalizePoint_outsideNormalRange_withOptions() { for (int i = 0; i < 100; i++) { - boolean normLat = randomBoolean(); - boolean normLon = randomBoolean(); + boolean normalize = randomBoolean(); double normalisedLat = (randomDouble() * 180.0) - 90.0; double normalisedLon = (randomDouble() * 360.0) - 180.0; int shiftLat = randomIntBetween(1, 10000); @@ -350,23 +349,19 @@ public void testNormalizePoint_outsideNormalRange_withOptions() { double expectedLat; double expectedLon; - if (normLat) { + if (normalize) { expectedLat = normalisedLat * (shiftLat % 2 == 0 ? 1 : -1); - } else { - expectedLat = testLat; - } - if (normLon) { - expectedLon = normalisedLon + ((normLat && shiftLat % 2 == 1) ? 180 : 0); + expectedLon = normalisedLon + ((shiftLat % 2 == 1) ? 180 : 0); if (expectedLon > 180.0) { expectedLon -= 360; } } else { - double shiftValue = normalisedLon > 0 ? -180 : 180; - expectedLon = testLon + ((normLat && shiftLat % 2 == 1) ? 
shiftValue : 0); + expectedLat = testLat; + expectedLon = testLon; } GeoPoint testPoint = new GeoPoint(testLat, testLon); GeoPoint expectedPoint = new GeoPoint(expectedLat, expectedLon); - GeoUtils.normalizePoint(testPoint, normLat, normLon); + GeoUtils.normalizePoint(testPoint, normalize, normalize); assertThat("Unexpected Latitude", testPoint.lat(), closeTo(expectedPoint.lat(), MAX_ACCEPTABLE_ERROR)); assertThat("Unexpected Longitude", testPoint.lon(), closeTo(expectedPoint.lon(), MAX_ACCEPTABLE_ERROR)); } diff --git a/src/test/java/org/elasticsearch/search/geo/GeoDistanceTests.java b/src/test/java/org/elasticsearch/search/geo/GeoDistanceTests.java index d6321b2791e9c..bd31e3216ebc3 100644 --- a/src/test/java/org/elasticsearch/search/geo/GeoDistanceTests.java +++ b/src/test/java/org/elasticsearch/search/geo/GeoDistanceTests.java @@ -210,8 +210,8 @@ public void simpleDistanceTests() throws Exception { public void testDistanceSortingMVFields() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true) - .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject() - .endObject().endObject(); + .field("normalize", true).startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject() + .endObject().endObject().endObject(); assertAcked(prepareCreate("test") .addMapping("type1", xContentBuilder)); ensureGreen(); @@ -222,6 +222,11 @@ public void testDistanceSortingMVFields() throws Exception { .endObject()).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() + .field("names", "New York 2") + .startObject("locations").field("lat", 400.7143528).field("lon", 285.9940269).endObject() + .endObject()).execute().actionGet(); + + client().prepareIndex("test", "type1", 
"3").setSource(jsonBuilder().startObject() .field("names", "Times Square", "Tribeca") .startArray("locations") // to NY: 5.286 km @@ -231,7 +236,7 @@ public void testDistanceSortingMVFields() throws Exception { .endArray() .endObject()).execute().actionGet(); - client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject() + client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject() .field("names", "Wall Street", "Soho") .startArray("locations") // to NY: 1.055 km @@ -242,7 +247,7 @@ public void testDistanceSortingMVFields() throws Exception { .endObject()).execute().actionGet(); - client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject() + client().prepareIndex("test", "type1", "5").setSource(jsonBuilder().startObject() .field("names", "Greenwich Village", "Brooklyn") .startArray("locations") // to NY: 2.029 km @@ -259,70 +264,76 @@ public void testDistanceSortingMVFields() throws Exception { .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC)) .execute().actionGet(); - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "1", "2", "3", "4"); + assertHitCount(searchResponse, 5); + assertOrderedSearchHits(searchResponse, "1", "2", "3", "4", "5"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); + assertThat(((Number) 
searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); // Order: Asc, Mode: max searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max")) .execute().actionGet(); - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); + assertHitCount(searchResponse, 5); + assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); // Order: Desc searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC)) .execute().actionGet(); - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); + assertHitCount(searchResponse, 5); + assertOrderedSearchHits(searchResponse, "5", "3", "4", "1", "2"); assertThat(((Number) 
searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); // Order: Desc, Mode: min searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min")) .execute().actionGet(); - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "4", "3", "2", "1"); + assertHitCount(searchResponse, 5); + assertOrderedSearchHits(searchResponse, "5", "4", "3", "1", "2"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC)) .execute().actionGet(); - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); + assertHitCount(searchResponse, 5); + assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5"); assertThat(((Number) 
searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1157d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2874d, 10d)); - assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2874d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d)); searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC)) .execute().actionGet(); - assertHitCount(searchResponse, 4); - assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); + assertHitCount(searchResponse, 5); + assertOrderedSearchHits(searchResponse, "5", "3", "4", "1", "2"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); + assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertFailures(client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, 
-74.0059731).sortMode("sum")), @@ -636,7 +647,7 @@ public void testGeoDistanceFilter() throws IOException { .execute().actionGet(); assertHitCount(result, 1); - } + } private double randomLon() { return randomDouble() * 360 - 180; diff --git a/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java b/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java index d370041a24a13..d4f61a61109ec 100644 --- a/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java +++ b/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java @@ -101,4 +101,58 @@ public void simplePolygonTest() throws Exception { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } } + + @Test + public void polygonNormalizationTest() throws Exception { + String invalidPolyFilter = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 400.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"normalize\": false}}"; + + // test proper error handling for invalid poly (validation enabled, normalization disabled) + try { + client().prepareSearch("test") // from NY + .setQuery(matchAllQuery()).setPostFilter(invalidPolyFilter).execute().actionGet(); + } catch (Exception e) { + if (!e.getMessage().contains("illegal latitude value")) + throw new Exception("expected: 'QueryParsingException on latitude'\n got: " + e.getMessage()); + } + + // test correct filtering with out-of-range poly (validation enabled, normalization enabled) + String validPolyFilter = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 400.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"validate\": true, \"normalize\": true}}"; + SearchResponse searchResponse = client().prepareSearch("test") + 
.setQuery(matchAllQuery()).setPostFilter(validPolyFilter).execute().actionGet(); + assertHitCount(searchResponse, 4); + assertThat(searchResponse.getHits().hits().length, equalTo(4)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); + } + + // test error handling for invalid poly + String invalidPoly = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 400.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"validate\": false, \"normalize\": false}}"; + + // invalid poly with validation and normalization both disabled (NOTE: no exception is expected in this configuration — confirm intent) + try { + client().prepareSearch("test") // from NY + .setQuery(matchAllQuery()).setPostFilter(invalidPoly).execute().actionGet(); + } catch (Exception e) { + if (!e.getMessage().contains("illegal latitude value")) + throw new Exception("expected: 'QueryParsingException on latitude'\n got: " + e.getMessage()); + } + + // test correct filtering with valid poly (validation disabled, normalization disabled) + String validPoly = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 40.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"validate\": false, \"normalize\": false}}"; + searchResponse = client().prepareSearch("test") + .setQuery(matchAllQuery()).setPostFilter(validPoly).execute().actionGet(); + assertHitCount(searchResponse, 4); + assertThat(searchResponse.getHits().hits().length, equalTo(4)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); + } + } } From 72610166395898f981a9c2d779dd690c8710f95b Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Fri, 17 Jul 2015 14:57:43 -0500 Subject: [PATCH 
2/3] migrating from normalize and validate to coerce and ignore_malformed --- .../index/mapper/geo/GeoPointFieldMapper.java | 183 +++++++++++++----- 1 file changed, 131 insertions(+), 52 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 3dff3c03b7c9e..365b67cd32b9d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -30,6 +30,7 @@ import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.Version; +import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; @@ -82,6 +83,8 @@ public static class Names { public static final String LON_SUFFIX = "." + LON; public static final String GEOHASH = "geohash"; public static final String GEOHASH_SUFFIX = "." 
+ GEOHASH; + public static final String COERCE = "coerce"; + public static final String IGNORE_MALFORMED = "ignore_malformed"; } public static class Defaults { @@ -96,8 +99,9 @@ public static class Defaults { public static final boolean NORMALIZE_LON = true; public static final boolean VALIDATE_LAT = true; public static final boolean VALIDATE_LON = true; - public static final boolean NORMALIZE = true; - public static final boolean VALIDATE = true; + + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + public static final Explicit COERCE = new Explicit<>(false, false); public static final FieldType FIELD_TYPE = new FieldType(StringFieldMapper.Defaults.FIELD_TYPE); @@ -129,14 +133,45 @@ public static class Builder extends AbstractFieldMapper.Builder ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false); + } + return Defaults.IGNORE_MALFORMED; + } + + public Builder coerce(boolean coerce) { + this.coerce = coerce; + return this; + } + + protected Explicit coerce(BuilderContext context) { + if (coerce != null) { + return new Explicit<>(coerce, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + } + return Defaults.COERCE; + } + public Builder multiFieldPathType(ContentPath.Type pathType) { this.pathType = pathType; return this; @@ -203,10 +238,10 @@ public GeoPointFieldMapper build(BuilderContext context) { // store them as a single token. 
fieldType.setTokenized(false); - return new GeoPointFieldMapper(buildNames(context), fieldType, docValues, indexAnalyzer, postingsProvider, docValuesProvider, - similarity, fieldDataSettings, context.indexSettings(), origPathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, - geoHashPrecision, latMapper, lonMapper, geohashMapper, backCompat, validate, validateLat, validateLon, normalize, - normalizeLat, normalizeLon, multiFieldsBuilder.build(this, context)); + return new GeoPointFieldMapper(buildNames(context), fieldType, coerce(context), ignoreMalformed(context), docValues, + indexAnalyzer, postingsProvider, docValuesProvider, similarity, fieldDataSettings, context.indexSettings(), origPathType, + enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, geoHashPrecision, latMapper, lonMapper, geohashMapper, + backCompat, validateLat, validateLon, normalizeLat, normalizeLon, multiFieldsBuilder.build(this, context)); } } @@ -238,26 +273,64 @@ public static class TypeParser implements Mapper.TypeParser { } else { builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString())); } + } else if (fieldName.equals(Names.IGNORE_MALFORMED)) { + builder.ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode); + if (builder.backCompat) { + builder.validateLat = !builder.ignoreMalformed; + builder.validateLon = !builder.ignoreMalformed; + } } else if (fieldName.equals("validate")) { - builder.validate = XContentMapValues.nodeBooleanValue(fieldNode); if (builder.backCompat) { - builder.validateLat = builder.validate; - builder.validateLon = builder.validate; + builder.ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode); + builder.validateLat = builder.ignoreMalformed; + builder.validateLon = builder.validateLat; + } else { + throw new ElasticsearchIllegalArgumentException("'" + fieldName + "' is deprecated in {" + CONTENT_TYPE + + "} field mapping - use '" + Names.IGNORE_MALFORMED + "' instead"); + } + } else if 
(fieldName.equals("validate_lon")) { + if (builder.backCompat) { + builder.ignoreMalformed = true; + builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); + } else { + throw new ElasticsearchIllegalArgumentException("'" + fieldName + "' is deprecated in {" + CONTENT_TYPE + + "} field mapping - use '" + Names.IGNORE_MALFORMED + "' instead"); + } + } else if (fieldName.equals("validate_lat")) { + if (builder.backCompat) { + builder.ignoreMalformed = true; + builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); + } else { + throw new ElasticsearchIllegalArgumentException("'" + fieldName + "' is deprecated in {" + CONTENT_TYPE + + "} field mapping - use '" + Names.IGNORE_MALFORMED + "' instead"); + } + } else if (fieldName.equals(Names.COERCE)) { + builder.coerce = XContentMapValues.nodeBooleanValue(fieldNode); + } else if (fieldName.equals("normalize_lat")) { + if (builder.backCompat) { + builder.coerce = false; + builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); + } else { + throw new ElasticsearchIllegalArgumentException("'" + fieldName + "' is deprecated in {" + CONTENT_TYPE + + "} field mapping - use '" + Names.COERCE + "' instead"); } - } else if (builder.backCompat && fieldName.equals("validate_lon")) { - builder.validate = false; - builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (builder.backCompat && fieldName.equals("validate_lat")) { - builder.validate = false; - builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); } else if (fieldName.equals("normalize")) { - builder.normalize = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (builder.backCompat && fieldName.equals("normalize_lat")) { - builder.normalize = false; - builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); - } else if (builder.backCompat && fieldName.equals("normalize_lon")) { - builder.normalize = false; - builder.normalizeLon = 
XContentMapValues.nodeBooleanValue(fieldNode); + if (builder.backCompat) { + builder.coerce = XContentMapValues.nodeBooleanValue(fieldNode); + builder.normalizeLat = builder.coerce.booleanValue(); + builder.normalizeLon = builder.normalizeLat; + } else { + throw new ElasticsearchIllegalArgumentException("'" + fieldName + "' is deprecated in {" + CONTENT_TYPE + + "} field mapping - use '" + Names.COERCE + "' instead"); + } + } else if (fieldName.equals("normalize_lon")) { + if (builder.backCompat) { + builder.coerce = false; + builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); + } else { + throw new ElasticsearchIllegalArgumentException("'" + fieldName + "' is deprecated in {" + CONTENT_TYPE + + "} field mapping - use '" + Names.COERCE + "' instead"); + } } else { parseMultiField(builder, name, parserContext, fieldName, fieldNode); } @@ -413,18 +486,19 @@ public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { private boolean validateLon; private boolean validateLat; - private boolean validate; private final boolean normalizeLon; private final boolean normalizeLat; - private boolean normalize; - - public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer, - PostingsFormatProvider postingsFormat, DocValuesFormatProvider docValuesFormat, SimilarityProvider similarity, - @Nullable Settings fieldDataSettings, Settings indexSettings, ContentPath.Type pathType, boolean enableLatLon, - boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, DoubleFieldMapper latMapper, - DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, boolean backCompat, boolean validate, boolean validateLat, - boolean validateLon, boolean normalize, boolean normalizeLat, boolean normalizeLon, MultiFields multiFields) { + + private Explicit coerce; + private Explicit ignoreMalformed; + + public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, 
Explicit coerce, Explicit ignoreMalformed, + Boolean docValues, NamedAnalyzer indexAnalyzer, PostingsFormatProvider postingsFormat, DocValuesFormatProvider docValuesFormat, + SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings, ContentPath.Type pathType, + boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, boolean backCompat, + boolean validateLat, boolean validateLon, boolean normalizeLat, boolean normalizeLon, MultiFields multiFields) { super(names, 1f, fieldType, docValues, null, indexAnalyzer, postingsFormat, docValuesFormat, similarity, null, fieldDataSettings, indexSettings, multiFields, null); this.pathType = pathType; this.enableLatLon = enableLatLon; @@ -437,13 +511,14 @@ public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean this.lonMapper = lonMapper; this.geohashMapper = geohashMapper; + this.coerce = coerce; + this.ignoreMalformed = ignoreMalformed; + this.backCompat = backCompat; - this.validate = validate; this.validateLat = validateLat; this.validateLon = validateLon; - this.normalize = normalize; this.normalizeLat = normalizeLat; this.normalizeLon = normalizeLon; } @@ -574,14 +649,18 @@ private void parsePointFromString(ParseContext context, GeoPoint sparse, String private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException { // if the coordinate is normalized there is no need for a validation check - if (backCompat || normalize) { - GeoUtils.normalizePoint(point, (backCompat) ? normalizeLat : normalize, (backCompat) ? 
normalizeLon : normalize); - } else if (backCompat || validate) { - if ((validate || (backCompat && validateLat)) && (point.lat() > 90.0 || point.lat() < -90.0)) { - throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); + if ((backCompat && (normalizeLat || normalizeLon)) || (!backCompat && coerce.value())) { + GeoUtils.normalizePoint(point, (backCompat) ? normalizeLat : coerce.value(), (backCompat) ? normalizeLon : coerce.value()); + } else if (backCompat || !ignoreMalformed.value()) { + if (((backCompat && validateLat) || !ignoreMalformed.value()) && (point.lat() > 90.0 || point.lat() < -90.0)) { + throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for '" + name() + "'. " + + ((backCompat) ? "Use 'validate_lat' or 'normalize_lat' to validate " : "Enable '" + Names.IGNORE_MALFORMED + + "' or '" + Names.COERCE + "' to accept ") + "or correct values outside the standard coordinate system range."); } - if ((validate || (backCompat && validateLon)) && (point.lon() > 180.0 || point.lon() < -180)) { - throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); + if (((backCompat && validateLon) || !ignoreMalformed.value()) && (point.lon() > 180.0 || point.lon() < -180)) { + throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for '" + name() + "'. " + + ((backCompat) ? 
"Use 'validate_lon' or 'normalize_lon' to validate " : "Enable '" + Names.IGNORE_MALFORMED + + "' or '" + Names.COERCE + "' to accept ") + "or correct values outside the standard coordinate system domain."); } } @@ -645,8 +724,8 @@ public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappi if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix"); } - if (!this.backCompat && this.normalize != fieldMergeWith.normalize) { - mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize"); + if (!this.backCompat && this.coerce != fieldMergeWith.coerce) { + this.coerce = fieldMergeWith.coerce; } if (this.backCompat && this.normalizeLat != fieldMergeWith.normalizeLat) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lat"); @@ -657,8 +736,8 @@ public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappi if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different precision_step"); } - if (!this.backCompat && this.validate != fieldMergeWith.validate) { - mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate"); + if (!this.backCompat && this.ignoreMalformed != fieldMergeWith.ignoreMalformed) { + this.ignoreMalformed = fieldMergeWith.ignoreMalformed; } if (this.backCompat && this.validateLat != fieldMergeWith.validateLat) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lat"); @@ -705,9 +784,6 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, if (includeDefaults || precisionStep != null) { builder.field("precision_step", precisionStep); } - if (!backCompat && (includeDefaults || validate != Defaults.VALIDATE)) { - builder.field("validate", validate); - } if (backCompat && (includeDefaults || 
validateLat != Defaults.VALIDATE_LAT || validateLon != Defaults.VALIDATE_LON)) { if (validateLat && validateLon) { builder.field("validate", true); @@ -722,9 +798,6 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, } } } - if (!backCompat && (includeDefaults || normalize != Defaults.NORMALIZE)) { - builder.field("normalize", normalize); - } if (backCompat && (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != Defaults.NORMALIZE_LON)) { if (normalizeLat && normalizeLon) { builder.field("normalize", true); @@ -739,6 +812,12 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, } } } + if (!backCompat && (includeDefaults || ignoreMalformed.explicit())) { + builder.field("ignore_malformed", ignoreMalformed.value()); + } + if (!backCompat && (includeDefaults || coerce.explicit())) { + builder.field("coerce", coerce.value()); + } } public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField { From 868b6bec8fb7533405f678f6976245156e9e19ca Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Mon, 20 Jul 2015 11:07:59 -0500 Subject: [PATCH 3/3] refactoring filter parameters from validate/normalize to ignore_malformed/coerce, respectively. 
Docs updated to reflect change --- .../mapping/types/geo-point-type.asciidoc | 11 +++++------ .../filters/geo-bounding-box-filter.asciidoc | 10 ++++------ .../filters/geo-distance-filter.asciidoc | 13 ++++++------- .../filters/geo-polygon-filter.asciidoc | 13 +++++-------- .../query/GeoBoundingBoxFilterParser.java | 16 ++++++++-------- .../index/query/GeoDistanceFilterParser.java | 18 +++++++++--------- .../query/GeoDistanceRangeFilterParser.java | 18 +++++++++--------- .../index/query/GeoPolygonFilterParser.java | 18 +++++++++--------- .../search/sort/GeoDistanceSortParser.java | 18 +++++++++--------- 9 files changed, 64 insertions(+), 71 deletions(-) diff --git a/docs/reference/mapping/types/geo-point-type.asciidoc b/docs/reference/mapping/types/geo-point-type.asciidoc index 7ea3c62066969..72f6502da74c4 100644 --- a/docs/reference/mapping/types/geo-point-type.asciidoc +++ b/docs/reference/mapping/types/geo-point-type.asciidoc @@ -138,13 +138,12 @@ but also all its parent cells (true prefixes) will be indexed as well. The number of terms that will be indexed depends on the `geohash_precision`. Defaults to `false`. *Note*: This option implicitly enables `geohash`. -|`validate` |Set to `false` to accept geo points with invalid latitude or -longitude (default is `true`). *Note*: Validation only works when -normalization has been disabled. This option will be deprecated and removed -in upcoming releases. +|`ignore_malformed` |Formerly `validate`. Set to `true` to +accept geo points with invalid latitude or longitude (default is `false`). -|`normalize` |Set to `true` to normalize latitude and longitude (default -is `true`). +|`coerce` |Formerly `normalize`. Set to `true` to normalize longitude and +latitude values to a standard -180:180 / -90:90 coordinate system. (default +is `false`). 
|`precision_step` |The precision step (influences the number of terms generated for each number value) for `.lat` and `.lon` fields diff --git a/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc b/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc index eee90598a86ef..3d6920321a4cb 100644 --- a/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc @@ -58,13 +58,11 @@ Defaults to `false`. See <> below for further details. |`_cache_key` |Associate a unique custom cache key to use instead of the actual filter. -|`validate` |Set to `false` to accept geo points with invalid latitude or -longitude (default is `true`). *Note*: Validation only works when -normalization has been disabled. This option will be deprecated and removed -in upcoming releases. +|`ignore_malformed` |Set to `true` to +accept geo points with invalid latitude or longitude (default is `false`). -|`normalize` |Set to `true` to normalize latitude and longitude (default -is `true`). +|`coerce` |Set to `true` to normalize longitude and latitude values to a +standard -180:180 / -90:90 coordinate system. (default is `false`). |`type` |Set to one of `indexed` or `memory` to defines whether this filter will be executed in memory or indexed. See <> below for further details diff --git a/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc b/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc index 325f0eff0f59c..00deb1d93bce5 100644 --- a/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc @@ -171,16 +171,15 @@ The following are options allowed on the filter: Associate a unique custom cache key to use instead of the actual filter. -`validate`:: +`ignore_malformed`:: - Set to `false` to accept geo points with invalid latitude or - longitude (default is `true`). 
*Note*: Validation only works when - normalization has been disabled. This option will be deprecated and removed - in upcoming releases. + Set to `true` to accept geo points with invalid latitude or + longitude (default is `false`). -`normalize`:: +`coerce`:: - Set to `true` to normalize latitude and longitude. Defaults to `true`. + Set to `true` to normalize longitude and latitude values to a standard -180:180 / -90:90 + coordinate system. (default is `false`). [float] ==== geo_point Type diff --git a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc index 9fb69d633bdf1..eb7531e797d84 100644 --- a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc @@ -40,14 +40,11 @@ Defaults to `false`. See <> below. |`_cache_key` |Associate a unique custom cache key to use instead of the actual filter. -|`validate` |Set to `false` to accept geo points with invalid latitude or -longitude (default is `true`). *Note*: Validation only works when -normalization has been disabled. This option will be deprecated and removed -in upcoming releases. - -|`normalize` |Set to `false` to prevent latitude and longitude from auto converting -to proper lat/lon bounds. -Default is `true'. +|`ignore_malformed` |Set to `true` to accept geo points with invalid latitude or +longitude (default is `false`). + +|`coerce` |Set to `true` to normalize longitude and latitude values to a +standard -180:180 / -90:90 coordinate system. (default is `false`). 
|======================================================================= [float] diff --git a/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java index 89ca30deba647..6dcd6061fbf3e 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java @@ -85,8 +85,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar String filterName = null; String currentFieldName = null; XContentParser.Token token; - boolean validate = true; - boolean normalize = true; + boolean ignoreMalformed = false; + boolean coerce = false; GeoPoint sparse = new GeoPoint(); @@ -144,10 +144,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cache = parser.booleanValue(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new CacheKeyFilter.Key(parser.text()); - } else if ("validate".equals(currentFieldName)) { - validate = parser.booleanValue(); - } else if ("normalize".equals(currentFieldName)) { - normalize = parser.booleanValue(); + } else if ("ignore_malformed".equals(currentFieldName)) { + ignoreMalformed = parser.booleanValue(); + } else if ("coerce".equals(currentFieldName)) { + coerce = parser.booleanValue(); } else if ("type".equals(currentFieldName)) { type = parser.text(); } else { @@ -159,7 +159,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar final GeoPoint topLeft = sparse.reset(top, left); //just keep the object final GeoPoint bottomRight = new GeoPoint(bottom, right); - if (normalize) { + if (coerce) { // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for // the complete longitude range so need to set longitude to the complete longitude range boolean 
completeLonRange = ((right - left) % 360 == 0 && right > left); @@ -169,7 +169,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar topLeft.resetLon(-180); bottomRight.resetLon(180); } - } else if (validate) { + } else if (!ignoreMalformed) { if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) { throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + topLeft.lat() + "] for " + filterName); } diff --git a/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java index ae3db762072e3..1796e705a7536 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java @@ -77,8 +77,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar GeoDistance geoDistance = GeoDistance.DEFAULT; String optimizeBbox = "memory"; - boolean validate = true; - boolean normalize = true; + boolean ignoreMalformed = false; + boolean coerce = false; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -133,10 +133,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cacheKey = new CacheKeyFilter.Key(parser.text()); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); - } else if ("validate".equals(currentFieldName)) { - validate = parser.booleanValue(); - } else if ("normalize".equals(currentFieldName)) { - normalize = parser.booleanValue(); + } else if ("ignore_malformed".equals(currentFieldName)) { + ignoreMalformed = parser.booleanValue(); + } else if ("coerce".equals(currentFieldName)) { + coerce = parser.booleanValue(); } else { point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -144,9 +144,9 @@ public Filter 
parse(QueryParseContext parseContext) throws IOException, QueryPar } } - if (normalize) { - GeoUtils.normalizePoint(point, normalize, normalize); - } else if (validate) { + if (coerce) { + GeoUtils.normalizePoint(point, coerce, coerce); + } else if (!ignoreMalformed) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); } diff --git a/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java index 5833cb92960be..a02e84e1842d2 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java @@ -78,8 +78,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; String optimizeBbox = "memory"; - boolean validate = true; - boolean normalize = true; + boolean ignoreMalformed = false; + boolean coerce = false; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -162,10 +162,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cacheKey = new CacheKeyFilter.Key(parser.text()); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); - } else if ("validate".equals(currentFieldName)) { - validate = parser.booleanValue(); - } else if ("normalize".equals(currentFieldName)) { - normalize = parser.booleanValue(); + } else if ("ignore_malformed".equals(currentFieldName)) { + ignoreMalformed = parser.booleanValue(); + } else if ("coerce".equals(currentFieldName)) { + coerce = parser.booleanValue(); } else { 
point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -173,9 +173,9 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } - if (normalize) { - GeoUtils.normalizePoint(point, normalize, normalize); - } else if (validate) { + if (coerce) { + GeoUtils.normalizePoint(point, coerce, coerce); + } else if (!ignoreMalformed) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); } diff --git a/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java index d4bbadf9bc6e2..36e21edc76555 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java @@ -74,8 +74,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar List shell = Lists.newArrayList(); - boolean validate = true; - boolean normalize = true; + boolean ignoreMalformed = false; + boolean coerce = false; String filterName = null; String currentFieldName = null; @@ -109,10 +109,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cache = parser.booleanValue(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new CacheKeyFilter.Key(parser.text()); - } else if ("validate".equals(currentFieldName)) { - validate = parser.booleanValue(); - } else if ("normalize".equals(currentFieldName)) { - normalize = parser.booleanValue(); + } else if ("ignore_malformed".equals(currentFieldName)) { + ignoreMalformed = parser.booleanValue(); + } else if ("coerce".equals(currentFieldName)) { + coerce = parser.booleanValue(); } else { throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]"); } @@ -136,11 
+136,11 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } - if (normalize) { + if (coerce) { for (GeoPoint point : shell) { - GeoUtils.normalizePoint(point, normalize, normalize); + GeoUtils.normalizePoint(point, coerce, coerce); } - } else if (validate) { + } else if (!ignoreMalformed) { for (GeoPoint point : shell) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); diff --git a/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index a62e6c7b0c824..4f70968c054f3 100644 --- a/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -66,8 +66,8 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce MultiValueMode sortMode = null; NestedInnerQueryParseSupport nestedHelper = null; - boolean validate = true; - boolean normalize = true; + boolean ignoreMalformed = false; + boolean coerce = false; XContentParser.Token token; String currentName = parser.currentName(); @@ -100,10 +100,10 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce unit = DistanceUnit.fromString(parser.text()); } else if (currentName.equals("distance_type") || currentName.equals("distanceType")) { geoDistance = GeoDistance.fromString(parser.text()); - } else if ("validate".equals(currentName)) { - validate = parser.booleanValue(); - } else if ("normalize".equals(currentName)) { - normalize = parser.booleanValue(); + } else if ("ignore_malformed".equals(currentName)) { + ignoreMalformed = parser.booleanValue(); + } else if ("coerce".equals(currentName)) { + coerce = parser.booleanValue(); } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) { 
sortMode = MultiValueMode.fromString(parser.text()); } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) { @@ -120,11 +120,11 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce } } - if (normalize) { + if (coerce) { for (GeoPoint point : geoPoints) { - GeoUtils.normalizePoint(point, normalize, normalize); + GeoUtils.normalizePoint(point, coerce, coerce); } - } else if (validate) { + } else if (!ignoreMalformed) { for (GeoPoint point : geoPoints) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for geo distance sort");