From d7f1b954baa3608ae78b374ad947dd24075fd4d5 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Thu, 19 Mar 2015 15:07:56 -0500 Subject: [PATCH 1/3] [GEO] Deprecate validate_* and normalize_* No need to carry 3 validate and 3 normalize options in GeoPointFieldMapping. This is just causing confusion so this change deprecates the validate_lat, validate_lon, normalize_lat, and normalize_lon options and simply uses validate and normalize, respectively. Geo filters have also been updated to include a validate and normalize option to provide consistency with the mapping defaults. Unit tests and documentation have been updated. closes #10170 --- .../mapping/types/geo-point-type.asciidoc | 12 -- .../filters/geo-bounding-box-filter.asciidoc | 27 ++++ .../filters/geo-distance-filter.asciidoc | 23 ++++ .../geo-distance-range-filter.asciidoc | 2 +- .../filters/geo-polygon-filter.asciidoc | 23 ++++ .../index/mapper/geo/GeoPointFieldMapper.java | 115 ++++++------------ .../query/GeoBoundingBoxFilterParser.java | 22 +++- .../index/query/GeoDistanceFilterParser.java | 24 ++-- .../query/GeoDistanceRangeFilterParser.java | 24 ++-- .../index/query/GeoPolygonFilterParser.java | 22 +++- .../search/sort/GeoDistanceSortParser.java | 23 ++-- .../mapper/geo/GeoPointFieldMapperTests.java | 4 +- .../index/search/geo/GeoUtilsTests.java | 18 ++- .../search/geo/GeoPolygonTests.java | 55 ++++++++- 14 files changed, 258 insertions(+), 136 deletions(-) diff --git a/docs/reference/mapping/types/geo-point-type.asciidoc b/docs/reference/mapping/types/geo-point-type.asciidoc index 2d3f3ad6dd2fa..7ea3c62066969 100644 --- a/docs/reference/mapping/types/geo-point-type.asciidoc +++ b/docs/reference/mapping/types/geo-point-type.asciidoc @@ -143,21 +143,9 @@ longitude (default is `true`). *Note*: Validation only works when normalization has been disabled. This option will be deprecated and removed in upcoming releases. -|`validate_lat` |Set to `false` to accept geo points with an invalid -latitude (default is `true`). This option will be deprecated and removed -in upcoming releases. - -|`validate_lon` |Set to `false` to accept geo points with an invalid -longitude (default is `true`). This option will be deprecated and removed -in upcoming releases. - |`normalize` |Set to `true` to normalize latitude and longitude (default is `true`). -|`normalize_lat` |Set to `true` to normalize latitude. - -|`normalize_lon` |Set to `true` to normalize longitude. - |`precision_step` |The precision step (influences the number of terms generated for each number value) for `.lat` and `.lon` fields if `lat_lon` is set to `true`. diff --git a/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc b/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc index 7f16ec562d99a..eee90598a86ef 100644 --- a/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-bounding-box-filter.asciidoc @@ -44,6 +44,33 @@ Then the following simple query can be executed with a } -------------------------------------------------- +[float] +==== Filter Options + +[cols="<,<",options="header",] +|======================================================================= +|Option |Description +|`_name` |Optional name field to identify the filter + +|`_cache` |Set to `true` to cache the *result* of the filter. +Defaults to `false`. See <> below for further details. + +|`_cache_key` |Associate a unique custom cache key to use instead of +the actual filter. 
+ +|`validate` |Set to `false` to accept geo points with invalid latitude or +longitude (default is `true`). *Note*: Validation only works when +normalization has been disabled. This option will be deprecated and removed +in upcoming releases. + +|`normalize` |Set to `true` to normalize latitude and longitude (default +is `true`). + +|`type` |Set to one of `indexed` or `memory` to defines whether this filter will +be executed in memory or indexed. See <> below for further details +Default is `memory`. +|======================================================================= + [float] ==== Accepted Formats diff --git a/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc b/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc index 670245a11a3bd..fb0d341ed95f9 100644 --- a/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-distance-filter.asciidoc @@ -158,6 +158,29 @@ The following are options allowed on the filter: sure the `geo_point` type index lat lon in this case), or `none` which disables bounding box optimization. +`_name`:: + + Optional name field to identify the filter + +`_cache`:: + + Set to `true` to cache the *result* of the filter. + Defaults to `false`. See <> below for further details. + +`_cache_key`:: + + Associate a unique custom cache key to use instead of the actual filter. + +`validate`:: + + Set to `false` to accept geo points with invalid latitude or + longitude (default is `true`). *Note*: Validation only works when + normalization has been disabled. This option will be deprecated and removed + in upcoming releases. + +`normalize`:: + + Set to `true` to normalize latitude and longitude. Defaults to `true`. [float] ==== geo_point Type diff --git a/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc b/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc index 1bc4197e5b382..0f3c672c45434 100644 --- a/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-distance-range-filter.asciidoc @@ -24,7 +24,7 @@ Filters documents that exists within a range from a specific point: } -------------------------------------------------- -Supports the same point location parameter as the +Supports the same point location parameter and filter options as the <> filter. And also support the common parameters for range (lt, lte, gt, gte, from, to, include_upper and include_lower). diff --git a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc index a4212343eff81..03e1c13c81bb2 100644 --- a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc @@ -26,6 +26,29 @@ points. Here is an example: } -------------------------------------------------- +[float] +==== Filter Options + +[cols="<,<",options="header",] +|======================================================================= +|Option |Description +|`_name` |Optional name field to identify the filter + +|`_cache` |Set to `true` to cache the *result* of the filter. +Defaults to `false`. See <> below. + +|`_cache_key` |Associate a unique custom cache key to use instead of +the actual filter. + +|`validate` |Set to `false` to accept geo points with invalid latitude or +longitude (default is `true`). *Note*: Validation only works when +normalization has been disabled. This option will be deprecated and removed +in upcoming releases. 
+ +|`normalize` |Set to `true` to normalize latitude and longitude (default +is `true`). +|======================================================================= + [float] ==== Allowed Formats diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index f7a39b2c95295..97f614f684d66 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalStateException; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -105,10 +106,8 @@ public static class Defaults { public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION; - public static final boolean NORMALIZE_LAT = true; - public static final boolean NORMALIZE_LON = true; - public static final boolean VALIDATE_LAT = true; - public static final boolean VALIDATE_LON = true; + public static final boolean NORMALIZE = true; + public static final boolean VALIDATE = true; public static final FieldType FIELD_TYPE = new FieldType(StringFieldMapper.Defaults.FIELD_TYPE); @@ -134,10 +133,8 @@ public static class Builder extends AbstractFieldMapper.Builder 90.0 || point.lat() < -90.0) { throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); } - } - if (validateLon) { if (point.lon() > 180.0 || point.lon() < -180) { throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); } @@ -662,20 +650,14 @@ public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappi if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix"); } - if (this.normalizeLat != fieldMergeWith.normalizeLat) { - mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lat"); - } - if (this.normalizeLon != fieldMergeWith.normalizeLon) { - mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize_lon"); + if (this.normalize != fieldMergeWith.normalize) { + mergeContext.addConflict("mapper [" + names.fullName() + "] has different normalize"); } if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) { mergeContext.addConflict("mapper [" + names.fullName() + "] has different precision_step"); } - if (this.validateLat != fieldMergeWith.validateLat) { - mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lat"); - } - if (this.validateLon != fieldMergeWith.validateLon) { - mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lon"); + if (this.validate != fieldMergeWith.validate) { + mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate"); } } @@ -716,33 +698,11 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, if (includeDefaults || precisionStep != null) { builder.field("precision_step", precisionStep); } - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT || 
validateLon != Defaults.VALIDATE_LON) { - if (validateLat && validateLon) { - builder.field("validate", true); - } else if (!validateLat && !validateLon) { - builder.field("validate", false); - } else { - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT) { - builder.field("validate_lat", validateLat); - } - if (includeDefaults || validateLon != Defaults.VALIDATE_LON) { - builder.field("validate_lon", validateLon); - } - } + if (includeDefaults || validate != Defaults.VALIDATE) { + builder.field("validate", validate); } - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != Defaults.NORMALIZE_LON) { - if (normalizeLat && normalizeLon) { - builder.field("normalize", true); - } else if (!normalizeLat && !normalizeLon) { - builder.field("normalize", false); - } else { - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT) { - builder.field("normalize_lat", normalizeLat); - } - if (includeDefaults || normalizeLon != Defaults.NORMALIZE_LON) { - builder.field("normalize_lon", normalizeLat); - } - } + if (includeDefaults || normalize != Defaults.NORMALIZE) { + builder.field("normalize", normalize); } } @@ -779,5 +739,4 @@ public BytesRef binaryValue() { return new BytesRef(bytes); } } - } diff --git a/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java index 8f68dbea07493..9ad6066de76b0 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxFilterParser.java @@ -84,6 +84,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar String filterName = null; String currentFieldName = null; XContentParser.Token token; + boolean validate = true; boolean normalize = true; GeoPoint sparse = new GeoPoint(); @@ -142,6 +143,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cache = parseContext.parseFilterCachePolicy(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new HashedBytesRef(parser.text()); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { normalize = parser.booleanValue(); } else if ("type".equals(currentFieldName)) { @@ -156,8 +159,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar final GeoPoint bottomRight = new GeoPoint(bottom, right); if (normalize) { - // Special case: if the difference bettween the left and right is 360 and the right is greater than the left, we are asking for - // the complete longitude range so need to set longitude to the complete longditude range + // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for + // the complete longitude range so need to set longitude to the complete longitude range boolean completeLonRange = ((right - left) % 360 == 0 && right > left); GeoUtils.normalizePoint(topLeft, true, !completeLonRange); GeoUtils.normalizePoint(bottomRight, true, !completeLonRange); @@ -165,6 +168,21 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar topLeft.resetLon(-180); bottomRight.resetLon(180); } + } else if (validate) { + if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + topLeft.lat() + "] for 
" + filterName); + } + if (topLeft.lon() > 180.0 || topLeft.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + topLeft.lon() + "] for " + filterName); + } + if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + bottomRight.lat() + "] for " + + filterName); + } + if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + bottomRight.lon() + "] for " + + filterName); + } } MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); diff --git a/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java index 252afdf25cf2f..21d11de3f4bb4 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoDistanceFilterParser.java @@ -75,8 +75,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; String optimizeBbox = "memory"; - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -130,9 +130,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cacheKey = new HashedBytesRef(parser.text()); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else { point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -140,6 +141,17 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } + if (normalize) { + GeoUtils.normalizePoint(point, normalize, normalize); + } else if (validate) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + point.lon() + "] for " + filterName); + } + } + if (vDistance == null) { throw new QueryParsingException(parseContext.index(), "geo_distance requires 'distance' to be specified"); } else if (vDistance instanceof Number) { @@ -149,10 +161,6 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT); - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); - } - MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); if (smartMappers == null || !smartMappers.hasMapper()) { throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]"); diff --git a/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java 
b/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java index b7452bec0f11e..0e47f5b1d0943 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeFilterParser.java @@ -77,8 +77,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar DistanceUnit unit = DistanceUnit.DEFAULT; GeoDistance geoDistance = GeoDistance.DEFAULT; String optimizeBbox = "memory"; - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -161,9 +161,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cacheKey = new HashedBytesRef(parser.text()); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); + } else if ("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else { point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -171,6 +172,17 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } + if (normalize) { + GeoUtils.normalizePoint(point, normalize, normalize); + } else if (validate) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + point.lon() + "] for " + filterName); + } + } + Double from = null; Double to = null; if (vFrom != null) { @@ -190,10 +202,6 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar to = geoDistance.normalize(to, DistanceUnit.DEFAULT); } - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); - } - MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); if (smartMappers == null || !smartMappers.hasMapper()) { throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]"); diff --git a/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java b/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java index fefa37c07e379..2a943ecd3f54b 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoPolygonFilterParser.java @@ -74,8 +74,8 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar List shell = Lists.newArrayList(); - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; String filterName = null; String currentFieldName = null; @@ -109,9 +109,10 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar cache = parseContext.parseFilterCachePolicy(); } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) { cacheKey = new HashedBytesRef(parser.text()); + } else if 
("validate".equals(currentFieldName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentFieldName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else { throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]"); } @@ -135,9 +136,18 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar } } - if (normalizeLat || normalizeLon) { + if (normalize) { for (GeoPoint point : shell) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); + GeoUtils.normalizePoint(point, normalize, normalize); + } + } else if (validate) { + for (GeoPoint point : shell) { + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new QueryParsingException(parseContext.index(), "illegal latitude value [" + point.lat() + "] for " + filterName); + } + if (point.lon() > 180.0 || point.lon() < -180) { + throw new QueryParsingException(parseContext.index(), "illegal longitude value [" + point.lon() + "] for " + filterName); + } } } diff --git a/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index 4d09018281332..7e5a42cfc4cd6 100644 --- a/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -65,8 +65,8 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce MultiValueMode sortMode = null; NestedInnerQueryParseSupport nestedHelper = null; - boolean normalizeLon = true; - boolean normalizeLat = true; + boolean validate = true; + boolean normalize = true; XContentParser.Token token; String currentName = parser.currentName(); @@ -75,7 +75,6 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce currentName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { parseGeoPoints(parser, geoPoints); - fieldName = currentName; } else if (token == XContentParser.Token.START_OBJECT) { // the json in the format of -> field : { lat : 30, lon : 12 } @@ -99,9 +98,10 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce unit = DistanceUnit.fromString(parser.text()); } else if (currentName.equals("distance_type") || currentName.equals("distanceType")) { geoDistance = GeoDistance.fromString(parser.text()); + } else if ("validate".equals(currentName)) { + validate = parser.booleanValue(); } else if ("normalize".equals(currentName)) { - normalizeLat = parser.booleanValue(); - normalizeLon = parser.booleanValue(); + normalize = parser.booleanValue(); } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) { sortMode = MultiValueMode.fromString(parser.text()); } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) { @@ -118,9 +118,18 @@ public SortField parse(XContentParser parser, SearchContext context) throws Exce } } - if (normalizeLat || normalizeLon) { + if (normalize) { + for (GeoPoint point : geoPoints) { + GeoUtils.normalizePoint(point, normalize, normalize); + } + } else if (validate) { for (GeoPoint point : geoPoints) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); + if (point.lat() > 90.0 || point.lat() < -90.0) { + throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for geo distance sort"); + } + if 
(point.lon() > 180.0 || point.lon() < -180) { + throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for geo distance sort"); + } } } diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index e4f12589dc536..b4ac734b63917 100644 --- a/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -488,9 +488,9 @@ public void testGeoPointMapperMerge() throws Exception { DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false)); assertThat(mergeResult.hasConflicts(), equalTo(true)); - assertThat(mergeResult.conflicts().length, equalTo(2)); + assertThat(mergeResult.conflicts().length, equalTo(1)); // todo better way of checking conflict? - assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.conflicts())))); + assertThat("mapper [point] has different validate", isIn(new ArrayList<>(Arrays.asList(mergeResult.conflicts())))); // correct mapping and ensure no failures stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") diff --git a/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index 34c30be9223b8..6c295468d834d 100644 --- a/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -339,8 +339,7 @@ public void testNormalizePoint_outsideNormalRange() { @Test public void testNormalizePoint_outsideNormalRange_withOptions() { for (int i = 0; i < 100; i++) { - boolean normLat = randomBoolean(); - boolean normLon = randomBoolean(); + boolean normalize = randomBoolean(); double normalisedLat = (randomDouble() * 180.0) - 90.0; double normalisedLon = (randomDouble() * 360.0) - 180.0; int shiftLat = randomIntBetween(1, 10000); @@ -350,23 +349,20 @@ public void testNormalizePoint_outsideNormalRange_withOptions() { double expectedLat; double expectedLon; - if (normLat) { + if (normalize) { expectedLat = normalisedLat * (shiftLat % 2 == 0 ? 1 : -1); - } else { - expectedLat = testLat; - } - if (normLon) { - expectedLon = normalisedLon + ((normLat && shiftLat % 2 == 1) ? 180 : 0); + expectedLon = normalisedLon + ((shiftLat % 2 == 1) ? 180 : 0); if (expectedLon > 180.0) { expectedLon -= 360; } } else { - double shiftValue = normalisedLon > 0 ? -180 : 180; - expectedLon = testLon + ((normLat && shiftLat % 2 == 1) ? 
shiftValue : 0); + expectedLat = testLat; + expectedLon = testLon; } + GeoPoint testPoint = new GeoPoint(testLat, testLon); GeoPoint expectedPoint = new GeoPoint(expectedLat, expectedLon); - GeoUtils.normalizePoint(testPoint, normLat, normLon); + GeoUtils.normalizePoint(testPoint, normalize, normalize); assertThat("Unexpected Latitude", testPoint.lat(), closeTo(expectedPoint.lat(), MAX_ACCEPTABLE_ERROR)); assertThat("Unexpected Longitude", testPoint.lon(), closeTo(expectedPoint.lon(), MAX_ACCEPTABLE_ERROR)); } diff --git a/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java b/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java index d370041a24a13..befd147d03b2f 100644 --- a/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java +++ b/src/test/java/org/elasticsearch/search/geo/GeoPolygonTests.java @@ -86,7 +86,6 @@ protected void setupSuiteScopeCluster() throws Exception { @Test public void simplePolygonTest() throws Exception { - SearchResponse searchResponse = client().prepareSearch("test") // from NY .setQuery(filteredQuery(matchAllQuery(), geoPolygonFilter("location") .addPoint(40.7, -74.0) @@ -101,4 +100,58 @@ public void simplePolygonTest() throws Exception { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } } + + @Test + public void polygonNormalizationTest() throws Exception { + String invalidPolyFilter = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 400.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"normalize\": false}}"; + + // test proper error handling for invalid poly (validation enabled, normalization disabled) + try { + client().prepareSearch("test") // from NY + .setQuery(matchAllQuery()).setPostFilter(invalidPolyFilter).execute().actionGet(); + } catch (Exception e) { + if (!e.getMessage().contains("illegal latitude value")) + throw new Exception("expected: 'QueryParsingException on latitude'\n got: " + e.getMessage()); + } + + // test correct filtering with valid poly (validation enabled, normalization disabled) + String validPolyFilter = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 400.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"validate\": true, \"normalize\": true}}"; + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(matchAllQuery()).setPostFilter(validPolyFilter).execute().actionGet(); + assertHitCount(searchResponse, 4); + assertThat(searchResponse.getHits().hits().length, equalTo(4)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); + } + + // test error handling for invalid poly + String invalidPoly = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 400.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"validate\": false, \"normalize\": false}}"; + + // test proper error handling for invalid poly (validation enabled, normalization disabled) + try { + client().prepareSearch("test") // from NY + .setQuery(matchAllQuery()).setPostFilter(invalidPoly).execute().actionGet(); + } catch (Exception e) { + if (!e.getMessage().contains("illegal latitude value")) + throw new Exception("expected: 
'QueryParsingException on latitude'\n got: " + e.getMessage()); + } + + // test correct filtering with valid poly (validation enabled, normalization disabled) + String validPoly = "{\"geo_polygon\": { \"location\": { \"points\": [{\"lat\": 40.7, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.1}, {\"lat\": 40.8, \"lon\": -74.0}, " + + "{\"lat\": 40.7, \"lon\": -74.0}]}, \"validate\": false, \"normalize\": false}}"; + searchResponse = client().prepareSearch("test") + .setQuery(matchAllQuery()).setPostFilter(validPoly).execute().actionGet(); + assertHitCount(searchResponse, 4); + assertThat(searchResponse.getHits().hits().length, equalTo(4)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); + } + } } From 2fdbedf787094ad85ea8b56396d6a0e01a0991cb Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Wed, 25 Mar 2015 11:09:59 -0500 Subject: [PATCH 2/3] updating for bwc --- .../filters/geo-polygon-filter.asciidoc | 5 +- .../index/mapper/geo/GeoPointFieldMapper.java | 74 ++++++++++++------- 2 files changed, 52 insertions(+), 27 deletions(-) diff --git a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc index 03e1c13c81bb2..9fb69d633bdf1 100644 --- a/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc +++ b/docs/reference/query-dsl/filters/geo-polygon-filter.asciidoc @@ -45,8 +45,9 @@ longitude (default is `true`). *Note*: Validation only works when normalization has been disabled. This option will be deprecated and removed in upcoming releases. -|`normalize` |Set to `true` to normalize latitude and longitude (default -is `true`). +|`normalize` |Set to `false` to prevent latitude and longitude from auto converting +to proper lat/lon bounds. +Default is `true'. 
|======================================================================= [float] diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 97f614f684d66..2cf587ae70a45 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -30,7 +30,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchIllegalStateException; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -106,6 +105,11 @@ public static class Defaults { public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION; + public static final boolean BACK_COMPAT = true; + public static final boolean NORMALIZE_LAT = true; + public static final boolean NORMALIZE_LON = true; + public static final boolean VALIDATE_LAT = true; + public static final boolean VALIDATE_LON = true; public static final boolean NORMALIZE = true; public static final boolean VALIDATE = true; @@ -133,6 +137,11 @@ public static class Builder extends AbstractFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { Builder builder = geoPointField(name); + builder.backCompat = parserContext.indexVersionCreated().before(Version.V_1_6_0); parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -252,25 +263,25 @@ public static class TypeParser implements Mapper.TypeParser { } else if (fieldName.equals("validate")) { builder.validate = XContentMapValues.nodeBooleanValue(fieldNode); iterator.remove(); - } else if (fieldName.equals("validate_lon")) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_6_0)) { - throw new ElasticsearchParseException("'validate_lon' is deprecated and will be removed in the next release"); - } - } else if (fieldName.equals("validate_lat")) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_6_0)) { - throw new ElasticsearchParseException("'validate_lat' is deprecated and will be removed in the next release"); - } + } else if (builder.backCompat && fieldName.equals("validate_lon")) { + builder.validate = false; + builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); + iterator.remove(); + } else if (builder.backCompat && fieldName.equals("validate_lat")) { + builder.validate = false; + builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); + iterator.remove(); } else if (fieldName.equals("normalize")) { builder.normalize = XContentMapValues.nodeBooleanValue(fieldNode); iterator.remove(); - } else if (fieldName.equals("normalize_lat")) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_6_0)) { - throw new ElasticsearchParseException("'normalize_lat' is deprecated and will be removed in the next release"); - } - } else if (fieldName.equals("normalize_lon")) { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_6_0)) { - throw new ElasticsearchParseException("'normalize_lon' is deprecated and will be removed in the next release"); - } + } else if (builder.backCompat && 
fieldName.equals("normalize_lat")) { + builder.normalize = false; + builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); + iterator.remove(); + } else if (builder.backCompat && fieldName.equals("normalize_lon")) { + builder.normalize = false; + builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); + iterator.remove(); } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { iterator.remove(); } @@ -422,6 +433,11 @@ public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { private final StringFieldMapper geohashMapper; + private boolean backCompat; + private boolean validateLat; + private boolean validateLon; + private boolean normalizeLat; + private boolean normalizeLon; private boolean validate; private boolean normalize; @@ -429,8 +445,9 @@ public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings, ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, - boolean validate, boolean normalize, MultiFields multiFields) { + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, boolean backCompat, + boolean validateLat, boolean validateLon, boolean validate, boolean normalizeLat, boolean normalizeLon, boolean normalize, + MultiFields multiFields) { super(names, 1f, fieldType, docValues, null, indexAnalyzer, similarity, null, fieldDataSettings, indexSettings, multiFields, null); this.pathType = pathType; this.enableLatLon = enableLatLon; @@ -443,6 +460,13 @@ public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean this.lonMapper = lonMapper; this.geohashMapper = geohashMapper; + this.backCompat = backCompat; + this.validateLat = validateLat; + this.validateLon = validateLon; + + this.normalizeLat = normalizeLat; + this.normalizeLon = normalizeLon; + this.validate = validate; this.normalize = normalize; } @@ -579,13 +603,13 @@ private void parsePointFromString(ParseContext context, GeoPoint sparse, String private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException { // if the coordinate is normalized there is no need for a validation check - if (normalize) { - GeoUtils.normalizePoint(point, normalize, normalize); - } else if (validate) { - if (point.lat() > 90.0 || point.lat() < -90.0) { + if (backCompat || normalize) { + GeoUtils.normalizePoint(point, (backCompat) ? normalizeLat : normalize, (backCompat) ? normalizeLon : normalize); + } else if (backCompat || validate) { + if ((validate || (backCompat && validateLat)) && (point.lat() > 90.0 || point.lat() < -90.0)) { throw new ElasticsearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); } - if (point.lon() > 180.0 || point.lon() < -180) { + if ((validate || (backCompat && validateLon)) && (point.lon() > 180.0 || point.lon() < -180)) { throw new ElasticsearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); } } From 59f226b957aa1df2dccf3589e9dedc3cf5312f9e Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Wed, 25 Mar 2015 12:44:30 -0500 Subject: [PATCH 3/3] update validateLat and validateLon based on validate option. 
(preserve bwc functionality) --- .../elasticsearch/index/mapper/geo/GeoPointFieldMapper.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 2cf587ae70a45..563047364a0ed 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -262,6 +262,10 @@ public static class TypeParser implements Mapper.TypeParser { iterator.remove(); } else if (fieldName.equals("validate")) { builder.validate = XContentMapValues.nodeBooleanValue(fieldNode); + if (builder.backCompat) { + builder.validateLat = builder.validate; + builder.validateLon = builder.validate; + } iterator.remove(); } else if (builder.backCompat && fieldName.equals("validate_lon")) { builder.validate = false;
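
The sketch below is not part of the patch series above; it is a minimal, self-contained illustration of the consolidated `validate`/`normalize` behaviour that these three patches converge on, reusing the `GeoPoint` and `GeoUtils.normalizePoint(point, normLat, normLon)` calls that appear in the hunks. The class name, the `main` method, the example field name `location`, and the import paths are assumptions made only to keep the example compilable.

[source,java]
--------------------------------------------------
// Illustration only -- not part of the patch. Import paths are assumed to
// match the 1.x package layout referenced elsewhere in this patch series.
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;

public class GeoOptionSketch {

    // Mirrors the parser/mapper logic introduced above: normalization takes
    // precedence, and validation is only consulted when normalization is off.
    static GeoPoint normalizeOrValidate(GeoPoint point, boolean normalize, boolean validate) {
        if (normalize) {
            // e.g. a latitude of 400.7 is wrapped back into [-90, 90]
            GeoUtils.normalizePoint(point, true, true);
        } else if (validate) {
            if (point.lat() > 90.0 || point.lat() < -90.0) {
                throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "]");
            }
            if (point.lon() > 180.0 || point.lon() < -180.0) {
                throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "]");
            }
        }
        return point;
    }

    // The filter parsers read the same two flags from the request body, so a
    // hypothetical geo_distance filter could disable normalization and rely on
    // validation to fail fast on malformed coordinates (field name is made up):
    static final String GEO_DISTANCE_FILTER =
            "{\"geo_distance\": {\"distance\": \"200km\", "
          + "\"location\": {\"lat\": 40.7, \"lon\": -74.0}, "
          + "\"validate\": true, \"normalize\": false}}";

    public static void main(String[] args) {
        GeoPoint wrapped = normalizeOrValidate(new GeoPoint(400.7, -74.0), true, true);
        System.out.println(wrapped.lat() + ", " + wrapped.lon()); // wrapped into valid bounds

        try {
            normalizeOrValidate(new GeoPoint(400.7, -74.0), false, true);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // rejected: latitude out of range
        }
    }
}
--------------------------------------------------

The precedence shown here matches the documentation added above: validation only applies when normalization has been disabled, because a normalized point always falls back inside the legal latitude/longitude bounds.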