From b53acfe4ee471feaae94e91611507cf1074c6fcd Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Fri, 18 Nov 2016 15:51:16 +0100
Subject: [PATCH] Remove 2.x backward compatibility of mappings.

For the record, I also had to remove the geo-hash cell and geo-distance
range queries to make the code compile. These queries already throw an
exception in all cases with 5.x indices, so removing them does no
additional harm. I also had to rename all 2.x bwc indices from
`index-${version}` to `unsupported-${version}` to keep
`OldIndexBackwardsCompatibilityIT` happy.
---
 .../classic/MapperQueryParser.java | 7 +-
 .../action/bulk/BulkRequest.java | 20 +-
 .../action/index/IndexRequest.java | 97 +-
 .../action/index/IndexRequestBuilder.java | 33 -
 .../action/index/TransportIndexAction.java | 4 +-
 .../ingest/SimulatePipelineRequest.java | 2 -
 .../action/update/UpdateHelper.java | 54 +-
 .../action/update/UpdateRequestBuilder.java | 30 -
 .../cluster/metadata/MappingMetaData.java | 160 +--
 .../cluster/metadata/MetaData.java | 2 -
 .../metadata/MetaDataIndexUpgradeService.java | 2 +-
 .../elasticsearch/common/geo/GeoUtils.java | 7 +-
 .../common/settings/ClusterSettings.java | 2 -
 .../PerFieldMappingPostingFormatCodec.java | 4 -
 .../elasticsearch/index/engine/Engine.java | 9 -
 .../AbstractGeoPointDVIndexFieldData.java | 15 +-
 .../plain/GeoPointArrayAtomicFieldData.java | 145 --
 .../plain/GeoPointArrayIndexFieldData.java | 180 ---
 .../GeoPointArrayLegacyAtomicFieldData.java | 162 ---
 .../GeoPointLegacyDVAtomicFieldData.java | 100 --
 .../index/fieldvisitor/FieldsVisitor.java | 4 -
 .../index/mapper/BaseGeoPointFieldMapper.java | 346 +---
 .../index/mapper/CompletionFieldMapper.java | 3 -
 .../index/mapper/CompletionFieldMapper2x.java | 603 ---------
 .../index/mapper/DateFieldMapper.java | 9 +-
 .../index/mapper/DocumentMapper.java | 8 -
 .../index/mapper/DocumentParser.java | 31 +-
 .../index/mapper/GeoPointFieldMapper.java | 135 --
 .../index/mapper/IpFieldMapper.java | 8 +-
 .../index/mapper/KeywordFieldMapper.java | 34 -
 .../index/mapper/LatLonPointFieldMapper.java | 3 +-
 .../index/mapper/LegacyByteFieldMapper.java | 328 -----
 .../index/mapper/LegacyDateFieldMapper.java | 529 --------
 .../index/mapper/LegacyDoubleFieldMapper.java | 331 -----
 .../index/mapper/LegacyFloatFieldMapper.java | 325 -----
 .../mapper/LegacyGeoPointFieldMapper.java | 367 -----
 .../mapper/LegacyIntegerFieldMapper.java | 332 -----
 .../index/mapper/LegacyIpFieldMapper.java | 341 -----
 .../index/mapper/LegacyIpIndexFieldData.java | 145 --
 .../index/mapper/LegacyLongFieldMapper.java | 318 -----
 .../index/mapper/LegacyNumberFieldMapper.java | 321 -----
 .../index/mapper/LegacyShortFieldMapper.java | 333 -----
 .../mapper/LegacyTokenCountFieldMapper.java | 191 ---
 .../index/mapper/NumberFieldMapper.java | 25 +-
 .../index/mapper/ParsedDocument.java | 16 -
 .../index/mapper/ScaledFloatFieldMapper.java | 2 +-
 .../index/mapper/SourceToParse.java | 37 -
 .../index/mapper/StringFieldMapper.java | 695 ----------
 .../index/mapper/TTLFieldMapper.java | 266 ----
 .../index/mapper/TextFieldMapper.java | 48 -
 .../index/mapper/TimestampFieldMapper.java | 310 -----
 .../index/mapper/TokenCountFieldMapper.java | 5 +-
 .../index/mapper/TypeFieldMapper.java | 9 +-
 .../index/mapper/TypeParsers.java | 38 +-
 .../query/GeoBoundingBoxQueryBuilder.java | 37 +-
 .../index/query/GeoDistanceQueryBuilder.java | 41 +-
 .../query/GeoDistanceRangeQueryBuilder.java | 630 ---------
 .../index/query/GeoPolygonQueryBuilder.java | 26 +-
 .../index/query/GeohashCellQuery.java | 390 ------
 .../index/query/MoreLikeThisQueryBuilder.java | 3 +-
 .../index/query/QueryBuilders.java | 68 -
 .../index/query/QueryStringQueryBuilder.java | 4 -
 .../index/query/RangeQueryBuilder.java | 7 +-
 .../functionscore/DecayFunctionBuilder.java | 16 +-
 .../search/geo/GeoDistanceRangeQuery.java | 236 ----
 .../index/search/geo/GeoPolygonQuery.java | 126 --
 .../LegacyInMemoryGeoBoundingBoxQuery.java | 168 ---
 .../geo/LegacyIndexedGeoBoundingBoxQuery.java | 68 -
 .../elasticsearch/index/shard/IndexShard.java | 6 -
 .../shard/TranslogRecoveryPerformer.java | 2 +-
 .../index/termvectors/TermVectorsService.java | 4 +-
 .../index/translog/Translog.java | 35 +-
 .../elasticsearch/indices/IndicesModule.java | 10 -
 .../indices/ttl/IndicesTTLService.java | 359 -----
 .../elasticsearch/ingest/IngestDocument.java | 15 +-
 .../ingest/PipelineExecutionService.java | 6 +-
 .../java/org/elasticsearch/node/Node.java | 4 -
 .../rest/action/document/RestIndexAction.java | 4 -
 .../action/document/RestUpdateAction.java | 8 -
 .../elasticsearch/search/SearchModule.java | 5 -
 .../subphase/highlight/HighlightPhase.java | 4 +-
 .../elasticsearch/search/suggest/Suggest.java | 5 +-
 .../completion/CompletionSuggester.java | 75 --
 .../CompletionSuggestionBuilder.java | 123 +-
 .../CompletionSuggestionContext.java | 19 -
 .../completion/context/GeoContextMapping.java | 4 +-
 .../AnalyzingCompletionLookupProvider.java | 413 ------
 .../Completion090PostingsFormat.java | 360 -----
 .../completion2x/CompletionSuggestion.java | 141 --
 .../completion2x/CompletionTokenStream.java | 173 ---
 .../completion2x/PayloadProcessor.java | 38 -
 .../context/CategoryContextMapping.java | 374 -----
 .../completion2x/context/ContextBuilder.java | 136 --
 .../completion2x/context/ContextMapping.java | 319 -----
 .../context/GeolocationContextMapping.java | 750 ----------
 .../completion2x/context/package-info.java | 23 -
 .../suggest/completion2x/package-info.java | 23 -
 .../org.apache.lucene.codecs.PostingsFormat | 1 -
 .../tasks/task-index-mapping.json | 2 +-
 .../MetaDataIndexTemplateServiceTests.java | 2 +-
 .../action/index/IndexRequestTests.java | 40 -
 .../action/update/UpdateRequestTests.java | 51 +-
 .../OldIndexBackwardsCompatibilityIT.java | 35 +-
 .../bwcompat/RestoreBackwardsCompatIT.java | 41 +-
 .../MetaDataIndexUpgradeServiceTests.java | 10 +-
 .../org/elasticsearch/codecs/CodecTests.java | 117 --
 .../explain/ExplainActionIT.java | 3 -
 .../gateway/GatewayIndexStateIT.java | 2 +-
 .../org/elasticsearch/get/GetActionIT.java | 13 +-
 .../index/IndexingSlowLogTests.java | 2 +-
 .../index/engine/InternalEngineTests.java | 142 +-
 .../index/engine/ShadowEngineTests.java | 44 +-
 .../fielddata/AbstractFieldDataTestCase.java | 43 +-
 .../AbstractGeoFieldDataTestCase.java | 17 +-
 .../fielddata/IndexFieldDataServiceTests.java | 10 +-
 .../index/mapper/BinaryFieldMapperTests.java | 7 -
 .../index/mapper/BooleanFieldMapperTests.java | 58 +-
 .../mapper/CompletionFieldMapper2xTests.java | 156 ---
 .../index/mapper/CompoundTypesTests.java | 89 --
 .../index/mapper/CustomBoostMappingTests.java | 187 ---
 .../index/mapper/DateFieldMapperTests.java | 9 -
 .../index/mapper/DynamicTemplateTests.java | 6 -
 .../mapper/ExternalFieldMapperTests.java | 34 +-
 .../index/mapper/ExternalMapper.java | 12 +-
 .../index/mapper/FakeStringFieldMapper.java | 30 +-
 .../index/mapper/FieldLevelBoostTests.java | 285 ----
 .../index/mapper/GeoEncodingTests.java | 48 -
 .../mapper/GeoPointFieldMapperTests.java | 653 +--------
 .../index/mapper/GeoPointFieldTypeTests.java | 22 +-
 .../mapper/GeoShapeFieldMapperTests.java | 10 -
 .../index/mapper/IndexFieldMapperTests.java | 9 -
 .../index/mapper/IpFieldMapperTests.java | 9 -
 .../index/mapper/KeywordFieldMapperTests.java | 50 -
 .../mapper/LegacyByteFieldTypeTests.java | 39 -
 .../mapper/LegacyDateFieldMapperTests.java | 495 -------
 .../mapper/LegacyDateFieldTypeTests.java | 153 ---
 .../mapper/LegacyDoubleFieldTypeTests.java | 49 -
 .../mapper/LegacyFloatFieldTypeTests.java | 49 -
 .../LegacyGeohashMappingGeoPointTests.java | 101 --
 .../mapper/LegacyIntegerFieldTypeTests.java | 49 -
 .../mapper/LegacyIpFieldMapperTests.java | 134 --
 .../mapper/LegacyLongFieldTypeTests.java | 49 -
 .../mapper/LegacyNumberFieldMapperTests.java | 620 ---------
 .../mapper/LegacyShortFieldTypeTests.java | 39 -
 .../mapper/LegacyStringMappingTests.java | 1196 ----------------
 .../LegacyTokenCountFieldMapperTests.java | 110 --
 .../index/mapper/MapperServiceTests.java | 2 +-
 .../mapper/MultiFieldCopyToMapperTests.java | 32 +-
 .../index/mapper/NumberFieldMapperTests.java | 14 +
 .../index/mapper/ObjectMapperTests.java | 17 -
 .../index/mapper/ParentFieldMapperTests.java | 42 -
 .../mapper/ScaledFloatFieldMapperTests.java | 17 -
 .../index/mapper/SourceFieldMapperTests.java | 12 -
 ...gFieldMapperPositionIncrementGapTests.java | 182 ---
 .../index/mapper/StringFieldTypeTests.java | 61 -
 .../mapper/StringMappingUpgradeTests.java | 446 ------
 .../index/mapper/TTLFieldMapperTests.java | 322 -----
 .../index/mapper/TextFieldMapperTests.java | 18 -
 .../mapper/TimestampFieldMapperTests.java | 459 -------
 .../index/mapper/TimestampFieldTypeTests.java | 34 -
 .../mapper/TokenCountFieldMapperTests.java | 9 -
 .../index/mapper/TypeFieldMapperTests.java | 16 -
 .../index/mapper/UpdateMappingTests.java | 24 -
 .../GeoBoundingBoxQueryBuilderTests.java | 55 -
 .../query/GeoDistanceQueryBuilderTests.java | 66 +-
 .../query/GeoDistanceRangeQueryTests.java | 397 ------
 .../query/GeoPolygonQueryBuilderTests.java | 90 +-
 .../query/GeohashCellQueryBuilderTests.java | 168 ---
 .../index/query/NestedQueryBuilderTests.java | 7 +-
 .../query/QueryDSLDocumentationTests.java | 19 -
 .../index/query/RangeQueryBuilderTests.java | 198 +--
 .../index/shard/IndexShardIT.java | 11 +-
 .../index/shard/IndexShardTests.java | 7 +-
 .../index/shard/RefreshListenersTests.java | 2 +-
 .../index/translog/TranslogTests.java | 9 +-
 .../indices/IndicesServiceTests.java | 4 +-
 .../template/SimpleIndexTemplateIT.java | 2 +-
 .../elasticsearch/ingest/IngestClientIT.java | 2 +-
 .../ingest/IngestDocumentTests.java | 2 +-
 .../ingest/PipelineExecutionServiceTests.java | 60 +-
 .../search/SearchModuleTests.java | 2 -
 .../basic/TransportSearchFailuresIT.java | 8 +-
 .../basic/TransportTwoNodesSearchIT.java | 10 +-
 .../highlight/HighlighterSearchIT.java | 31 +-
 .../search/fields/SearchFieldsIT.java | 15 -
 .../search/geo/GeoBoundingBoxIT.java | 9 -
 .../search/geo/GeoDistanceIT.java | 3 -
 .../elasticsearch/search/geo/GeoFilterIT.java | 97 +-
 .../search/geo/GeoPolygonIT.java | 3 -
 .../search/query/MultiMatchQueryIT.java | 2 +-
 .../search/query/SimpleQueryStringIT.java | 2 +-
 .../search/sort/AbstractSortTestCase.java | 6 +-
 .../search/sort/GeoDistanceIT.java | 216 ---
 .../suggest/CompletionSuggestSearch2xIT.java | 1199 -----------------
 .../suggest/CompletionSuggestSearchIT.java | 57 -
 .../suggest/ContextSuggestSearch2xIT.java | 1096 ---------------
 .../DedicatedClusterSnapshotRestoreIT.java | 12 +-
 .../SharedClusterSnapshotRestoreIT.java | 8 +-
 .../timestamp/SimpleTimestampIT.java | 146 --
 .../org/elasticsearch/ttl/SimpleTTLIT.java | 305 -----
 .../update/TimestampTTLBWIT.java | 237 ----
 .../org/elasticsearch/update/UpdateIT.java | 5 -
 .../bwc/missing-checksum-repo-2.3.4.zip | Bin 11371 -> 0 bytes
 ...-beta1.zip => unsupported-2.0.0-beta1.zip} | Bin
 ...-beta2.zip => unsupported-2.0.0-beta2.zip} | Bin
 ....0.0-rc1.zip => unsupported-2.0.0-rc1.zip} | Bin
 ...{index-2.0.0.zip => unsupported-2.0.0.zip} | Bin
 ...{index-2.0.1.zip => unsupported-2.0.1.zip} | Bin
 ...{index-2.0.2.zip => unsupported-2.0.2.zip} | Bin
 ...{index-2.1.0.zip => unsupported-2.1.0.zip} | Bin
 ...{index-2.1.1.zip => unsupported-2.1.1.zip} | Bin
 ...{index-2.1.2.zip => unsupported-2.1.2.zip} | Bin
 ...{index-2.2.0.zip => unsupported-2.2.0.zip} | Bin
 ...{index-2.2.1.zip => unsupported-2.2.1.zip} | Bin
 ...{index-2.2.2.zip => unsupported-2.2.2.zip} | Bin
 ...{index-2.3.0.zip => unsupported-2.3.0.zip} | Bin
 ...{index-2.3.1.zip => unsupported-2.3.1.zip} | Bin
 ...{index-2.3.2.zip => unsupported-2.3.2.zip} | Bin
 ...{index-2.3.3.zip => unsupported-2.3.3.zip} | Bin
 ...{index-2.3.4.zip => unsupported-2.3.4.zip} | Bin
 ...{index-2.3.5.zip => unsupported-2.3.5.zip} | Bin
 ...{index-2.4.0.zip => unsupported-2.4.0.zip} | Bin
 ...{index-2.4.1.zip => unsupported-2.4.1.zip} | Bin
 ...{index-2.4.2.zip => unsupported-2.4.2.zip} | Bin
 ...a1.zip => unsupportedrepo-2.0.0-beta1.zip} | Bin
 ...a2.zip => unsupportedrepo-2.0.0-beta2.zip} | Bin
 ...-rc1.zip => unsupportedrepo-2.0.0-rc1.zip} | Bin
 ...po-2.0.0.zip => unsupportedrepo-2.0.0.zip} | Bin
 ...po-2.0.1.zip => unsupportedrepo-2.0.1.zip} | Bin
 ...po-2.0.2.zip => unsupportedrepo-2.0.2.zip} | Bin
 ...po-2.1.0.zip => unsupportedrepo-2.1.0.zip} | Bin
 ...po-2.1.1.zip => unsupportedrepo-2.1.1.zip} | Bin
 ...po-2.1.2.zip => unsupportedrepo-2.1.2.zip} | Bin
 ...po-2.2.0.zip => unsupportedrepo-2.2.0.zip} | Bin
 ...po-2.2.1.zip => unsupportedrepo-2.2.1.zip} | Bin
 ...po-2.2.2.zip => unsupportedrepo-2.2.2.zip} | Bin
 ...po-2.3.0.zip => unsupportedrepo-2.3.0.zip} | Bin
 ...po-2.3.1.zip => unsupportedrepo-2.3.1.zip} | Bin
 ...po-2.3.2.zip => unsupportedrepo-2.3.2.zip} | Bin
 ...po-2.3.3.zip => unsupportedrepo-2.3.3.zip} | Bin
 ...po-2.3.4.zip => unsupportedrepo-2.3.4.zip} | Bin
 ...po-2.3.5.zip => unsupportedrepo-2.3.5.zip} | Bin
 ...po-2.4.0.zip => unsupportedrepo-2.4.0.zip} | Bin
 ...po-2.4.1.zip => unsupportedrepo-2.4.1.zip} | Bin
 ...po-2.4.2.zip => unsupportedrepo-2.4.2.zip} | Bin
 .../template/logstash-5.0.template.json | 7 +-
 .../migration/migrate_6_0/rest.asciidoc | 5 +
 .../common/DateIndexNameProcessorTests.java | 8 +-
 .../ingest/common/ForEachProcessorTests.java | 16 +-
 .../ExpressionScriptEngineService.java | 4 +-
 .../PercolateQueryBuilderTests.java | 2 +-
 .../percolator/PercolatorIT.java | 2 +-
 .../indices/percolator/bwc_index_2.0.0.zip | Bin 11852 -> 0 bytes
 .../AbstractAsyncBulkIndexByScrollAction.java | 57 -
 .../reindex/ClientScrollableHitSource.java | 12 -
 .../index/reindex/ReindexRequest.java | 6 -
 .../index/reindex/RestReindexAction.java | 5 -
 .../index/reindex/ScrollableHitSource.java | 30 -
 .../index/reindex/TransportReindexAction.java | 17 -
 .../reindex/TransportUpdateByQueryAction.java | 11 -
 .../reindex/remote/RemoteResponseParsers.java | 6 +-
 ...lkIndexbyScrollActionMetadataTestCase.java | 16 -
 .../index/reindex/ReindexFailureTests.java | 23 -
 .../index/reindex/ReindexRequestTests.java | 8 -
 .../index/reindex/ReindexScriptTests.java | 27 -
 .../reindex/UpdateByQueryWithScriptTests.java | 2 +-
 .../RemoteScrollableHitSourceTests.java | 10 -
 .../test/reindex/20_validation.yaml | 36 -
 .../netty4/Netty4HttpRequestSizeLimitIT.java | 2 +-
 .../Murmur3FieldMapperUpgradeTests.java | 90 --
 .../bwc/index-mapper-murmur3-2.0.0.zip | Bin 8226 -> 0 bytes
 .../index/mapper/size/SizeFieldMapper.java | 39 +-
 .../size/SizeFieldMapperUpgradeTests.java | 102 --
 .../index/mapper/size/SizeMappingTests.java | 38 -
 .../indices/bwc/index-mapper-size-2.0.0.zip | Bin 7973 -> 0 bytes
 .../ingest/IngestDocumentMustacheIT.java | 8 +-
 .../ingest/ValueSourceMustacheIT.java | 2 +-
 .../ingest/RandomDocumentPicks.java | 10 +-
 .../test/AbstractQueryTestCase.java | 7 +-
 279 files changed, 466 insertions(+), 24538 deletions(-)
 delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayLegacyAtomicFieldData.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java
 delete mode 100644 core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java
 delete mode 100644 core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java
 delete mode 100644 core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java
 delete mode 100644 core/src/test/java/org/elasticsearch/codecs/CodecTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java
 delete mode 100644 core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java
 delete mode 100644 core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java
 delete mode 100644 core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java
 delete mode 100644 core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
 delete mode 100644 core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java
 delete mode 100644 core/src/test/resources/indices/bwc/missing-checksum-repo-2.3.4.zip
 rename core/src/test/resources/indices/bwc/{index-2.0.0-beta1.zip => unsupported-2.0.0-beta1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.0.0-beta2.zip => unsupported-2.0.0-beta2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.0.0-rc1.zip => unsupported-2.0.0-rc1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.0.0.zip => unsupported-2.0.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.0.1.zip => unsupported-2.0.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.0.2.zip => unsupported-2.0.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.1.0.zip => unsupported-2.1.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.1.1.zip => unsupported-2.1.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.1.2.zip => unsupported-2.1.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.2.0.zip => unsupported-2.2.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.2.1.zip => unsupported-2.2.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.2.2.zip => unsupported-2.2.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.3.0.zip => unsupported-2.3.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.3.1.zip => unsupported-2.3.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.3.2.zip => unsupported-2.3.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.3.3.zip => unsupported-2.3.3.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.3.4.zip => unsupported-2.3.4.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.3.5.zip => unsupported-2.3.5.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.4.0.zip => unsupported-2.4.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.4.1.zip => unsupported-2.4.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{index-2.4.2.zip => unsupported-2.4.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.0.0-beta1.zip => unsupportedrepo-2.0.0-beta1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.0.0-beta2.zip => unsupportedrepo-2.0.0-beta2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.0.0-rc1.zip => unsupportedrepo-2.0.0-rc1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.0.0.zip => unsupportedrepo-2.0.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.0.1.zip => unsupportedrepo-2.0.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.0.2.zip => unsupportedrepo-2.0.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.1.0.zip => unsupportedrepo-2.1.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.1.1.zip => unsupportedrepo-2.1.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.1.2.zip => unsupportedrepo-2.1.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.2.0.zip => unsupportedrepo-2.2.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.2.1.zip => unsupportedrepo-2.2.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.2.2.zip => unsupportedrepo-2.2.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.3.0.zip => unsupportedrepo-2.3.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.3.1.zip => unsupportedrepo-2.3.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.3.2.zip => unsupportedrepo-2.3.2.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.3.3.zip => unsupportedrepo-2.3.3.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.3.4.zip => unsupportedrepo-2.3.4.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.3.5.zip => unsupportedrepo-2.3.5.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.4.0.zip => unsupportedrepo-2.4.0.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.4.1.zip => unsupportedrepo-2.4.1.zip} (100%)
 rename core/src/test/resources/indices/bwc/{repo-2.4.2.zip => unsupportedrepo-2.4.2.zip} (100%)
 delete mode 100644 modules/percolator/src/test/resources/indices/percolator/bwc_index_2.0.0.zip
 delete mode 100644 plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java
 delete mode 100644 plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip
 delete mode 100644 plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java
 delete mode 100644 plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip

diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
index ac9770f2bc8b1..976c4706725ea 100644
--- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
+++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
@@ -41,7 +41,6 @@
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.mapper.DateFieldMapper;
-import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.StringFieldType;
@@ -336,11 +335,7 @@ private Query getRangeQuerySingle(String field, String part1, String part2,
         BytesRef part1Binary = part1 == null ? null : getAnalyzer().normalize(field, part1);
         BytesRef part2Binary = part2 == null ?
null : getAnalyzer().normalize(field, part2); Query rangeQuery; - if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) { - LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType; - rangeQuery = dateFieldType.rangeQuery(part1Binary, part2Binary, - startInclusive, endInclusive, settings.timeZone(), null, context); - } else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) { + if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) { DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType; rangeQuery = dateFieldType.rangeQuery(part1Binary, part2Binary, startInclusive, endInclusive, settings.timeZone(), null, context); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 48d163cdb26c1..e26732c8df8d8 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -305,8 +305,6 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null String parent = null; FetchSourceContext fetchSourceContext = defaultFetchSourceContext; String[] fields = defaultFields; - String timestamp = null; - TimeValue ttl = null; String opType = null; long version = Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; @@ -336,14 +334,6 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null routing = parser.text(); } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { parent = parser.text(); - } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { - timestamp = parser.text(); - } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { - if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName); - } else { - ttl = new TimeValue(parser.longValue()); - } } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { opType = parser.text(); } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { @@ -394,15 +384,15 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null // of index request. 
if ("index".equals(action)) { if (opType == null) { - internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) + internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType) .setPipeline(pipeline).source(data.slice(from, nextMarker - from)), payload); } else { - internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) + internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType) .create("create".equals(opType)).setPipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } } else if ("create".equals(action)) { - internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) + internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType) .create(true).setPipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } else if ("update".equals(action)) { @@ -420,15 +410,11 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { - upsertRequest.timestamp(timestamp); - upsertRequest.ttl(ttl); upsertRequest.version(version); upsertRequest.versionType(versionType); } IndexRequest doc = updateRequest.doc(); if (doc != null) { - doc.timestamp(timestamp); - doc.ttl(ttl); doc.version(version); doc.versionType(versionType); } diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index fdc248c4b37e9..b818f0f254861 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -20,10 +20,10 @@ package org.elasticsearch.action.index; import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.RoutingMissingException; -import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -41,7 +41,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -75,10 +74,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement private String routing; @Nullable private String parent; - @Nullable - private String timestamp; - @Nullable - private TimeValue ttl; private BytesReference source; @@ -164,12 +159,6 @@ public ActionRequestValidationException validate() { validationException = addValidationError("version type [force] may no longer be used", validationException); } - if (ttl != null) { - if (ttl.millis() < 0) { - validationException = addValidationError("ttl must not be negative", validationException); - } - } - if (id != null && 
id.getBytes(StandardCharsets.UTF_8).length > 512) { validationException = addValidationError("id is too long, must be no longer than 512 bytes but was: " + id.getBytes(StandardCharsets.UTF_8).length, validationException); @@ -265,49 +254,6 @@ public String parent() { return this.parent; } - /** - * Sets the timestamp either as millis since the epoch, or, in the configured date format. - */ - public IndexRequest timestamp(String timestamp) { - this.timestamp = timestamp; - return this; - } - - public String timestamp() { - return this.timestamp; - } - - /** - * Sets the ttl value as a time value expression. - */ - public IndexRequest ttl(String ttl) { - this.ttl = TimeValue.parseTimeValue(ttl, null, "ttl"); - return this; - } - - /** - * Sets the ttl as a {@link TimeValue} instance. - */ - public IndexRequest ttl(TimeValue ttl) { - this.ttl = ttl; - return this; - } - - /** - * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise. - */ - public IndexRequest ttl(long ttl) { - this.ttl = new TimeValue(ttl); - return this; - } - - /** - * Returns the ttl as a {@link TimeValue} - */ - public TimeValue ttl() { - return this.ttl; - } - /** * Sets the ingest pipeline to be executed before indexing the document */ @@ -537,11 +483,6 @@ public VersionType versionType() { public void process(@Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) { - // resolve timestamp if provided externally - if (timestamp != null) { - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, - mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER); - } if (mappingMd != null) { // might as well check for routing here if (mappingMd.routing().required() && routing == null) { @@ -563,30 +504,6 @@ public void process(@Nullable MappingMetaData mappingMd, boolean allowIdGenerati autoGeneratedTimestamp = Math.max(0, System.currentTimeMillis()); // extra paranoia id(UUIDs.base64UUID()); } - - // generate timestamp if not provided, we always have one post this stage... 
- if (timestamp == null) { - String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP; - if (mappingMd != null && mappingMd.timestamp() != null) { - // If we explicitly ask to reject null timestamp - if (mappingMd.timestamp().ignoreMissing() != null && mappingMd.timestamp().ignoreMissing() == false) { - throw new TimestampParsingException("timestamp is required by mapping"); - } - defaultTimestamp = mappingMd.timestamp().defaultTimestamp(); - } - - if (defaultTimestamp.equals(TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP)) { - timestamp = Long.toString(System.currentTimeMillis()); - } else { - // if we are here, the defaultTimestamp is not - // TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP but - // this can only happen if defaultTimestamp was - // assigned again because mappingMd and - // mappingMd#timestamp() are not null - assert mappingMd != null; - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter()); - } - } } /* resolve the routing if needed */ @@ -601,8 +518,10 @@ public void readFrom(StreamInput in) throws IOException { id = in.readOptionalString(); routing = in.readOptionalString(); parent = in.readOptionalString(); - timestamp = in.readOptionalString(); - ttl = in.readOptionalWriteable(TimeValue::new); + if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + in.readOptionalString(); // timestamp + in.readOptionalWriteable(TimeValue::new); // ttl + } source = in.readBytesReference(); opType = OpType.fromId(in.readByte()); version = in.readLong(); @@ -619,8 +538,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(id); out.writeOptionalString(routing); out.writeOptionalString(parent); - out.writeOptionalString(timestamp); - out.writeOptionalWriteable(ttl); + if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + out.writeOptionalString(null); + out.writeOptionalWriteable(null); + } out.writeBytesReference(source); out.writeByte(opType.getId()); out.writeLong(version); diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 310ef3fb92829..6f7064198264d 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -25,7 +25,6 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -231,38 +230,6 @@ public IndexRequestBuilder setVersionType(VersionType versionType) { return this; } - /** - * Sets the timestamp either as millis since the epoch, or, in the configured date format. - */ - public IndexRequestBuilder setTimestamp(String timestamp) { - request.timestamp(timestamp); - return this; - } - - /** - * Sets the ttl value as a time value expression. - */ - public IndexRequestBuilder setTTL(String ttl) { - request.ttl(ttl); - return this; - } - - /** - * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise. - */ - public IndexRequestBuilder setTTL(long ttl) { - request.ttl(ttl); - return this; - } - - /** - * Sets the ttl as a {@link TimeValue} instance. 
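
The readFrom/writeTo hunk above (IndexRequest) is the wire-compatibility idiom this patch applies wherever a field is removed: the field is gone from the object model, but streams exchanged with pre-6.0 nodes must still carry its slot. A minimal self-contained sketch of the idiom, reusing only the stream API visible in the hunk; the RequestShim class and its routing field are hypothetical:

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;

// Hypothetical request whose timestamp/ttl fields were removed in 6.0.
public class RequestShim {
    private String routing;

    public void readFrom(StreamInput in) throws IOException {
        routing = in.readOptionalString();
        if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
            // An older sender still writes the removed fields: read and discard.
            in.readOptionalString();                  // timestamp
            in.readOptionalWriteable(TimeValue::new); // ttl
        }
    }

    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalString(routing);
        if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
            // An older receiver still expects the removed fields: write empty slots.
            out.writeOptionalString(null);    // timestamp
            out.writeOptionalWriteable(null); // ttl
        }
    }
}
```

The same shape reappears below in MappingMetaData, with one refinement on the read side: the old `_timestamp` flags are consumed, but a mapping that actually enabled the feature is rejected with an exception rather than silently accepted.
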
- */ - public IndexRequestBuilder setTTL(TimeValue ttl) { - request.ttl(ttl); - return this; - } - /** * Sets the ingest pipeline to be executed before indexing the document */ diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index 2ed306ba709d4..23e57616ac205 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -171,7 +171,7 @@ protected WriteReplicaResult shardOperationOnReplica(IndexRequest request, Index public static Engine.IndexResult executeIndexRequestOnReplica(IndexRequest request, IndexShard replica) { final ShardId shardId = replica.shardId(); SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, shardId.getIndexName(), request.type(), request.id(), request.source()) - .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl()); + .routing(request.routing()).parent(request.parent()); final Engine.Index operation; try { @@ -189,7 +189,7 @@ public static Engine.IndexResult executeIndexRequestOnReplica(IndexRequest reque /** Utility method to prepare an index operation on primary shards */ static Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard primary) { SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.index(), request.type(), request.id(), request.source()) - .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl()); + .routing(request.routing()).parent(request.parent()); return primary.prepareIndexOnPrimary(sourceToParse, request.version(), request.versionType(), request.getAutoGeneratedTimestamp(), request.isRetry()); } diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index c976103441823..ef7b5e3d5bbed 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -156,8 +156,6 @@ private static List parseDocs(Map config) { ConfigurationUtils.readStringProperty(null, null, dataMap, MetaData.ID.getFieldName(), "_id"), ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.ROUTING.getFieldName()), ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.PARENT.getFieldName()), - ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.TIMESTAMP.getFieldName()), - ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.TTL.getFieldName()), document); ingestDocumentList.add(ingestDocument); } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 0e37b6ff064b2..7f5482afb0da5 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -42,8 +41,6 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ExecutableScript; @@ -55,7 +52,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.LongSupplier; @@ -76,7 +72,7 @@ public UpdateHelper(Settings settings, ScriptService scriptService) { */ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier nowInMillis) { final GetResult getResult = indexShard.getService().get(request.type(), request.id(), - new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME, TimestampFieldMapper.NAME}, + new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME}, true, request.version(), request.versionType(), FetchSourceContext.FETCH_SOURCE); return prepare(indexShard.shardId(), request, getResult, nowInMillis); } @@ -86,13 +82,11 @@ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier */ @SuppressWarnings("unchecked") protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult, LongSupplier nowInMillis) { - long getDateNS = System.nanoTime(); if (!getResult.isExists()) { if (request.upsertRequest() == null && !request.docAsUpsert()) { throw new DocumentMissingException(shardId, request.type(), request.id()); } IndexRequest indexRequest = request.docAsUpsert() ? request.doc() : request.upsertRequest(); - TimeValue ttl = indexRequest.ttl(); if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); @@ -103,10 +97,6 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult ctx.put("_source", upsertDoc); ctx.put("_now", nowInMillis.getAsLong()); ctx = executeScript(request.script, ctx); - //Allow the script to set TTL using ctx._ttl - if (ttl == null) { - ttl = getTTLFromScriptContext(ctx); - } //Allow the script to abort the create by setting "op" to "none" String scriptOpChoice = (String) ctx.get("op"); @@ -129,7 +119,6 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult indexRequest.index(request.index()).type(request.type()).id(request.id()) // it has to be a "create!" .create(true) - .ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()) .routing(request.routing()) .parent(request.parent()) @@ -155,8 +144,6 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); String operation = null; - String timestamp = null; - TimeValue ttl = null; final Map updatedSourceAsMap; final XContentType updateSourceContentType = sourceAndContent.v1(); String routing = getResult.getFields().containsKey(RoutingFieldMapper.NAME) ? 
getResult.field(RoutingFieldMapper.NAME).getValue().toString() : null; @@ -165,10 +152,6 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult if (request.script() == null && request.doc() != null) { IndexRequest indexRequest = request.doc(); updatedSourceAsMap = sourceAndContent.v2(); - if (indexRequest.ttl() != null) { - ttl = indexRequest.ttl(); - } - timestamp = indexRequest.timestamp(); if (indexRequest.routing() != null) { routing = indexRequest.routing(); } @@ -184,16 +167,12 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult } } else { Map ctx = new HashMap<>(16); - Long originalTtl = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; - Long originalTimestamp = getResult.getFields().containsKey(TimestampFieldMapper.NAME) ? (Long) getResult.field(TimestampFieldMapper.NAME).getValue() : null; ctx.put("_index", getResult.getIndex()); ctx.put("_type", getResult.getType()); ctx.put("_id", getResult.getId()); ctx.put("_version", getResult.getVersion()); ctx.put("_routing", routing); ctx.put("_parent", parent); - ctx.put("_timestamp", originalTimestamp); - ctx.put("_ttl", originalTtl); ctx.put("_source", sourceAndContent.v2()); ctx.put("_now", nowInMillis.getAsLong()); @@ -201,34 +180,14 @@ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult operation = (String) ctx.get("op"); - Object fetchedTimestamp = ctx.get("_timestamp"); - if (fetchedTimestamp != null) { - timestamp = fetchedTimestamp.toString(); - } else if (originalTimestamp != null) { - // No timestamp has been given in the update script, so we keep the previous timestamp if there is one - timestamp = originalTimestamp.toString(); - } - - ttl = getTTLFromScriptContext(ctx); - updatedSourceAsMap = (Map) ctx.get("_source"); } - // apply script to update the source - // No TTL has been given in the update script so we keep previous TTL value if there is one - if (ttl == null) { - Long ttlAsLong = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; - if (ttlAsLong != null) { - ttl = new TimeValue(ttlAsLong - TimeValue.nsecToMSec(System.nanoTime() - getDateNS));// It is an approximation of exact TTL value, could be improved - } - } - if (operation == null || "index".equals(operation)) { final IndexRequest indexRequest = Requests.indexRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .source(updatedSourceAsMap, updateSourceContentType) .version(updateVersion).versionType(request.versionType()) .waitForActiveShards(request.waitForActiveShards()) - .timestamp(timestamp).ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()); return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } else if ("delete".equals(operation)) { @@ -263,17 +222,6 @@ private Map executeScript(Script script, Map ctx return ctx; } - private TimeValue getTTLFromScriptContext(Map ctx) { - Object fetchedTTL = ctx.get("_ttl"); - if (fetchedTTL != null) { - if (fetchedTTL instanceof Number) { - return new TimeValue(((Number) fetchedTTL).longValue()); - } - return TimeValue.parseTimeValue((String) fetchedTTL, null, "_ttl"); - } - return null; - } - /** * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response. 
* For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index bbbc9bafd8f20..50d84a241291f 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -355,33 +354,4 @@ public UpdateRequestBuilder setScriptedUpsert(boolean scriptedUpsert) { return this; } - /** - * Set the new ttl of the document as a long. Note that if detectNoop is true (the default) - * and the source of the document isn't changed then the ttl update won't take - * effect. - */ - public UpdateRequestBuilder setTtl(Long ttl) { - request.doc().ttl(ttl); - return this; - } - - /** - * Set the new ttl of the document as a time value expression. Note that if detectNoop is true (the default) - * and the source of the document isn't changed then the ttl update won't take - * effect. - */ - public UpdateRequestBuilder setTtl(String ttl) { - request.doc().ttl(ttl); - return this; - } - - /** - * Set the new ttl of the document as a {@link TimeValue} instance. Note that if detectNoop is true (the default) - * and the source of the document isn't changed then the ttl update won't take - * effect. 
- */ - public UpdateRequestBuilder setTtl(TimeValue ttl) { - request.doc().ttl(ttl); - return this; - } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 0798dff1c9315..3ea61385f1c3f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -19,19 +19,17 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.action.TimestampParsingException; +import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import java.io.IOException; import java.util.Map; @@ -75,103 +73,17 @@ public int hashCode() { } } - public static class Timestamp { - - public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException { - try { - return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); - } catch (RuntimeException e) { - throw new TimestampParsingException(timestampAsString, e); - } - } - - - public static final Timestamp EMPTY = new Timestamp(false, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, - TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null); - - private final boolean enabled; - - private final String format; - - private final FormatDateTimeFormatter dateTimeFormatter; - - private final String defaultTimestamp; - - private final Boolean ignoreMissing; - - public Timestamp(boolean enabled, String format, String defaultTimestamp, Boolean ignoreMissing) { - this.enabled = enabled; - this.format = format; - this.dateTimeFormatter = Joda.forPattern(format); - this.defaultTimestamp = defaultTimestamp; - this.ignoreMissing = ignoreMissing; - } - - public boolean enabled() { - return enabled; - } - - public String format() { - return this.format; - } - - public String defaultTimestamp() { - return this.defaultTimestamp; - } - - public boolean hasDefaultTimestamp() { - return this.defaultTimestamp != null; - } - - public Boolean ignoreMissing() { - return ignoreMissing; - } - - public FormatDateTimeFormatter dateTimeFormatter() { - return this.dateTimeFormatter; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Timestamp timestamp = (Timestamp) o; - - if (enabled != timestamp.enabled) return false; - if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false; - if (defaultTimestamp != null ? !defaultTimestamp.equals(timestamp.defaultTimestamp) : timestamp.defaultTimestamp != null) return false; - if (ignoreMissing != null ? 
!ignoreMissing.equals(timestamp.ignoreMissing) : timestamp.ignoreMissing != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = (enabled ? 1 : 0); - result = 31 * result + (format != null ? format.hashCode() : 0); - result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0); - result = 31 * result + (defaultTimestamp != null ? defaultTimestamp.hashCode() : 0); - result = 31 * result + (ignoreMissing != null ? ignoreMissing.hashCode() : 0); - return result; - } - } - private final String type; private final CompressedXContent source; private Routing routing; - private Timestamp timestamp; private boolean hasParentField; public MappingMetaData(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); this.routing = new Routing(docMapper.routingFieldMapper().required()); - this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), - docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), - docMapper.timestampFieldMapper().ignoreMissing()); this.hasParentField = docMapper.parentFieldMapper().active(); } @@ -227,29 +139,6 @@ private void initMappers(Map withoutType) { } else { this.routing = Routing.EMPTY; } - if (withoutType.containsKey("_timestamp")) { - boolean enabled = false; - String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT; - String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP; - Boolean ignoreMissing = null; - Map timestampNode = (Map) withoutType.get("_timestamp"); - for (Map.Entry entry : timestampNode.entrySet()) { - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled")) { - enabled = lenientNodeBooleanValue(fieldNode); - } else if (fieldName.equals("format")) { - format = fieldNode.toString(); - } else if (fieldName.equals("default") && fieldNode != null) { - defaultTimestamp = fieldNode.toString(); - } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = lenientNodeBooleanValue(fieldNode); - } - } - this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); - } else { - this.timestamp = Timestamp.EMPTY; - } if (withoutType.containsKey("_parent")) { this.hasParentField = true; } else { @@ -257,11 +146,10 @@ private void initMappers(Map withoutType) { } } - public MappingMetaData(String type, CompressedXContent source, Routing routing, Timestamp timestamp, boolean hasParentField) { + public MappingMetaData(String type, CompressedXContent source, Routing routing, boolean hasParentField) { this.type = type; this.source = source; this.routing = routing; - this.timestamp = timestamp; this.hasParentField = hasParentField; } @@ -269,9 +157,6 @@ void updateDefaultMapping(MappingMetaData defaultMapping) { if (routing == Routing.EMPTY) { routing = defaultMapping.routing(); } - if (timestamp == Timestamp.EMPTY) { - timestamp = defaultMapping.timestamp(); - } } public String type() { @@ -309,21 +194,19 @@ public Routing routing() { return this.routing; } - public Timestamp timestamp() { - return this.timestamp; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(type()); source().writeTo(out); // routing out.writeBoolean(routing().required()); - // timestamp - out.writeBoolean(timestamp().enabled()); - out.writeString(timestamp().format()); - out.writeOptionalString(timestamp().defaultTimestamp()); - 
out.writeOptionalBoolean(timestamp().ignoreMissing()); + if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + // timestamp + out.writeBoolean(false); // enabled + out.writeString(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()); + out.writeOptionalString(null); + out.writeOptionalBoolean(null); + } out.writeBoolean(hasParentField()); } @@ -336,7 +219,6 @@ public boolean equals(Object o) { if (!routing.equals(that.routing)) return false; if (!source.equals(that.source)) return false; - if (!timestamp.equals(that.timestamp)) return false; if (!type.equals(that.type)) return false; return true; @@ -347,7 +229,6 @@ public int hashCode() { int result = type.hashCode(); result = 31 * result + source.hashCode(); result = 31 * result + routing.hashCode(); - result = 31 * result + timestamp.hashCode(); return result; } @@ -356,18 +237,19 @@ public MappingMetaData readFrom(StreamInput in) throws IOException { CompressedXContent source = CompressedXContent.readCompressedString(in); // routing Routing routing = new Routing(in.readBoolean()); - // timestamp - - boolean enabled = in.readBoolean(); - String format = in.readString(); - String defaultTimestamp = in.readOptionalString(); - Boolean ignoreMissing = null; - - ignoreMissing = in.readOptionalBoolean(); + if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + // timestamp + boolean enabled = in.readBoolean(); + if (enabled) { + throw new IllegalArgumentException("_timestamp may not be enabled"); + } + in.readString(); // format + in.readOptionalString(); // defaultTimestamp + in.readOptionalBoolean(); // ignoreMissing + } - final Timestamp timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); final boolean hasParentField = in.readBoolean(); - return new MappingMetaData(type, source, routing, timestamp, hasParentField); + return new MappingMetaData(type, source, routing, hasParentField); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 41b1923c43f3d..9e9923a92d5c0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -55,7 +55,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.recovery.RecoverySettings; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptMetaData; @@ -761,7 +760,6 @@ public static Builder builder(MetaData metaData) { /** All known time cluster settings. 
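Reviewer note: the MappingMetaData hunks above replace real _timestamp serialization with version-gated placeholders, so the wire format stays decodable by pre-6.0 nodes while the feature itself is gone. Below is a minimal sketch of that idiom, reusing the same StreamInput/StreamOutput calls as the patch; the class and method names are illustrative, not part of the patch.

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

final class TimestampBwcSketch {

    // When talking to a pre-6.0 node, emit the fields it expects, with
    // values that say "feature disabled".
    static void writePlaceholder(StreamOutput out, String defaultFormat) throws IOException {
        if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
            out.writeBoolean(false);          // enabled: always false now
            out.writeString(defaultFormat);   // format
            out.writeOptionalString(null);    // default timestamp
            out.writeOptionalBoolean(null);   // ignore_missing
        }
    }

    // When reading from a pre-6.0 node, consume and discard the same
    // fields, rejecting any stream that claims _timestamp was enabled.
    static void readPlaceholder(StreamInput in) throws IOException {
        if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
            if (in.readBoolean()) {
                throw new IllegalArgumentException("_timestamp may not be enabled");
            }
            in.readString();         // format, discarded
            in.readOptionalString(); // default timestamp, discarded
            in.readOptionalBoolean();// ignore_missing, discarded
        }
    }
}
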
*/ public static final Set CLUSTER_TIME_SETTINGS = unmodifiableSet(newHashSet( - IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(), diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index b261b8850c6f6..6e5b110563de1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -106,7 +106,7 @@ private void checkSupportedVersion(IndexMetaData indexMetaData) { * Returns true if this index can be supported by the current version of elasticsearch */ private static boolean isSupportedVersion(IndexMetaData indexMetaData) { - return indexMetaData.getCreationVersion().onOrAfter(Version.V_2_0_0_beta1); + return indexMetaData.getCreationVersion().onOrAfter(Version.V_5_0_0_beta1); } /** diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index d33616cbe6006..2046b1a6e14fa 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import java.io.IOException; @@ -41,9 +40,9 @@ public class GeoUtils { /** Minimum valid longitude in degrees. 
*/ public static final double MIN_LON = -180.0; - public static final String LATITUDE = GeoPointFieldMapper.Names.LAT; - public static final String LONGITUDE = GeoPointFieldMapper.Names.LON; - public static final String GEOHASH = GeoPointFieldMapper.Names.GEOHASH; + public static final String LATITUDE = "lat"; + public static final String LONGITUDE = "lon"; + public static final String GEOHASH = "geohash"; /** Earth ellipsoid major axis defined by WGS 84 in meters */ public static final double EARTH_SEMI_MAJOR_AXIS = 6378137.0; // meters (WGS 84) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 037109bff12b6..e00b4bc44f1ff 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -72,7 +72,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.monitor.fs.FsService; import org.elasticsearch.monitor.jvm.JvmGcMonitorService; import org.elasticsearch.monitor.jvm.JvmService; @@ -184,7 +183,6 @@ public void apply(Settings value, Settings current, Settings previous) { IndicesQueryCache.INDICES_CACHE_QUERY_SIZE_SETTING, IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING, IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING, - IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, MetaData.SETTING_READ_ONLY_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index 54f15feaa7416..490c837e85a82 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -26,7 +26,6 @@ import org.apache.lucene.codecs.lucene62.Lucene62Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -60,9 +59,6 @@ public PostingsFormat getPostingsFormatForField(String field) { logger.warn("no index mapper found for field: [{}] returning default postings format", field); } else if (fieldType instanceof CompletionFieldMapper.CompletionFieldType) { return CompletionFieldMapper.CompletionFieldType.postingsFormat(); - } else if (fieldType instanceof CompletionFieldMapper2x.CompletionFieldType) { - return ((CompletionFieldMapper2x.CompletionFieldType) fieldType).postingsFormat( - super.getPostingsFormatForField(field)); } return super.getPostingsFormatForField(field); } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index bd742ee628841..20def3bd89323 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -69,7 +69,6 @@ import org.elasticsearch.index.store.Store; import 
org.elasticsearch.index.translog.Translog; -import javax.net.ssl.SNIServerName; import java.io.Closeable; import java.io.FileNotFoundException; import java.io.IOException; @@ -1031,14 +1030,6 @@ public String routing() { return this.doc.routing(); } - public long timestamp() { - return this.doc.timestamp(); - } - - public long ttl() { - return this.doc.ttl(); - } - public String parent() { return this.doc.parent(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java index 23e770121a729..b35706961ba12 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java @@ -21,7 +21,6 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -52,19 +51,14 @@ public final XFieldComparatorSource comparatorSource(@Nullable Object missingVal * Lucene 5.4 GeoPointFieldType */ public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData { - final boolean indexCreatedBefore2x; - public GeoPointDVIndexFieldData(Index index, String fieldName, final boolean indexCreatedBefore2x) { + public GeoPointDVIndexFieldData(Index index, String fieldName) { super(index, fieldName); - this.indexCreatedBefore2x = indexCreatedBefore2x; } @Override public AtomicGeoPointFieldData load(LeafReaderContext context) { try { - if (indexCreatedBefore2x) { - return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName)); - } return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName)); } catch (IOException e) { throw new IllegalStateException("Cannot load doc values", e); @@ -81,13 +75,8 @@ public static class Builder implements IndexFieldData.Builder { @Override public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - if (indexSettings.getIndexVersionCreated().before(Version.V_2_2_0) - && fieldType.hasDocValues() == false) { - return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), cache, breakerService); - } // Ignore breaker - return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(), - indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)); + return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name()); } } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java deleted file mode 100644 index 0627e341a045d..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
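Side note on the AbstractGeoPointDVIndexFieldData hunk above: with the pre-2.2 branches gone, geo-point field data always comes from Lucene sorted-numeric doc values; there is no legacy binary-doc-values or terms-based fallback left. A stripped-down sketch of the single remaining load path follows; the class name is illustrative, and the real GeoPointDVIndexFieldData wraps the result in GeoPointDVAtomicFieldData.

import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;

import java.io.IOException;

final class DocValuesOnlyGeoLoadSketch {

    // After the removal there is exactly one code path: fetch the
    // sorted-numeric doc values for the field, or fail loudly.
    static SortedNumericDocValues load(LeafReaderContext context, String fieldName) {
        try {
            return DocValues.getSortedNumeric(context.reader(), fieldName);
        } catch (IOException e) {
            throw new IllegalStateException("Cannot load doc values", e);
        }
    }
}
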
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; -import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.BitSet; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.util.LongArray; -import org.elasticsearch.index.fielddata.FieldData; -import org.elasticsearch.index.fielddata.GeoPointValues; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; -import org.elasticsearch.index.fielddata.ordinals.Ordinals; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -public abstract class GeoPointArrayAtomicFieldData extends AbstractAtomicGeoPointFieldData { - @Override - public void close() { - } - - static class WithOrdinals extends GeoPointArrayAtomicFieldData { - private final LongArray indexedPoints; - private final Ordinals ordinals; - private final int maxDoc; - - public WithOrdinals(LongArray indexedPoints, Ordinals ordinals, int maxDoc) { - super(); - this.indexedPoints = indexedPoints; - this.ordinals = ordinals; - this.maxDoc = maxDoc; - } - - @Override - public long ramBytesUsed() { - return Integer.BYTES + indexedPoints.ramBytesUsed(); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("indexedPoints", indexedPoints)); - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final RandomAccessOrds ords = ordinals.ordinals(); - final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords); - final GeoPoint point = new GeoPoint(Double.NaN, Double.NaN); - if (singleOrds != null) { - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - final int ord = singleOrds.getOrd(docID); - if (ord >= 0) { - return point.resetFromIndexHash(indexedPoints.get(ord)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc)); - } - return new MultiGeoPointValues() { - @Override - public GeoPoint valueAt(int index) { - return point.resetFromIndexHash(indexedPoints.get(ords.ordAt(index))); - } - - @Override - public void setDocument(int docId) { - ords.setDocument(docId); - } - - @Override - public int count() { - return ords.cardinality(); - } - }; - } - } - - public static class Single extends GeoPointArrayAtomicFieldData { - private final LongArray indexedPoint; - private final BitSet set; - - public Single(LongArray indexedPoint, BitSet set) { - this.indexedPoint = indexedPoint; - this.set = set; - } - - @Override - public long ramBytesUsed() { - return Integer.BYTES + indexedPoint.ramBytesUsed() - + (set == null ? 
0 : set.ramBytesUsed()); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("indexedPoints", indexedPoint)); - if (set != null) { - resources.add(Accountables.namedAccountable("missing bitset", set)); - } - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final GeoPoint point = new GeoPoint(); - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - if (set == null || set.get(docID)) { - return point.resetFromIndexHash(indexedPoint.get(docID)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, set); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java deleted file mode 100644 index 18313f3274517..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.BitSet; -import org.elasticsearch.Version; -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.DoubleArray; -import org.elasticsearch.common.util.LongArray; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; -import org.elasticsearch.index.fielddata.FieldData; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fielddata.ordinals.Ordinals; -import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.breaker.CircuitBreakerService; - -/** - * Loads FieldData for an array of GeoPoints supporting both long encoded points and backward compatible double arrays - */ -public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData { - private final CircuitBreakerService breakerService; - - public GeoPointArrayIndexFieldData(IndexSettings indexSettings, String fieldName, - IndexFieldDataCache cache, CircuitBreakerService breakerService) { - super(indexSettings, fieldName, cache); - this.breakerService = breakerService; - } - - @Override - public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception { - LeafReader reader = context.reader(); - - Terms terms = reader.terms(getFieldName()); - AtomicGeoPointFieldData data = null; - // TODO: Use an actual estimator to estimate before loading. - NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA)); - if (terms == null) { - data = AbstractAtomicGeoPointFieldData.empty(reader.maxDoc()); - estimator.afterLoad(null, data.ramBytesUsed()); - return data; - } - return (indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)) ? 
- loadLegacyFieldData(reader, estimator, terms, data) : loadFieldData22(reader, estimator, terms, data); - } - - /** - * long encoded geopoint field data - */ - private AtomicGeoPointFieldData loadFieldData22(LeafReader reader, NonEstimatingEstimator estimator, Terms terms, - AtomicGeoPointFieldData data) throws Exception { - LongArray indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(128); - final float acceptableTransientOverheadRatio = OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO; - boolean success = false; - try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) { - final TermsEnum termsEnum; - final GeoPointField.TermEncoding termEncoding; - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_3_0)) { - termEncoding = GeoPointField.TermEncoding.PREFIX; - termsEnum = OrdinalsBuilder.wrapGeoPointTerms(terms.iterator()); - } else { - termEncoding = GeoPointField.TermEncoding.NUMERIC; - termsEnum = OrdinalsBuilder.wrapNumeric64Bit(terms.iterator()); - } - - final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(termsEnum), termEncoding); - - Long hashedPoint; - long numTerms = 0; - while ((hashedPoint = iter.next()) != null) { - indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms + 1); - indexedPoints.set(numTerms++, hashedPoint); - } - indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms); - - Ordinals build = builder.build(); - RandomAccessOrds ordinals = build.ordinals(); - if (FieldData.isMultiValued(ordinals) == false) { - int maxDoc = reader.maxDoc(); - LongArray sIndexedPoint = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(reader.maxDoc()); - for (int i=0; i getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("latitude", lat)); - resources.add(Accountables.namedAccountable("longitude", lon)); - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final RandomAccessOrds ords = ordinals.ordinals(); - final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords); - if (singleOrds != null) { - final GeoPoint point = new GeoPoint(); - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - final int ord = singleOrds.getOrd(docID); - if (ord >= 0) { - return point.reset(lat.get(ord), lon.get(ord)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc)); - } else { - final GeoPoint point = new GeoPoint(); - return new MultiGeoPointValues() { - - @Override - public GeoPoint valueAt(int index) { - final long ord = ords.ordAt(index); - if (ord >= 0) { - return point.reset(lat.get(ord), lon.get(ord)); - } - return point.reset(Double.NaN, Double.NaN); - } - - @Override - public void setDocument(int docId) { - ords.setDocument(docId); - } - - @Override - public int count() { - return ords.cardinality(); - } - }; - } - } - } - - /** - * Assumes unset values are marked in bitset, and docId is used as the index to the value array. 
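Side note: the Single variant below shows the layout the legacy field data used for single-valued fields, a value array indexed directly by docId plus an optional bitset marking which documents have a value at all (a null bitset meaning every document does). Here is a self-contained sketch of that pattern in plain JDK types; nothing in it is Elasticsearch API.

import java.util.BitSet;

// Simplified stand-in for the parallel-array-plus-bitset layout of the
// deleted legacy field data: docId indexes straight into the value array,
// and `set` (null = no missing values) marks which docs are populated.
final class SingleValuedDoublesSketch {
    private final double[] values;
    private final BitSet set;

    SingleValuedDoublesSketch(double[] values, BitSet set) {
        this.values = values;
        this.set = set;
    }

    double get(int docId, double missing) {
        if (set == null || set.get(docId)) {
            return values[docId];
        }
        return missing; // the deleted code used NaN as its sentinel
    }
}
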
- */ - public static class Single extends GeoPointArrayLegacyAtomicFieldData { - - private final DoubleArray lon, lat; - private final BitSet set; - - public Single(DoubleArray lon, DoubleArray lat, BitSet set) { - this.lon = lon; - this.lat = lat; - this.set = set; - } - - @Override - public long ramBytesUsed() { - return Integer.BYTES + lon.ramBytesUsed() + lat.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed()); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("latitude", lat)); - resources.add(Accountables.namedAccountable("longitude", lon)); - if (set != null) { - resources.add(Accountables.namedAccountable("missing bitset", set)); - } - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final GeoPoint point = new GeoPoint(); - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - if (set == null || set.get(docID)) { - return point.reset(lat.get(docID), lon.get(docID)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, set); - } - } - -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java deleted file mode 100644 index c51f2b96982d8..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
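Side note: the deleted reader below documents the legacy doc-values layout, where each document stores its points as consecutive 16-byte records, a little-endian double latitude followed by a little-endian double longitude. A sketch of just the decode step, using the same ByteUtils helper the deleted code relied on (the wrapper class here is illustrative):

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.ByteUtils;

final class LegacyGeoPointDecodeSketch {
    private static final int COORDINATE_SIZE = 8;            // bytes per double
    private static final int GEOPOINT_SIZE = 2 * COORDINATE_SIZE; // lat + lon

    // Decode the index-th point of a document's packed byte payload.
    static GeoPoint decode(byte[] bytes, int offset, int index) {
        double lat = ByteUtils.readDoubleLE(bytes, offset + index * GEOPOINT_SIZE);
        double lon = ByteUtils.readDoubleLE(bytes, offset + index * GEOPOINT_SIZE + COORDINATE_SIZE);
        return new GeoPoint(lat, lon);
    }
}
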
- */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.util.ByteUtils; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -final class GeoPointLegacyDVAtomicFieldData extends AbstractAtomicGeoPointFieldData { - - private static final int COORDINATE_SIZE = 8; // number of bytes per coordinate - private static final int GEOPOINT_SIZE = COORDINATE_SIZE * 2; // lat + lon - - private final BinaryDocValues values; - - GeoPointLegacyDVAtomicFieldData(BinaryDocValues values) { - super(); - this.values = values; - } - - @Override - public long ramBytesUsed() { - return 0; // not exposed by Lucene - } - - @Override - public Collection getChildResources() { - return Collections.emptyList(); - } - - @Override - public void close() { - // no-op - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - return new MultiGeoPointValues() { - - int count; - GeoPoint[] points = new GeoPoint[0]; - - @Override - public void setDocument(int docId) { - final BytesRef bytes = values.get(docId); - assert bytes.length % GEOPOINT_SIZE == 0; - count = (bytes.length >>> 4); - if (count > points.length) { - final int previousLength = points.length; - points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); - for (int i = previousLength; i < points.length; ++i) { - points[i] = new GeoPoint(Double.NaN, Double.NaN); - } - } - for (int i = 0; i < count; ++i) { - final double lat = ByteUtils.readDoubleLE(bytes.bytes, bytes.offset + i * GEOPOINT_SIZE); - final double lon = ByteUtils.readDoubleLE(bytes.bytes, bytes.offset + i * GEOPOINT_SIZE + COORDINATE_SIZE); - points[i].reset(lat, lon); - } - } - - @Override - public int count() { - return count; - } - - @Override - public GeoPoint valueAt(int index) { - return points[index]; - } - - }; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 1316183f862cd..372e7caf921d5 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -28,8 +28,6 @@ import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -52,8 +50,6 @@ public class FieldsVisitor extends StoredFieldVisitor { private static final Set BASE_REQUIRED_FIELDS = unmodifiableSet(newHashSet( UidFieldMapper.NAME, - TimestampFieldMapper.NAME, - TTLFieldMapper.NAME, RoutingFieldMapper.NAME, ParentFieldMapper.NAME)); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java index 87883b9d363c7..c4f9e7a1cb367 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java 
+++ b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java @@ -24,12 +24,9 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoHashUtils; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.logging.DeprecationLogger; @@ -46,7 +43,6 @@ import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -78,16 +74,6 @@ public static class Defaults { public abstract static class Builder extends FieldMapper.Builder { - protected boolean enableLatLon = Defaults.ENABLE_LATLON; - - protected Integer precisionStep; - - protected boolean enableGeoHash = Defaults.ENABLE_GEOHASH; - - protected boolean enableGeoHashPrefix = Defaults.ENABLE_GEOHASH_PREFIX; - - protected int geoHashPrecision = Defaults.GEO_HASH_PRECISION; - protected Boolean ignoreMalformed; public Builder(String name, MappedFieldType fieldType) { @@ -99,31 +85,6 @@ public GeoPointFieldType fieldType() { return (GeoPointFieldType)fieldType; } - public T enableLatLon(boolean enableLatLon) { - this.enableLatLon = enableLatLon; - return builder; - } - - public T precisionStep(int precisionStep) { - this.precisionStep = precisionStep; - return builder; - } - - public T enableGeoHash(boolean enableGeoHash) { - this.enableGeoHash = enableGeoHash; - return builder; - } - - public T geoHashPrefix(boolean enableGeoHashPrefix) { - this.enableGeoHashPrefix = enableGeoHashPrefix; - return builder; - } - - public T geoHashPrecision(int precision) { - this.geoHashPrecision = precision; - return builder; - } - public T ignoreMalformed(boolean ignoreMalformed) { this.ignoreMalformed = ignoreMalformed; return builder; @@ -144,112 +105,21 @@ public abstract Y build(BuilderContext context, String simpleName, MappedFieldTy FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { - // version 5.0 cuts over to LatLonPoint and no longer indexes geohash, or lat/lon separately - if (context.indexCreatedVersion().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return buildLegacy(context); - } return build(context, name, fieldType, defaultFieldType, context.indexSettings(), null, null, null, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); } - - private Y buildLegacy(Mapper.BuilderContext context) { - LegacyGeoPointFieldType geoPointFieldType = (LegacyGeoPointFieldType)fieldType; - - FieldMapper latMapper = null; - FieldMapper lonMapper = null; - - context.path().add(name); - if (enableLatLon) { - if (context.indexCreatedVersion().before(Version.V_5_0_0_alpha2)) { - LegacyNumberFieldMapper.Builder latMapperBuilder = new LegacyDoubleFieldMapper.Builder(Names.LAT).includeInAll(false); - LegacyNumberFieldMapper.Builder lonMapperBuilder = new LegacyDoubleFieldMapper.Builder(Names.LON).includeInAll(false); - if (precisionStep != null) { - latMapperBuilder.precisionStep(precisionStep); - lonMapperBuilder.precisionStep(precisionStep); - } - latMapper = (LegacyDoubleFieldMapper) 
latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - lonMapper = (LegacyDoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - } else { - latMapper = new NumberFieldMapper.Builder(Names.LAT, NumberFieldMapper.NumberType.DOUBLE) - .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - lonMapper = new NumberFieldMapper.Builder(Names.LON, NumberFieldMapper.NumberType.DOUBLE) - .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - } - geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); - } - FieldMapper geoHashMapper = null; - if (enableGeoHash || enableGeoHashPrefix) { - // TODO: possible also implicitly enable geohash if geohash precision is set - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha1)) { - geoHashMapper = new KeywordFieldMapper.Builder(Names.GEOHASH) - .index(true).includeInAll(false).store(fieldType.stored()).build(context); - } else { - geoHashMapper = new StringFieldMapper.Builder(Names.GEOHASH) - .tokenized(false).index(true).omitNorms(true).indexOptions(IndexOptions.DOCS) - .includeInAll(false).store(fieldType.stored()).build(context); - } - geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix); - } - context.path().remove(); - - return build(context, name, fieldType, defaultFieldType, context.indexSettings(), - latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); - } } public abstract static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder; - Version indexVersionCreated = parserContext.indexVersionCreated(); - if (indexVersionCreated.before(Version.V_2_2_0)) { - builder = new LegacyGeoPointFieldMapper.Builder(name); - } else if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - builder = new LatLonPointFieldMapper.Builder(name); - } else { - builder = new GeoPointFieldMapper.Builder(name); - } + Builder builder = new LatLonPointFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String propName = entry.getKey(); Object propNode = entry.getValue(); - if (indexVersionCreated.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - if (propName.equals("lat_lon")) { - deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed " - + "in the next major release"); - builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("precision_step")) { - deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed " - + "in the next major release"); - builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode)); - iterator.remove(); - } else if (propName.equals("geohash")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash parameter is deprecated and will be removed " - + "in the next major release"); - builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("geohash_prefix")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash_prefix 
parameter is deprecated and will be removed " - + "in the next major release"); - builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode)); - if (XContentMapValues.lenientNodeBooleanValue(propNode)) { - builder.enableGeoHash(true); - } - iterator.remove(); - } else if (propName.equals("geohash_precision")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash_precision parameter is deprecated and will be removed " - + "in the next major release"); - if (propNode instanceof Integer) { - builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode)); - } else { - builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString())); - } - iterator.remove(); - } - } if (propName.equals(Names.IGNORE_MALFORMED)) { builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode)); @@ -257,19 +127,13 @@ public abstract static class TypeParser implements Mapper.TypeParser { } } - if (builder instanceof LegacyGeoPointFieldMapper.Builder) { - return LegacyGeoPointFieldMapper.parse((LegacyGeoPointFieldMapper.Builder) builder, node, parserContext); - } else if (builder instanceof LatLonPointFieldMapper.Builder) { - return (LatLonPointFieldMapper.Builder) builder; - } - - return (GeoPointFieldMapper.Builder) builder; + return builder; } } - public abstract static class GeoPointFieldType extends MappedFieldType { - GeoPointFieldType() { - } + public static class GeoPointFieldType extends MappedFieldType { + + GeoPointFieldType() {} GeoPointFieldType(GeoPointFieldType ref) { super(ref); @@ -279,110 +143,10 @@ public abstract static class GeoPointFieldType extends MappedFieldType { public String typeName() { return CONTENT_TYPE; } - } - - public static class LegacyGeoPointFieldType extends GeoPointFieldType { - protected MappedFieldType geoHashFieldType; - protected int geoHashPrecision; - protected boolean geoHashPrefixEnabled; - - protected MappedFieldType latFieldType; - protected MappedFieldType lonFieldType; - - LegacyGeoPointFieldType() {} - - LegacyGeoPointFieldType(LegacyGeoPointFieldType ref) { - super(ref); - this.geoHashFieldType = ref.geoHashFieldType; // copying ref is ok, this can never be modified - this.geoHashPrecision = ref.geoHashPrecision; - this.geoHashPrefixEnabled = ref.geoHashPrefixEnabled; - this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified - this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified - } @Override public MappedFieldType clone() { - return new LegacyGeoPointFieldType(this); - } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - LegacyGeoPointFieldType that = (LegacyGeoPointFieldType) o; - return geoHashPrecision == that.geoHashPrecision && - geoHashPrefixEnabled == that.geoHashPrefixEnabled && - java.util.Objects.equals(geoHashFieldType, that.geoHashFieldType) && - java.util.Objects.equals(latFieldType, that.latFieldType) && - java.util.Objects.equals(lonFieldType, that.lonFieldType); - } - - @Override - public int hashCode() { - return java.util.Objects.hash(super.hashCode(), geoHashFieldType, geoHashPrecision, geoHashPrefixEnabled, latFieldType, - lonFieldType); - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { - super.checkCompatibility(fieldType, conflicts, strict); - LegacyGeoPointFieldType other = (LegacyGeoPointFieldType)fieldType; - if (isLatLonEnabled() != other.isLatLonEnabled()) { - conflicts.add("mapper [" + name() + "] has different 
[lat_lon]"); - } - if (isLatLonEnabled() && other.isLatLonEnabled() && - latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) { - conflicts.add("mapper [" + name() + "] has different [precision_step]"); - } - if (isGeoHashEnabled() != other.isGeoHashEnabled()) { - conflicts.add("mapper [" + name() + "] has different [geohash]"); - } - if (geoHashPrecision() != other.geoHashPrecision()) { - conflicts.add("mapper [" + name() + "] has different [geohash_precision]"); - } - if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) { - conflicts.add("mapper [" + name() + "] has different [geohash_prefix]"); - } - } - - public boolean isGeoHashEnabled() { - return geoHashFieldType != null; - } - - public MappedFieldType geoHashFieldType() { - return geoHashFieldType; - } - - public int geoHashPrecision() { - return geoHashPrecision; - } - - public boolean isGeoHashPrefixEnabled() { - return geoHashPrefixEnabled; - } - - public void setGeoHashEnabled(MappedFieldType geoHashFieldType, int geoHashPrecision, boolean geoHashPrefixEnabled) { - checkIfFrozen(); - this.geoHashFieldType = geoHashFieldType; - this.geoHashPrecision = geoHashPrecision; - this.geoHashPrefixEnabled = geoHashPrefixEnabled; - } - - public boolean isLatLonEnabled() { - return latFieldType != null; - } - - public MappedFieldType latFieldType() { - return latFieldType; - } - - public MappedFieldType lonFieldType() { - return lonFieldType; - } - - public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) { - checkIfFrozen(); - this.latFieldType = latFieldType; - this.lonFieldType = lonFieldType; + return new GeoPointFieldType(this); } @Override @@ -408,30 +172,15 @@ public Query termQuery(Object value, QueryShardContext context) { } } - protected FieldMapper latMapper; - - protected FieldMapper lonMapper; - - protected FieldMapper geoHashMapper; - protected Explicit ignoreMalformed; protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - this.latMapper = latMapper; - this.lonMapper = lonMapper; - this.geoHashMapper = geoHashMapper; this.ignoreMalformed = ignoreMalformed; } - - - public LegacyGeoPointFieldType legacyFieldType() { - return (LegacyGeoPointFieldType) super.fieldType(); - } - @Override protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { super.doMerge(mergeWith, updateAllTypes); @@ -441,26 +190,6 @@ protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { } } - @Override - public Iterator iterator() { - if (this instanceof LatLonPointFieldMapper == false) { - return Iterators.concat(super.iterator(), legacyIterator()); - } - return super.iterator(); - } - - public Iterator legacyIterator() { - List extras = new ArrayList<>(); - if (legacyFieldType().isGeoHashEnabled()) { - extras.add(geoHashMapper); - } - if (legacyFieldType().isLatLonEnabled()) { - extras.add(latMapper); - extras.add(lonMapper); - } - return extras.iterator(); - } - @Override protected String contentType() { return CONTENT_TYPE; @@ -472,16 +201,6 @@ protected void parseCreateField(ParseContext context, List field } protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - if (legacyFieldType().isGeoHashEnabled()) { - 
if (geoHash == null) { - geoHash = GeoHashUtils.stringEncode(point.lon(), point.lat()); - } - addGeoHashField(context, geoHash); - } - if (legacyFieldType().isLatLonEnabled()) { - latMapper.parse(context.createExternalValueContext(point.lat())); - lonMapper.parse(context.createExternalValueContext(point.lon())); - } multiFields.parse(this, context.createExternalValueContext(point)); } @@ -552,17 +271,6 @@ public Mapper parse(ParseContext context) throws IOException { return null; } - private void addGeoHashField(ParseContext context, String geoHash) throws IOException { - LegacyGeoPointFieldType ft = (LegacyGeoPointFieldType)fieldType; - int len = Math.min(ft.geoHashPrecision(), geoHash.length()); - int min = ft.isGeoHashPrefixEnabled() ? 1 : len; - - for (int i = len; i >= min; i--) { - // side effect of this call is adding the field - geoHashMapper.parse(context.createExternalValueContext(geoHash.substring(0, i))); - } - } - private void parsePointFromString(ParseContext context, GeoPoint sparse, String point) throws IOException { if (point.indexOf(',') < 0) { parse(context, sparse.resetFromGeoHash(point), point); @@ -574,51 +282,9 @@ private void parsePointFromString(ParseContext context, GeoPoint sparse, String @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (this instanceof LatLonPointFieldMapper == false) { - legacyDoXContentBody(builder, includeDefaults, params); - } if (includeDefaults || ignoreMalformed.explicit()) { builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } - protected void legacyDoXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - LegacyGeoPointFieldType ft = (LegacyGeoPointFieldType) fieldType; - if (includeDefaults || ft.isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { - builder.field("lat_lon", ft.isLatLonEnabled()); - } - if (ft.isLatLonEnabled() && (includeDefaults || ft.latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) { - builder.field("precision_step", ft.latFieldType().numericPrecisionStep()); - } - if (includeDefaults || ft.isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) { - builder.field("geohash", ft.isGeoHashEnabled()); - } - if (includeDefaults || ft.isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) { - builder.field("geohash_prefix", ft.isGeoHashPrefixEnabled()); - } - if (ft.isGeoHashEnabled() && (includeDefaults || ft.geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) { - builder.field("geohash_precision", ft.geoHashPrecision()); - } - } - - @Override - public FieldMapper updateFieldType(Map fullNameToFieldType) { - BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); - FieldMapper geoUpdated = geoHashMapper == null ? null : geoHashMapper.updateFieldType(fullNameToFieldType); - FieldMapper latUpdated = latMapper == null ? null : latMapper.updateFieldType(fullNameToFieldType); - FieldMapper lonUpdated = lonMapper == null ? 
null : lonMapper.updateFieldType(fullNameToFieldType); - if (updated == this - && geoUpdated == geoHashMapper - && latUpdated == latMapper - && lonUpdated == lonMapper) { - return this; - } - if (updated == this) { - updated = (BaseGeoPointFieldMapper) updated.clone(); - } - updated.geoHashMapper = geoUpdated; - updated.latMapper = latUpdated; - updated.lonMapper = lonUpdated; - return updated; - } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 30840d13a8930..c499c30c607a4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -115,9 +115,6 @@ public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { - return new CompletionFieldMapper2x.TypeParser().parse(name, node, parserContext); - } CompletionFieldMapper.Builder builder = new CompletionFieldMapper.Builder(name); NamedAnalyzer indexAnalyzer = null; NamedAnalyzer searchAnalyzer = null; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java deleted file mode 100644 index 95e3ceb79c495..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java +++ /dev/null @@ -1,603 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
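Reviewer note: CompletionFieldMapper2x, deleted below, carried its own postings format that PerFieldMappingPostingFormatCodec had to special-case (see the codec hunk earlier in this patch). After the removal, per-field dispatch reduces to a single completion branch. An illustrative reduction follows; the abstract hooks stand in for the MapperService lookup the real codec performs.

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene62.Lucene62Codec;

// Sketch of the simplified per-field dispatch: completion fields get the
// suggest postings format, every other field falls through to the default.
abstract class SuggestAwareCodecSketch extends Lucene62Codec {

    abstract boolean isCompletionField(String field);
    abstract PostingsFormat completionPostingsFormat();

    @Override
    public PostingsFormat getPostingsFormatForField(String field) {
        if (isCompletionField(field)) {
            return completionPostingsFormat();
        }
        return super.getPostingsFormatForField(field);
    }
}
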
- */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.NumberType; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.search.suggest.completion2x.AnalyzingCompletionLookupProvider; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat; -import org.elasticsearch.search.suggest.completion2x.CompletionTokenStream; -import org.elasticsearch.search.suggest.completion2x.context.ContextBuilder; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextConfig; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; - -import static org.elasticsearch.index.mapper.TypeParsers.parseMultiField; - -public class CompletionFieldMapper2x extends FieldMapper { - - public static final String CONTENT_TYPE = "completion"; - - public static class Defaults { - public static final CompletionFieldType FIELD_TYPE = new CompletionFieldType(); - - static { - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.freeze(); - } - - public static final boolean DEFAULT_PRESERVE_SEPARATORS = true; - public static final boolean DEFAULT_POSITION_INCREMENTS = true; - public static final boolean DEFAULT_HAS_PAYLOADS = false; - public static final int DEFAULT_MAX_INPUT_LENGTH = 50; - } - - public static class Fields { - // Mapping field names - public static final String ANALYZER = "analyzer"; - public static final ParseField SEARCH_ANALYZER = new ParseField("search_analyzer"); - public static final ParseField PRESERVE_SEPARATORS = new ParseField("preserve_separators"); - public static final ParseField PRESERVE_POSITION_INCREMENTS = new ParseField("preserve_position_increments"); - public static final String PAYLOADS = "payloads"; - public static final String TYPE = "type"; - public static final ParseField MAX_INPUT_LENGTH = new ParseField("max_input_length", "max_input_len"); - // Content field names - public static final String CONTENT_FIELD_NAME_INPUT = "input"; - public static final String CONTENT_FIELD_NAME_OUTPUT = "output"; - public static final String CONTENT_FIELD_NAME_PAYLOAD = "payload"; - public static final String CONTENT_FIELD_NAME_WEIGHT = "weight"; - public static final String CONTEXT = "context"; - } - - public static final Set ALLOWED_CONTENT_FIELD_NAMES; - static { - ALLOWED_CONTENT_FIELD_NAMES = new HashSet<>(); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_INPUT); - 
ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_OUTPUT); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_PAYLOAD); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_WEIGHT); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTEXT); - } - - public static class Builder extends FieldMapper.Builder { - - private boolean preserveSeparators = Defaults.DEFAULT_PRESERVE_SEPARATORS; - private boolean payloads = Defaults.DEFAULT_HAS_PAYLOADS; - private boolean preservePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS; - private int maxInputLength = Defaults.DEFAULT_MAX_INPUT_LENGTH; - private SortedMap contextMapping = ContextMapping.EMPTY_MAPPING; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); - builder = this; - } - - public Builder payloads(boolean payloads) { - this.payloads = payloads; - return this; - } - - public Builder preserveSeparators(boolean preserveSeparators) { - this.preserveSeparators = preserveSeparators; - return this; - } - - public Builder preservePositionIncrements(boolean preservePositionIncrements) { - this.preservePositionIncrements = preservePositionIncrements; - return this; - } - - public Builder maxInputLength(int maxInputLength) { - if (maxInputLength <= 0) { - throw new IllegalArgumentException( - Fields.MAX_INPUT_LENGTH.getPreferredName() + " must be > 0 but was [" + maxInputLength + "]"); - } - this.maxInputLength = maxInputLength; - return this; - } - - public Builder contextMapping(SortedMap contextMapping) { - this.contextMapping = contextMapping; - return this; - } - - @Override - public CompletionFieldMapper2x build(Mapper.BuilderContext context) { - setupFieldType(context); - CompletionFieldType completionFieldType = (CompletionFieldType) fieldType; - completionFieldType.setProvider( - new AnalyzingCompletionLookupProvider(preserveSeparators, preservePositionIncrements, payloads)); - completionFieldType.setContextMapping(contextMapping); - return new CompletionFieldMapper2x(name, fieldType, maxInputLength, context.indexSettings(), - multiFieldsBuilder.build(this, context), copyTo); - } - - } - - public static class TypeParser implements Mapper.TypeParser { - - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) - throws MapperParsingException { - CompletionFieldMapper2x.Builder builder = new Builder(name); - NamedAnalyzer indexAnalyzer = null; - NamedAnalyzer searchAnalyzer = null; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext(); ) { - Map.Entry entry = iterator.next(); - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (fieldName.equals("type")) { - continue; - } - if (Fields.ANALYZER.equals(fieldName) || // index_analyzer is for backcompat, remove for v3.0 - fieldName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - - indexAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString()); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, Fields.SEARCH_ANALYZER)) { - searchAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString()); - iterator.remove(); - } else if (fieldName.equals(Fields.PAYLOADS)) { - builder.payloads(Boolean.parseBoolean(fieldNode.toString())); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, Fields.PRESERVE_SEPARATORS)) { - builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString())); - iterator.remove(); - } else if 
(parserContext.parseFieldMatcher().match(fieldName, Fields.PRESERVE_POSITION_INCREMENTS)) { - builder.preservePositionIncrements(Boolean.parseBoolean(fieldNode.toString())); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, Fields.MAX_INPUT_LENGTH)) { - builder.maxInputLength(Integer.parseInt(fieldNode.toString())); - iterator.remove(); - } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { - iterator.remove(); - } else if (fieldName.equals(Fields.CONTEXT)) { - builder.contextMapping(ContextBuilder.loadMappings(fieldNode, parserContext.indexVersionCreated())); - iterator.remove(); - } - } - - if (indexAnalyzer == null) { - if (searchAnalyzer != null) { - throw new MapperParsingException( - "analyzer on completion field [" + name + "] must be set when search_analyzer is set"); - } - indexAnalyzer = searchAnalyzer = parserContext.getIndexAnalyzers().get("simple"); - } else if (searchAnalyzer == null) { - searchAnalyzer = indexAnalyzer; - } - builder.indexAnalyzer(indexAnalyzer); - builder.searchAnalyzer(searchAnalyzer); - - return builder; - } - - private NamedAnalyzer getNamedAnalyzer(ParserContext parserContext, String name) { - NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(name); - if (analyzer == null) { - throw new IllegalArgumentException("Can't find default or mapped analyzer with name [" + name + "]"); - } - return analyzer; - } - } - - public static final class CompletionFieldType extends TermBasedFieldType { - private PostingsFormat postingsFormat; - private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider; - private SortedMap contextMapping = ContextMapping.EMPTY_MAPPING; - - public CompletionFieldType() { - } - - protected CompletionFieldType(CompletionFieldType ref) { - super(ref); - this.postingsFormat = ref.postingsFormat; - this.analyzingSuggestLookupProvider = ref.analyzingSuggestLookupProvider; - this.contextMapping = ref.contextMapping; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof CompletionFieldType)) return false; - if (!super.equals(o)) return false; - CompletionFieldType fieldType = (CompletionFieldType) o; - return analyzingSuggestLookupProvider.getPreserveSep() == fieldType.analyzingSuggestLookupProvider.getPreserveSep() - && analyzingSuggestLookupProvider.getPreservePositionsIncrements() == - fieldType.analyzingSuggestLookupProvider.getPreservePositionsIncrements() && - analyzingSuggestLookupProvider.hasPayloads() == fieldType.analyzingSuggestLookupProvider.hasPayloads() && - Objects.equals(getContextMapping(), fieldType.getContextMapping()); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), - analyzingSuggestLookupProvider.getPreserveSep(), - analyzingSuggestLookupProvider.getPreservePositionsIncrements(), - analyzingSuggestLookupProvider.hasPayloads(), - getContextMapping()); - } - - @Override - public CompletionFieldType clone() { - return new CompletionFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { - super.checkCompatibility(fieldType, conflicts, strict); - CompletionFieldType other = (CompletionFieldType) fieldType; - if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) { - conflicts.add("mapper [" + name() + "] has different [payload] values"); - } - if 
(analyzingSuggestLookupProvider.getPreservePositionsIncrements() != - other.analyzingSuggestLookupProvider.getPreservePositionsIncrements()) { - conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values"); - } - if (analyzingSuggestLookupProvider.getPreserveSep() != other.analyzingSuggestLookupProvider.getPreserveSep()) { - conflicts.add("mapper [" + name() + "] has different [preserve_separators] values"); - } - if (!ContextMapping.mappingsAreEqual(getContextMapping(), other.getContextMapping())) { - conflicts.add("mapper [" + name() + "] has different [context_mapping] values"); - } - } - - public void setProvider(AnalyzingCompletionLookupProvider provider) { - checkIfFrozen(); - this.analyzingSuggestLookupProvider = provider; - } - - public synchronized PostingsFormat postingsFormat(PostingsFormat in) { - if (in instanceof Completion090PostingsFormat) { - throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class); - } - if (postingsFormat == null) { - postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider); - } - return postingsFormat; - } - - public void setContextMapping(SortedMap contextMapping) { - checkIfFrozen(); - this.contextMapping = contextMapping; - } - - /** - * Get the context mapping associated with this completion field - */ - public SortedMap getContextMapping() { - return contextMapping; - } - - /** - * @return true if a context mapping has been defined - */ - public boolean requiresContext() { - return contextMapping.isEmpty() == false; - } - } - - private static final BytesRef EMPTY = new BytesRef(); - - private int maxInputLength; - - public CompletionFieldMapper2x(String simpleName, MappedFieldType fieldType, int maxInputLength, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo); - this.maxInputLength = maxInputLength; - } - - @Override - public CompletionFieldType fieldType() { - return (CompletionFieldType) super.fieldType(); - } - - /** - * Parses and indexes inputs - * Parsing: - * Acceptable format: - * "STRING" - interpreted as field value (input) - * "ARRAY" - each element can be one of "OBJECT" (see below) - * "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT } - */ - @Override - public Mapper parse(ParseContext context) throws IOException { - XContentParser parser = context.parser(); - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_NULL) { - throw new MapperParsingException("completion field [" + fieldType().name() + "] does not support null values"); - } - - String surfaceForm = null; - BytesRef payload = null; - long weight = -1; - List inputs = new ArrayList<>(4); - - SortedMap contextConfig = null; - - if (token == XContentParser.Token.VALUE_STRING) { - inputs.add(parser.text()); - multiFields.parse(this, context); - } else { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) { - throw new IllegalArgumentException( - "Unknown field name[" + currentFieldName + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES); - } - } else if (Fields.CONTEXT.equals(currentFieldName)) { - SortedMap configs = new TreeMap<>(); - if (token == Token.START_OBJECT) { - while ((token = 
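/* Illustrative only -- the document shapes parse(...) accepts for a completion
 * field, per the javadoc above; "suggest" is a made-up field name, and the
 * keys come from ALLOWED_CONTENT_FIELD_NAMES:
 *   "suggest" : "Nevermind"
 *   "suggest" : { "input": ["Nevermind", "Nirvana"],
 *                 "output": "Nirvana - Nevermind",
 *                 "weight": 34, "payload": { "id": 1 },
 *                 "context": { ... } }
 */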
parser.nextToken()) != Token.END_OBJECT) { - String name = parser.currentName(); - ContextMapping mapping = fieldType().getContextMapping().get(name); - if (mapping == null) { - throw new ElasticsearchParseException("context [{}] is not defined", name); - } else { - token = parser.nextToken(); - configs.put(name, mapping.parseContext(context, parser)); - } - } - contextConfig = new TreeMap<>(); - for (ContextMapping mapping : fieldType().getContextMapping().values()) { - ContextConfig config = configs.get(mapping.name()); - contextConfig.put(mapping.name(), config == null ? mapping.defaultConfig() : config); - } - } else { - throw new ElasticsearchParseException("context must be an object"); - } - } else if (Fields.CONTENT_FIELD_NAME_PAYLOAD.equals(currentFieldName)) { - if (!isStoringPayloads()) { - throw new MapperException("Payloads disabled in mapping"); - } - if (token == XContentParser.Token.START_OBJECT) { - XContentBuilder payloadBuilder = - XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser); - payload = payloadBuilder.bytes().toBytesRef(); - payloadBuilder.close(); - } else if (token.isValue()) { - payload = parser.utf8BytesOrNull(); - } else { - throw new MapperException("payload doesn't support type " + token); - } - } else if (token == XContentParser.Token.VALUE_STRING) { - if (Fields.CONTENT_FIELD_NAME_OUTPUT.equals(currentFieldName)) { - surfaceForm = parser.text(); - } - if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) { - inputs.add(parser.text()); - } - if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) { - Number weightValue; - try { - weightValue = Long.parseLong(parser.text()); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "Weight must be a string representing a numeric value, but was [" + parser.text() + "]"); - } - weight = weightValue.longValue(); // always parse a long to make sure we don't get overflow - checkWeight(weight); - } - } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) { - NumberType numberType = parser.numberType(); - if (NumberType.LONG != numberType && NumberType.INT != numberType) { - throw new IllegalArgumentException( - "Weight must be an integer, but was [" + parser.numberValue() + "]"); - } - weight = parser.longValue(); // always parse a long to make sure we don't get overflow - checkWeight(weight); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - inputs.add(parser.text()); - } - } - } - } - } - - if (contextConfig == null) { - contextConfig = new TreeMap<>(); - for (ContextMapping mapping : fieldType().getContextMapping().values()) { - contextConfig.put(mapping.name(), mapping.defaultConfig()); - } - } - - final ContextMapping.Context ctx = new ContextMapping.Context(contextConfig, context.doc()); - - payload = payload == null ? 
EMPTY : payload; - if (surfaceForm == null) { // no surface form use the input - for (String input : inputs) { - if (input.length() == 0) { - continue; - } - BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef( - input), weight, payload); - context.doc().add(getCompletionField(ctx, input, suggestPayload)); - } - } else { - BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef( - surfaceForm), weight, payload); - for (String input : inputs) { - if (input.length() == 0) { - continue; - } - context.doc().add(getCompletionField(ctx, input, suggestPayload)); - } - } - return null; - } - - private void checkWeight(long weight) { - if (weight < 0 || weight > Integer.MAX_VALUE) { - throw new IllegalArgumentException("Weight must be in the interval [0..2147483647], but was [" + weight + "]"); - } - } - - public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) { - final String originalInput = input; - if (input.length() > maxInputLength) { - final int len = correctSubStringLen(input, Math.min(maxInputLength, input.length())); - input = input.substring(0, len); - } - for (int i = 0; i < input.length(); i++) { - if (isReservedChar(input.charAt(i))) { - throw new IllegalArgumentException("Illegal input [" + originalInput + "] UTF-16 codepoint [0x" - + Integer.toHexString(input.charAt(i)).toUpperCase(Locale.ROOT) - + "] at position " + i + " is a reserved character"); - } - } - return new SuggestField( - fieldType().name(), ctx, input, fieldType(), payload, fieldType().analyzingSuggestLookupProvider); - } - - public static int correctSubStringLen(String input, int len) { - if (Character.isHighSurrogate(input.charAt(len - 1))) { - assert input.length() >= len + 1 && Character.isLowSurrogate(input.charAt(len)); - return len + 1; - } - return len; - } - - public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException { - return fieldType().analyzingSuggestLookupProvider.buildPayload(surfaceForm, weight, payload); - } - - private static final class SuggestField extends Field { - private final BytesRef payload; - private final CompletionTokenStream.ToFiniteStrings toFiniteStrings; - private final ContextMapping.Context ctx; - - public SuggestField(String name, ContextMapping.Context ctx, - String value, MappedFieldType type, BytesRef payload, - CompletionTokenStream.ToFiniteStrings toFiniteStrings) { - super(name, value, type); - this.payload = payload; - this.toFiniteStrings = toFiniteStrings; - this.ctx = ctx; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - TokenStream ts = ctx.wrapTokenStream(super.tokenStream(analyzer, previous)); - return new CompletionTokenStream(ts, payload, toFiniteStrings); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(simpleName()) - .field(Fields.TYPE, CONTENT_TYPE); - - builder.field(Fields.ANALYZER, fieldType().indexAnalyzer().name()); - if (fieldType().indexAnalyzer().name().equals(fieldType().searchAnalyzer().name()) == false) { - builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType().searchAnalyzer().name()); - } - builder.field(Fields.PAYLOADS, fieldType().analyzingSuggestLookupProvider.hasPayloads()); - builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), - fieldType().analyzingSuggestLookupProvider.getPreserveSep()); - 
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), - fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()); - builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength); - multiFields.toXContent(builder, params); - - if (fieldType().requiresContext()) { - builder.startObject(Fields.CONTEXT); - for (ContextMapping mapping : fieldType().getContextMapping().values()) { - builder.value(mapping); - } - builder.endObject(); - } - - return builder.endObject(); - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - public boolean isStoringPayloads() { - return fieldType().analyzingSuggestLookupProvider.hasPayloads(); - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - CompletionFieldMapper2x fieldMergeWith = (CompletionFieldMapper2x) mergeWith; - this.maxInputLength = fieldMergeWith.maxInputLength; - } - - // this should be package private but our tests don't allow it. - public static boolean isReservedChar(char character) { - /* we use 0x001F as a SEP_LABEL in the suggester but we can use the UTF-16 representation since they - * are equivalent. We also don't need to convert the input character to UTF-8 here to check for - * the 0x00 end label since all multi-byte UTF-8 chars start with 0x10 binary so if the UTF-16 CP is == 0x00 - * it's the single byte UTF-8 CP */ - assert XAnalyzingSuggester.PAYLOAD_SEP == XAnalyzingSuggester.SEP_LABEL; // ensure they are the same! - switch (character) { - case XAnalyzingSuggester.END_BYTE: - case XAnalyzingSuggester.SEP_LABEL: - case XAnalyzingSuggester.HOLE_CHARACTER: - case ContextMapping.SEPARATOR: - return true; - default: - return false; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 401444e39b894..3681e4a55bc18 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -29,7 +29,6 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -42,7 +41,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; @@ -63,6 +61,10 @@ public class DateFieldMapper extends FieldMapper { public static final FormatDateTimeFormatter DEFAULT_DATE_TIME_FORMATTER = Joda.forPattern( "strict_date_optional_time||epoch_millis", Locale.ROOT); + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + } + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; @@ -128,9 +130,6 @@ public TypeParser() { @Override public Mapper.Builder parse(String name, Map node, 
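/* A note on the new Defaults block above: assuming this is Explicit<Boolean>
 * with the two-argument Explicit(value, explicit) constructor, the expression
 * new Explicit<>(false, false) reads as "ignore_malformed defaults to false
 * and was not explicitly set by the user". */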
ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - return new LegacyDateFieldMapper.TypeParser().parse(name, node, parserContext); - } Builder builder = new Builder(name); TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index b23c189e5bd2f..c7ee704de4fe2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -235,14 +235,6 @@ public ParentFieldMapper parentFieldMapper() { return metadataMapper(ParentFieldMapper.class); } - public TimestampFieldMapper timestampFieldMapper() { - return metadataMapper(TimestampFieldMapper.class); - } - - public TTLFieldMapper TTLFieldMapper() { - return metadataMapper(TTLFieldMapper.class); - } - public IndexFieldMapper IndexFieldMapper() { return metadataMapper(IndexFieldMapper.class); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 75c06a588a869..324b34fef9cdd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; @@ -153,8 +152,6 @@ private static ParsedDocument parsedDocument(SourceToParse source, ParseContext. 
context.sourceToParse().id(), context.sourceToParse().type(), source.routing(), - source.timestamp(), - source.ttl(), context.docs(), context.sourceToParse().source(), update @@ -636,35 +633,19 @@ private static Mapper.Builder createBuilderFromFieldType(final ParseContext } private static Mapper.Builder newLongBuilder(String name, Version indexCreated) { - if (indexCreated.onOrAfter(Version.V_5_0_0_alpha2)) { - return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG); - } else { - return new LegacyLongFieldMapper.Builder(name); - } + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG); } private static Mapper.Builder newFloatBuilder(String name, Version indexCreated) { - if (indexCreated.onOrAfter(Version.V_5_0_0_alpha2)) { - return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT); - } else { - return new LegacyFloatFieldMapper.Builder(name); - } + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT); } private static Mapper.Builder newDateBuilder(String name, FormatDateTimeFormatter dateTimeFormatter, Version indexCreated) { - if (indexCreated.onOrAfter(Version.V_5_0_0_alpha2)) { - DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); - if (dateTimeFormatter != null) { - builder.dateTimeFormatter(dateTimeFormatter); - } - return builder; - } else { - LegacyDateFieldMapper.Builder builder = new LegacyDateFieldMapper.Builder(name); - if (dateTimeFormatter != null) { - builder.dateTimeFormatter(dateTimeFormatter); - } - return builder; + DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); + if (dateTimeFormatter != null) { + builder.dateTimeFormatter(dateTimeFormatter); } + return builder; } private static Mapper.Builder createBuilderFromDynamicValue(final ParseContext context, XContentParser.Token token, String currentFieldName) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java deleted file mode 100644 index 655bf4aad0293..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.elasticsearch.Version; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.settings.Settings; - -import java.io.IOException; -import java.util.Map; - -/** - * Parsing: We handle: - *
- * - "field" : "geo_hash" - * - "field" : "lat,lon" - * - "field" : { - * "lat" : 1.1, - * "lon" : 2.1 - * } - */ -public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { - - public static final String CONTENT_TYPE = "geo_point"; - - public static class Defaults extends BaseGeoPointFieldMapper.Defaults { - - public static final GeoPointFieldType FIELD_TYPE = new LegacyGeoPointFieldType(); - - static { - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC); - FIELD_TYPE.setHasDocValues(true); - FIELD_TYPE.freeze(); - } - } - - /** - * Concrete builder for indexed GeoPointField type - */ - public static class Builder extends BaseGeoPointFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE); - this.builder = this; - } - - @Override - public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, - FieldMapper lonMapper, FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, - CopyTo copyTo) { - fieldType.setTokenized(false); - if (context.indexCreatedVersion().before(Version.V_2_3_0)) { - fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); - fieldType.setNumericType(FieldType.LegacyNumericType.LONG); - } - setupFieldType(context); - return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, - geoHashMapper, multiFields, ignoreMalformed, copyTo); - } - - @Override - public GeoPointFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().before(Version.V_2_3_0)) { - fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); - fieldType.setNumericType(FieldType.LegacyNumericType.LONG); - } - return super.build(context); - } - } - - public static class TypeParser extends BaseGeoPointFieldMapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - return super.parse(name, node, parserContext); - } - } - - public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - FieldMapper latMapper, FieldMapper lonMapper, - FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, - ignoreMalformed, copyTo); - } - - @Override - protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - if (ignoreMalformed.value() == false) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); - } - } else { - // LUCENE WATCH: This will be folded back into Lucene's GeoPointField - GeoUtils.normalizePoint(point); - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - context.doc().add(new GeoPointField(fieldType().name(), point.lat(), point.lon(), fieldType())); - } - super.parse(context, point, geoHash); - } - - @Override - public LegacyGeoPointFieldType fieldType() { - return (LegacyGeoPointFieldType) 
super.fieldType(); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 41e92a5722a78..9ccca4db4794b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; @@ -55,6 +54,10 @@ public class IpFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "ip"; + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + } + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; @@ -94,9 +97,6 @@ public TypeParser() { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - return new LegacyIpFieldMapper.TypeParser().parse(name, node, parserContext); - } Builder builder = new Builder(name); TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index ec73fa9571db6..688072150279e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -25,7 +25,6 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,14 +33,10 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.index.mapper.TypeParsers.parseField; /** @@ -51,12 +46,6 @@ public final class KeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "keyword"; - private static final List SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING = unmodifiableList(Arrays.asList( - "type", - // common keyword parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "boost", "fields", "copy_to", - "include_in_all", "ignore_above", "index_options", "similarity")); - public static class Defaults { public static final MappedFieldType FIELD_TYPE = new KeywordFieldType(); @@ -115,29 +104,6 @@ public KeywordFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws 
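/* Illustrative only -- what the removed downgrade below used to do on a 2.x
 * index: a mapping fragment { "type": "keyword", "index": true } was rewritten
 * to { "type": "string", "index": "not_analyzed" } (and "index": false to
 * "index": "no") before delegating to the string TypeParser. */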
MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { - // Downgrade "keyword" to "string" in indexes created in 2.x so you can use modern syntax against old indexes - Set unsupportedParameters = new HashSet<>(node.keySet()); - unsupportedParameters.removeAll(SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING); - if (false == SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING.containsAll(node.keySet())) { - throw new IllegalArgumentException("Automatic downgrade from [keyword] to [string] failed because parameters " - + unsupportedParameters + " are not supported for automatic downgrades."); - } - { // Downgrade "index" - Object index = node.get("index"); - if (index == null || Boolean.TRUE.equals(index)) { - index = "not_analyzed"; - } else if (Boolean.FALSE.equals(index)) { - index = "no"; - } else { - throw new IllegalArgumentException( - "Can't parse [index] value [" + index + "] for field [" + name + "], expected [true] or [false]"); - } - node.put("index", index); - } - - return new StringFieldMapper.TypeParser().parse(name, node, parserContext); - } KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java index 647dd315a203c..f5579751e5b57 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java @@ -23,7 +23,6 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -43,7 +42,6 @@ */ public class LatLonPointFieldMapper extends BaseGeoPointFieldMapper { public static final String CONTENT_TYPE = "geo_point"; - public static final Version LAT_LON_FIELD_VERSION = Version.V_5_0_0_beta1; public static class Defaults extends BaseGeoPointFieldMapper.Defaults { public static final LatLonPointFieldType FIELD_TYPE = new LatLonPointFieldType(); @@ -59,6 +57,7 @@ public static class Defaults extends BaseGeoPointFieldMapper.Defaults { public static class Builder extends BaseGeoPointFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE); + builder = this; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java deleted file mode 100644 index ff544d527b783..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeByteValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyByteFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "byte"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new ByteFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT); - builder = this; - } - - @Override - public LegacyByteFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyByteFieldMapper.Builder builder = new LegacyByteFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new 
MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeByteValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - static final class ByteFieldType extends NumberFieldType { - public ByteFieldType() { - super(LegacyNumericType.INT); - } - - protected ByteFieldType(ByteFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new ByteFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Byte nullValue() { - return (Byte)super.nullValue(); - } - - @Override - public Byte valueForDisplay(Object value) { - if (value == null) { - return null; - } - return ((Number) value).byteValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : (int)parseValue(lowerTerm), - upperTerm == null ? null : (int)parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Long stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinInt(terms); - long maxValue = LegacyNumericUtils.getMaxInt(terms); - return new FieldStats.Long(maxDoc, terms.getDocCount(), - terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), - minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.BYTE); - } - } - - protected LegacyByteFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public ByteFieldType fieldType() { - return (ByteFieldType) super.fieldType(); - } - - private static byte parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).byteValue(); - } - if (value instanceof BytesRef) { - return Byte.parseByte(((BytesRef) value).utf8ToString()); - } - return Byte.parseByte(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - byte value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Byte.parseByte(sExternalValue); - } - } else { - 
value = ((Number) externalValue).byteValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Byte.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Byte objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = (byte) parser.shortValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = (byte) parser.shortValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomByteNumericField field = new CustomByteNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - } - - public static class CustomByteNumericField extends CustomNumericField { - - private final byte number; - - public CustomByteNumericField(byte number, MappedFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setIntValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Byte.toString(number); - } - } -} diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java deleted file mode 100644 index cbd185a92e95f..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java +++ /dev/null @@ -1,529 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.LocaleUtils; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyDateFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "date"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT); - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - public static final DateFieldType FIELD_TYPE = new DateFieldType(); - - static { - 
FIELD_TYPE.freeze(); - } - - public static final String NULL_VALUE = null; - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; - - private Locale locale; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - // do *NOT* rely on the default locale - locale = Locale.ROOT; - } - - @Override - public DateFieldType fieldType() { - return (DateFieldType)fieldType; - } - - public Builder timeUnit(TimeUnit timeUnit) { - fieldType().setTimeUnit(timeUnit); - return this; - } - - public Builder nullValue(String nullValue) { - this.nullValue = nullValue; - return this; - } - - public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - fieldType().setDateTimeFormatter(dateTimeFormatter); - return this; - } - - @Override - public LegacyDateFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - fieldType.setNullValue(nullValue); - return new LegacyDateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected void setupFieldType(BuilderContext context) { - FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; - if (!locale.equals(dateTimeFormatter.locale())) { - fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); - } - super.setupFieldType(context); - } - - public Builder locale(Locale locale) { - this.locale = locale; - return this; - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyDateFieldMapper.Builder builder = new LegacyDateFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - boolean configuredFormat = false; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(propNode.toString()); - iterator.remove(); - } else if (propName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); - configuredFormat = true; - iterator.remove(); - } else if (propName.equals("numeric_resolution")) { - builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT))); - iterator.remove(); - } else if (propName.equals("locale")) { - builder.locale(LocaleUtils.parse(propNode.toString())); - iterator.remove(); - } - } - if (!configuredFormat) { - builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER); - } - return builder; - } - } - - public static class DateFieldType extends NumberFieldType { - - protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; - protected TimeUnit timeUnit = Defaults.TIME_UNIT; - protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); - - 
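/* With the default "strict_date_optional_time||epoch_millis" pattern declared
 * in Defaults above, both "2015-06-30T12:00:00Z" and "1435665600000" parse to
 * the same instant; the "||" separator tries each format in turn. The values
 * are illustrative. */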
public DateFieldType() { - super(LegacyNumericType.LONG); - } - - protected DateFieldType(DateFieldType ref) { - super(ref); - this.dateTimeFormatter = ref.dateTimeFormatter; - this.timeUnit = ref.timeUnit; - this.dateMathParser = ref.dateMathParser; - } - - @Override - public DateFieldType clone() { - return new DateFieldType(this); - } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - DateFieldType that = (DateFieldType) o; - return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) && - Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) && - Objects.equals(timeUnit, that.timeUnit); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { - super.checkCompatibility(fieldType, conflicts, strict); - if (strict) { - DateFieldType other = (DateFieldType)fieldType; - if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); - } - if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); - } - if (Objects.equals(timeUnit(), other.timeUnit()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types."); - } - } - } - - public FormatDateTimeFormatter dateTimeFormatter() { - return dateTimeFormatter; - } - - public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - checkIfFrozen(); - this.dateTimeFormatter = dateTimeFormatter; - this.dateMathParser = new DateMathParser(dateTimeFormatter); - } - - public TimeUnit timeUnit() { - return timeUnit; - } - - public void setTimeUnit(TimeUnit timeUnit) { - checkIfFrozen(); - this.timeUnit = timeUnit; - this.dateMathParser = new DateMathParser(dateTimeFormatter); - } - - protected DateMathParser dateMathParser() { - return dateMathParser; - } - - private long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString()); - } - return dateTimeFormatter().parser().parseMillis(value.toString()); - } - - protected long parseStringValue(String value) { - return dateTimeFormatter().parser().parseMillis(value); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Object valueForDisplay(Object value) { - Long val = (Long) value; - if (val == null) { - return null; - } - return dateTimeFormatter().printer().print(val); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context); - } - - @Override - public 
FieldStats.Date stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Date(maxDoc, terms.getDocCount(), - terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), - dateTimeFormatter(), minValue, maxValue); - } - - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { - return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context); - } - - private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null - : parseToMilliseconds(lowerTerm, !includeLower, timeZone, - forcedDateParser == null ? dateMathParser : forcedDateParser, context), - upperTerm == null ? null - : parseToMilliseconds(upperTerm, includeUpper, timeZone, - forcedDateParser == null ? dateMathParser : forcedDateParser, context), - includeLower, includeUpper); - } - - @Override - public Relation isFieldWithinQuery(IndexReader reader, - Object from, Object to, - boolean includeLower, boolean includeUpper, - DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException { - if (dateParser == null) { - dateParser = this.dateMathParser; - } - - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - // no terms, so nothing matches - return Relation.DISJOINT; - } - - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - - long fromInclusive = Long.MIN_VALUE; - if (from != null) { - fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context); - if (includeLower == false) { - if (fromInclusive == Long.MAX_VALUE) { - return Relation.DISJOINT; - } - ++fromInclusive; - } - } - - long toInclusive = Long.MAX_VALUE; - if (to != null) { - toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context); - if (includeUpper == false) { - if (toInclusive == Long.MIN_VALUE) { - return Relation.DISJOINT; - } - --toInclusive; - } - } - - if (minValue >= fromInclusive && maxValue <= toInclusive) { - return Relation.WITHIN; - } else if (maxValue < fromInclusive || minValue > toInclusive) { - return Relation.DISJOINT; - } else { - return Relation.INTERSECTS; - } - } - - public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, - @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { - if (value instanceof Long) { - return ((Long) value).longValue(); - } - - DateMathParser dateParser = dateMathParser(); - if (forcedDateParser != null) { - dateParser = forcedDateParser; - } - - String strValue; - if (value instanceof BytesRef) { - strValue = ((BytesRef) value).utf8ToString(); - } else { - strValue = value.toString(); - } - return dateParser.parse(strValue, context::nowInMillis, inclusive, zone); - } - - @Override - public IndexFieldData.Builder 
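/* Worked example for isFieldWithinQuery above, with made-up dates: if the
 * indexed terms span [2015-01-01, 2015-06-30], then a query range
 * [2014-12-01, 2016-01-01] is WITHIN, [2016-02-01, 2016-03-01] is DISJOINT,
 * and [2015-06-01, 2015-12-01] is INTERSECTS. */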
fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); - } - - @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { - FormatDateTimeFormatter dateTimeFormatter = this.dateTimeFormatter; - if (format != null) { - dateTimeFormatter = Joda.forPattern(format); - } - if (timeZone == null) { - timeZone = DateTimeZone.UTC; - } - return new DocValueFormat.DateTime(dateTimeFormatter, timeZone); - } - } - - protected LegacyDateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed,Explicit coerce, - Boolean includeInAll, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public DateFieldType fieldType() { - return (DateFieldType) super.fieldType(); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - String dateAsString = null; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - dateAsString = (String) externalValue; - if (dateAsString == null) { - dateAsString = fieldType().nullValueAsString(); - } - } else { - XContentParser parser = context.parser(); - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_NULL) { - dateAsString = fieldType().nullValueAsString(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - dateAsString = parser.text(); - } else if (token == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (token == XContentParser.Token.VALUE_NULL) { - dateAsString = fieldType().nullValueAsString(); - } else { - dateAsString = parser.text(); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - } else { - dateAsString = parser.text(); - } - } - - Long value = null; - if (dateAsString != null) { - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), dateAsString, boost); - } - value = fieldType().parseStringValue(dateAsString); - } - - if (value != null) { - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if 
(includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - builder.field("format", fieldType().dateTimeFormatter().format()); - if (includeDefaults || fieldType().nullValueAsString() != null) { - builder.field("null_value", fieldType().nullValueAsString()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) { - builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT)); - } - // only serialize locale if needed, ROOT is the default, so no need to serialize that case as well... - if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) { - builder.field("locale", fieldType().dateTimeFormatter().locale()); - } else if (includeDefaults) { - if (fieldType().dateTimeFormatter().locale() == null) { - builder.field("locale", Locale.ROOT); - } else { - builder.field("locale", fieldType().dateTimeFormatter().locale()); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java deleted file mode 100644 index 5e9a6b103ae92..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java +++ /dev/null @@ -1,331 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "double"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new DoubleFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - } - - @Override - public LegacyDoubleFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyDoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), - includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyDoubleFieldMapper.Builder builder = new LegacyDoubleFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("nullValue") || propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeDoubleValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - public static final class DoubleFieldType extends NumberFieldType { - - public DoubleFieldType() { - super(LegacyNumericType.DOUBLE); - } - - 
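For orientation while reading the deleted numeric mappers in this patch: all of them index values as prefix-coded terms over order-preserving integer encodings. Below is a minimal sketch of that order-preserving step for doubles, assuming only the JDK; toSortableLong is a hypothetical stand-in for Lucene's NumericUtils.doubleToSortableLong that DoubleFieldType uses further down, not the library code itself.

    // Reinterpret a double as a long whose signed ordering matches the numeric
    // ordering of the doubles, so a long-based LegacyNumericRangeQuery behaves
    // like a numeric range query over the original values.
    static long toSortableLong(double value) {
        long bits = Double.doubleToLongBits(value);
        // Positive doubles already sort correctly as signed longs; for negative
        // doubles, flip every bit except the sign bit so that "more negative"
        // maps to "smaller long".
        return bits ^ ((bits >> 63) & 0x7fffffffffffffffL);
    }

Under this mapping toSortableLong(-2.0) < toSortableLong(-1.0) < toSortableLong(1.5), which is why indexedValueForSearch below can hand the converted value straight to LegacyNumericUtils.longToPrefixCoded.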
protected DoubleFieldType(DoubleFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new DoubleFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public java.lang.Double nullValue() { - return (java.lang.Double)super.nullValue(); - } - - @Override - public java.lang.Double valueForDisplay(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).doubleValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToDouble((BytesRef) value); - } - return java.lang.Double.parseDouble(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseDoubleValue(lowerTerm), - upperTerm == null ? null : parseDoubleValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Double stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - double minValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)); - double maxValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)); - return new FieldStats.Double(maxDoc, terms.getDocCount(), - terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), - minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.DOUBLE); - } - } - - protected LegacyDoubleFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, - Explicit coerce, Boolean includeInAll, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public DoubleFieldType fieldType() { - return (DoubleFieldType) super.fieldType(); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - double value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = java.lang.Double.parseDouble(sExternalValue); - } - } else { - value = ((Number) externalValue).doubleValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), 
java.lang.Double.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - java.lang.Double objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.doubleValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.doubleValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomDoubleNumericField field = new CustomDoubleNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, NumericUtils.doubleToSortableLong(value)); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomDoubleNumericField extends CustomNumericField { - - private final double number; - - public CustomDoubleNumericField(double number, NumberFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setDoubleValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return java.lang.Double.toString(number); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java deleted file mode 100644 index ea28ba455bfde..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "float"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new FloatFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); - builder = this; - } - - @Override - public LegacyFloatFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyFloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), - includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - 
protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyFloatFieldMapper.Builder builder = new LegacyFloatFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeFloatValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - static final class FloatFieldType extends NumberFieldType { - - public FloatFieldType() { - super(LegacyNumericType.FLOAT); - } - - protected FloatFieldType(FloatFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new FloatFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Float nullValue() { - return (Float)super.nullValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - int intValue = NumericUtils.floatToSortableInt(parseValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? 
null : parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Double stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - float minValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms)); - float maxValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms)); - return new FieldStats.Double(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.FLOAT); - } - } - - protected LegacyFloatFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public FloatFieldType fieldType() { - return (FloatFieldType) super.fieldType(); - } - - private static float parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).floatValue(); - } - if (value instanceof BytesRef) { - return Float.parseFloat(((BytesRef) value).utf8ToString()); - } - return Float.parseFloat(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - float value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Float.parseFloat(sExternalValue); - } - } else { - value = ((Number) externalValue).floatValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Float.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Float objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if 
(parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.floatValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.floatValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomFloatNumericField field = new CustomFloatNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, NumericUtils.floatToSortableInt(value)); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomFloatNumericField extends CustomNumericField { - - private final float number; - - public CustomFloatNumericField(float number, NumberFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setFloatValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Float.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java deleted file mode 100644 index fc46a08ce1ae8..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import com.carrotsearch.hppc.ObjectHashSet; -import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.util.ByteUtils; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Map; - - -/** - * Parsing: We handle: - *
- * - "field" : "geo_hash" - * - "field" : "lat,lon" - * - "field" : { - * "lat" : 1.1, - * "lon" : 2.1 - * } - */ -public class LegacyGeoPointFieldMapper extends BaseGeoPointFieldMapper implements ArrayValueMapperParser { - - public static final String CONTENT_TYPE = "geo_point"; - - public static class Names extends BaseGeoPointFieldMapper.Names { - public static final String COERCE = "coerce"; - } - - public static class Defaults extends BaseGeoPointFieldMapper.Defaults{ - public static final Explicit COERCE = new Explicit<>(false, false); - - public static final GeoPointFieldType FIELD_TYPE = new LegacyGeoPointFieldType(); - - static { - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.freeze(); - } - } - - /** - * Concrete builder for legacy GeoPointField - */ - public static class Builder extends BaseGeoPointFieldMapper.Builder { - - private Boolean coerce; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE); - this.builder = this; - } - - public Builder coerce(boolean coerce) { - this.coerce = coerce; - return builder; - } - - protected Explicit coerce(BuilderContext context) { - if (coerce != null) { - return new Explicit<>(coerce, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); - } - return Defaults.COERCE; - } - - @Override - public LegacyGeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, - FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, - CopyTo copyTo) { - fieldType.setTokenized(false); - setupFieldType(context); - fieldType.setHasDocValues(false); - defaultFieldType.setHasDocValues(false); - return new LegacyGeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, - lonMapper, geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo); - } - - @Override - public LegacyGeoPointFieldMapper build(BuilderContext context) { - return super.build(context); - } - } - - public static Builder parse(Builder builder, Map node, Mapper.TypeParser.ParserContext parserContext) - throws MapperParsingException { - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals(Names.COERCE)) { - builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode); - iterator.remove(); - } - } - return builder; - } - - /** - * A byte-aligned fixed-length encoding for latitudes and longitudes. - */ - public static final class Encoding { - - // With 14 bytes we already have better precision than a double since a double has 11 bits of exponent - private static final int MAX_NUM_BYTES = 14; - - private static final Encoding[] INSTANCES; - static { - INSTANCES = new Encoding[MAX_NUM_BYTES + 1]; - for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) { - INSTANCES[numBytes] = new Encoding(numBytes); - } - } - - /** Get an instance based on the number of bytes that has been used to encode values. 
*/ - public static Encoding of(int numBytesPerValue) { - final Encoding instance = INSTANCES[numBytesPerValue]; - if (instance == null) { - throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value"); - } - return instance; - } - - /** Get an instance based on the expected precision. Here are examples of the number of required bytes per value - * depending on the - * expected precision:
- * - 1km: 4 bytes
- * - 3m: 6 bytes
- * - 1m: 8 bytes
- * - 1cm: 8 bytes
- * - 1mm: 10 bytes
*/ - public static Encoding of(DistanceUnit.Distance precision) { - for (Encoding encoding : INSTANCES) { - if (encoding != null && encoding.precision().compareTo(precision) <= 0) { - return encoding; - } - } - return INSTANCES[MAX_NUM_BYTES]; - } - - private final DistanceUnit.Distance precision; - private final int numBytes; - private final int numBytesPerCoordinate; - private final double factor; - - private Encoding(int numBytes) { - assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES; - assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment - this.numBytes = numBytes; - this.numBytesPerCoordinate = numBytes / 2; - this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9); - assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) - * factor < 180 : numBytesPerCoordinate + " " + factor; - if (numBytes == MAX_NUM_BYTES) { - // no precision loss compared to a double - precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT); - } else { - // factor/2 because we use Math.round instead of a cast to convert the double to a long - precision = new DistanceUnit.Distance( - GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), - DistanceUnit.DEFAULT); - } - } - - public DistanceUnit.Distance precision() { - return precision; - } - - /** The number of bytes required to encode a single geo point. */ - public int numBytes() { - return numBytes; - } - - /** The number of bits required to encode a single coordinate of a geo point. */ - public int numBitsPerCoordinate() { - return numBytesPerCoordinate << 3; - } - - /** Return the bits that encode a latitude/longitude. */ - public long encodeCoordinate(double lat) { - return Math.round((lat + 180) / factor); - } - - /** Decode a sequence of bits into the original coordinate. */ - public double decodeCoordinate(long bits) { - return bits * factor - 180; - } - - private void encodeBits(long bits, byte[] out, int offset) { - for (int i = 0; i < numBytesPerCoordinate; ++i) { - out[offset++] = (byte) bits; - bits >>>= 8; - } - assert bits == 0; - } - - private long decodeBits(byte [] in, int offset) { - long r = in[offset++] & 0xFFL; - for (int i = 1; i < numBytesPerCoordinate; ++i) { - r = (in[offset++] & 0xFFL) << (i * 8); - } - return r; - } - - /** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */ - public void encode(double lat, double lon, byte[] out, int offset) { - encodeBits(encodeCoordinate(lat), out, offset); - encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate); - } - - /** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */ - public GeoPoint decode(byte[] in, int offset, GeoPoint out) { - final long latBits = decodeBits(in, offset); - final long lonBits = decodeBits(in, offset + numBytesPerCoordinate); - return decode(latBits, lonBits, out); - } - - /** Decode a geo point from the bits of the encoded latitude and longitudes. 
*/ - public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { - final double lat = decodeCoordinate(latBits); - final double lon = decodeCoordinate(lonBits); - return out.reset(lat, lon); - } - - } - - protected Explicit coerce; - - public LegacyGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, FieldMapper geoHashMapper, - MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, - ignoreMalformed, copyTo); - this.coerce = coerce; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - - LegacyGeoPointFieldMapper gpfmMergeWith = (LegacyGeoPointFieldMapper) mergeWith; - if (gpfmMergeWith.coerce.explicit()) { - if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) { - throw new IllegalArgumentException("mapper [" + fieldType().name() + "] has different [coerce]"); - } - } - - if (gpfmMergeWith.coerce.explicit()) { - this.coerce = gpfmMergeWith.coerce; - } - } - - @Override - protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - boolean validPoint = false; - if (coerce.value() == false && ignoreMalformed.value() == false) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); - } - validPoint = true; - } - - if (coerce.value() && validPoint == false) { - // by setting coerce to false we are assuming all geopoints are already in a valid coordinate system - // thus this extra step can be skipped - GeoUtils.normalizePoint(point, true, true); - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ',' - + Double.toString(point.lon()), fieldType()); - context.doc().add(field); - } - - super.parse(context, point, geoHash); - - if (fieldType().hasDocValues()) { - CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc() - .getByKey(fieldType().name()); - if (field == null) { - field = new CustomGeoPointDocValuesField(fieldType().name(), point.lat(), point.lon()); - context.doc().addWithKey(fieldType().name(), field); - } else { - field.add(point.lat(), point.lon()); - } - } - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || coerce.explicit()) { - builder.field(Names.COERCE, coerce.value()); - } - } - - @Override - public LegacyGeoPointFieldType fieldType() { - return (LegacyGeoPointFieldType) super.fieldType(); - } - - public static class CustomGeoPointDocValuesField extends CustomDocValuesField { - - private final ObjectHashSet points; - - public CustomGeoPointDocValuesField(String name, double lat, double lon) { - super(name); - points = new ObjectHashSet<>(2); - points.add(new GeoPoint(lat, lon)); - } - - public void add(double lat, double lon) { - points.add(new GeoPoint(lat, lon)); - } - - @Override - public BytesRef binaryValue() { - final 
byte[] bytes = new byte[points.size() * 16]; - int off = 0; - for (Iterator> it = points.iterator(); it.hasNext(); ) { - final GeoPoint point = it.next().value; - ByteUtils.writeDoubleLE(point.getLat(), bytes, off); - ByteUtils.writeDoubleLE(point.getLon(), bytes, off + 8); - off += 16; - } - return new BytesRef(bytes); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java deleted file mode 100644 index 619b95b404df4..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "integer"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new IntegerFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); - builder = this; - } - - public Builder nullValue(int nullValue) { - this.fieldType.setNullValue(nullValue); - return 
this; - } - - @Override - public LegacyIntegerFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyIntegerFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyIntegerFieldMapper.Builder builder = new LegacyIntegerFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeIntegerValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - public static final class IntegerFieldType extends NumberFieldType { - - public IntegerFieldType() { - super(LegacyNumericType.INT); - } - - protected IntegerFieldType(IntegerFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new IntegerFieldType(this); - } - - @Override - public String typeName() { - return "integer"; - } - - @Override - public Integer nullValue() { - return (Integer)super.nullValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? 
null : parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Long stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinInt(terms); - long maxValue = LegacyNumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.INT); - } - } - - protected LegacyIntegerFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public IntegerFieldType fieldType() { - return (IntegerFieldType) super.fieldType(); - } - - private static int parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).intValue(); - } - if (value instanceof BytesRef) { - return Integer.parseInt(((BytesRef) value).utf8ToString()); - } - return Integer.parseInt(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - int value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Integer.parseInt(sExternalValue); - } - } else { - value = ((Number) externalValue).intValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Integer.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Integer objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = 
parser.intValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.intValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - addIntegerFields(context, fields, value, boost); - } - - protected void addIntegerFields(ParseContext context, List fields, int value, float boost) { - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomIntegerNumericField field = new CustomIntegerNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomIntegerNumericField extends CustomNumericField { - - private final int number; - - public CustomIntegerNumericField(int number, MappedFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setIntValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Integer.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java deleted file mode 100644 index ad6fcb642f546..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.network.Cidrs; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; - -import java.io.IOException; -import java.net.InetAddress; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyIpFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "ip"; - public static final long MAX_IP = 4294967296L; - - public static String longToIp(long longIp) { - int octet3 = (int) ((longIp >> 24) % 256); - int octet2 = (int) ((longIp >> 16) % 256); - int octet1 = (int) ((longIp >> 8) % 256); - int octet0 = (int) ((longIp) % 256); - return octet3 + "." + octet2 + "." + octet1 + "." 
+ octet0; - } - - private static final Pattern pattern = Pattern.compile("\\."); - - public static long ipToLong(String ip) { - try { - if (!InetAddresses.isInetAddress(ip)) { - throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ip address"); - } - String[] octets = pattern.split(ip); - if (octets.length != 4) { - throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ipv4 address (4 dots)"); - } - return (Long.parseLong(octets[0]) << 24) + (Integer.parseInt(octets[1]) << 16) + - (Integer.parseInt(octets[2]) << 8) + Integer.parseInt(octets[3]); - } catch (Exception e) { - if (e instanceof IllegalArgumentException) { - throw (IllegalArgumentException) e; - } - throw new IllegalArgumentException("failed to parse ip [" + ip + "]", e); - } - } - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final String NULL_VALUE = null; - - public static final MappedFieldType FIELD_TYPE = new IpFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - } - - @Override - public LegacyIpFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyIpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyIpFieldMapper.Builder builder = new Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(propNode.toString()); - iterator.remove(); - } - } - return builder; - } - } - - public static final class IpFieldType extends LegacyLongFieldMapper.LongFieldType { - - public IpFieldType() { - } - - protected IpFieldType(IpFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new IpFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - /** - * IPs should return as a string. 
- */ - @Override - public Object valueForDisplay(Object value) { - Long val = (Long) value; - if (val == null) { - return null; - } - return longToIp(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - if (value != null) { - String term; - if (value instanceof BytesRef) { - term = ((BytesRef) value).utf8ToString(); - } else { - term = value.toString(); - } - long[] fromTo; - // assume that the term is either a CIDR range or the - // term is a single IPv4 address; if either of these - // assumptions is wrong, the CIDR parsing will fail - // anyway, and that is okay - if (term.contains("/")) { - // treat the term as if it is in CIDR notation - fromTo = Cidrs.cidrMaskToMinMax(term); - } else { - // treat the term as if it is a single IPv4, and - // apply a CIDR mask equivalent to the host route - fromTo = Cidrs.cidrMaskToMinMax(term + "/32"); - } - if (fromTo != null) { - return rangeQuery(fromTo[0] == 0 ? null : fromTo[0], - fromTo[1] == MAX_IP ? null : fromTo[1], true, false, context); - } - } - return super.termQuery(value, context); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), - InetAddress.getByName(longToIp(minValue)), - InetAddress.getByName(longToIp(maxValue))); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new IndexFieldData.Builder() { - @Override - public IndexFieldData build(IndexSettings indexSettings, - MappedFieldType fieldType, IndexFieldDataCache cache, - CircuitBreakerService breakerService, MapperService mapperService) { - return new LegacyIpIndexFieldData(indexSettings.getIndex(), name()); - } - }; - } - - @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { - if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); - } - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); - } - return DocValueFormat.IP; - } - } - - protected LegacyIpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - 
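The termQuery above folds both a single address and a CIDR block into one numeric range with a half-open upper bound (rangeQuery(..., true, false), with an upper bound of MAX_IP translated to "unbounded"). Below is a rough, self-contained sketch of that arithmetic, assuming well-formed IPv4 input; ipv4ToLong and rangeForCidr are illustrative names, not the real Cidrs.cidrMaskToMinMax API:

    // Pack four dotted-quad octets into an unsigned 32-bit value carried in a long.
    static long ipv4ToLong(String ip) {
        String[] o = ip.split("\\.");
        return (Long.parseLong(o[0]) << 24) | (Long.parseLong(o[1]) << 16)
                | (Long.parseLong(o[2]) << 8) | Long.parseLong(o[3]);
    }

    // Expand "a.b.c.d/prefix" into [min, max) over the packed address space.
    static long[] rangeForCidr(String cidr) {
        String[] parts = cidr.split("/");
        int prefixLength = Integer.parseInt(parts[1]);
        long blockSize = 1L << (32 - prefixLength);   // number of addresses in the block
        long from = ipv4ToLong(parts[0]) & ~(blockSize - 1);
        return new long[] { from, from + blockSize }; // upper bound exclusive
    }

For example, rangeForCidr("10.0.0.0/8") returns [167772160, 184549376); a host route such as 1.2.3.4/32 degenerates to a single-address range, which matches how the mapper handles a bare IPv4 term by appending "/32".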
private static long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return ipToLong(((BytesRef) value).utf8ToString()); - } - return ipToLong(value.toString()); - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - String ipAsString; - if (context.externalValueSet()) { - ipAsString = (String) context.externalValue(); - if (ipAsString == null) { - ipAsString = fieldType().nullValueAsString(); - } - } else { - if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) { - ipAsString = fieldType().nullValueAsString(); - } else { - ipAsString = context.parser().text(); - } - } - - if (ipAsString == null) { - return; - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost()); - } - - final long value = ipToLong(ipAsString); - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(fieldType().boost()); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValueAsString() != null) { - builder.field("null_value", fieldType().nullValueAsString()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java deleted file mode 100644 index feb3328227d0b..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.fielddata.AtomicFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.search.MultiValueMode; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.nio.ByteBuffer; - -final class LegacyIpIndexFieldData implements IndexFieldData { - - protected final Index index; - protected final String fieldName; - protected final Logger logger; - - public LegacyIpIndexFieldData(Index index, String fieldName) { - this.index = index; - this.fieldName = fieldName; - this.logger = Loggers.getLogger(getClass()); - } - - public String getFieldName() { - return fieldName; - } - - public void clear() { - // nothing to do - } - - public void clear(IndexReader reader) { - // nothing to do - } - - public Index index() { - return index; - } - - @Override - public AtomicFieldData load(LeafReaderContext context) { - return new AtomicFieldData() { - - @Override - public void close() { - // no-op - } - - @Override - public long ramBytesUsed() { - return 0; - } - - @Override - public ScriptDocValues getScriptValues() { - throw new UnsupportedOperationException("Cannot run scripts on ip fields"); - } - - @Override - public SortedBinaryDocValues getBytesValues() { - SortedNumericDocValues values; - try { - values = DocValues.getSortedNumeric(context.reader(), fieldName); - } catch (IOException e) { - throw new IllegalStateException("Cannot load doc values", e); - } - return new SortedBinaryDocValues() { - - final ByteBuffer scratch = ByteBuffer.allocate(4); - - @Override - public BytesRef valueAt(int index) { - // we do not need to reorder ip addresses since both the numeric - // encoding of LegacyIpFieldMapper and the binary encoding of - // IpFieldMapper match the sort order of ip addresses - long ip = values.valueAt(index); - scratch.putInt(0, (int) ip); - InetAddress inet; - try { - inet = InetAddress.getByAddress(scratch.array()); - } catch (UnknownHostException e) { - throw new IllegalStateException("Cannot happen", e); - } - byte[] encoded = InetAddressPoint.encode(inet); - return new BytesRef(encoded); - } - - @Override - public void setDocument(int docId) { - values.setDocument(docId); - } - - @Override - public int count() { - return values.count(); - } - }; - } - }; - } - - @Override - public AtomicFieldData loadDirect(LeafReaderContext context) - throws Exception { - return load(context); - } - - @Override - public IndexFieldData.XFieldComparatorSource comparatorSource( - Object missingValue, MultiValueMode sortMode, Nested nested) { - return new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested); - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java deleted file mode 100644 index db423de0aba2c..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeLongValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyLongFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "long"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new LongFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - } - - public Builder nullValue(long nullValue) { - this.fieldType.setNullValue(nullValue); - return this; - } - - @Override - public LegacyLongFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyLongFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, context.indexSettings(), - 
multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyLongFieldMapper.Builder builder = new LegacyLongFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeLongValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - public static class LongFieldType extends NumberFieldType { - - public LongFieldType() { - super(LegacyNumericType.LONG); - } - - protected LongFieldType(LongFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new LongFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Long nullValue() { - return (Long)super.nullValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseLongValue(lowerTerm), - upperTerm == null ? 
null : parseLongValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); - } - } - - protected LegacyLongFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public LongFieldType fieldType() { - return (LongFieldType) super.fieldType(); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - long value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Long.parseLong(sExternalValue); - } - } else { - value = ((Number) externalValue).longValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Long.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Long objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.longValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no 
value - return; - } - value = objValue; - } else { - value = parser.longValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - } - - public static class CustomLongNumericField extends CustomNumericField { - - private final long number; - - public CustomLongNumericField(long number, MappedFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setLongValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Long.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java deleted file mode 100644 index dd5852e1a5ed8..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java +++ /dev/null @@ -1,321 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import java.io.IOException; -import java.io.Reader; -import java.util.List; - -import org.apache.lucene.analysis.LegacyNumericTokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.SortedNumericDocValuesField; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; - -public abstract class LegacyNumberFieldMapper extends FieldMapper { - // this is private since it has a different default - private static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); - - public static class Defaults { - - public static final int PRECISION_STEP_8_BIT = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful - public static final int PRECISION_STEP_16_BIT = 8; // 2tpv - public static final int PRECISION_STEP_32_BIT = 8; // 4tpv - public static final int PRECISION_STEP_64_BIT = 16; // 4tpv - - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - public static final Explicit COERCE = new Explicit<>(true, false); - } - - public abstract static class Builder extends FieldMapper.Builder { - - private Boolean ignoreMalformed; - - private Boolean coerce; - - public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { - super(name, fieldType, fieldType); - this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); - } - - public T precisionStep(int precisionStep) { - fieldType.setNumericPrecisionStep(precisionStep); - return builder; - } - - public T ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return builder; - } - - protected Explicit ignoreMalformed(BuilderContext context) { - if (ignoreMalformed != null) { - return new Explicit<>(ignoreMalformed, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); - } - return Defaults.IGNORE_MALFORMED; - } - - public T coerce(boolean coerce) { - this.coerce = coerce; - return builder; - } - - protected Explicit coerce(BuilderContext context) { - if (coerce != null) { - return new Explicit<>(coerce, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); - } - return Defaults.COERCE; - } - - protected void setupFieldType(BuilderContext context) { - super.setupFieldType(context); - int precisionStep = fieldType.numericPrecisionStep(); - if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { - fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); - } - } - - protected abstract int maxPrecisionStep(); - } - - public abstract static class NumberFieldType extends TermBasedFieldType { - - public NumberFieldType(LegacyNumericType numericType) { - setTokenized(false); - setOmitNorms(true); - setIndexOptions(IndexOptions.DOCS); - setStoreTermVectors(false); - setNumericType(numericType); - } - - protected NumberFieldType(NumberFieldType ref) { - super(ref); - } - - @Override - public void 
checkCompatibility(MappedFieldType other, - List conflicts, boolean strict) { - super.checkCompatibility(other, conflicts, strict); - if (numericPrecisionStep() != other.numericPrecisionStep()) { - conflicts.add("mapper [" + name() + "] has different [precision_step] values"); - } - } - - public abstract NumberFieldType clone(); - - @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); - } - if (format == null) { - return DocValueFormat.RAW; - } else { - return new DocValueFormat.Decimal(format); - } - } - } - - protected Boolean includeInAll; - - protected Explicit ignoreMalformed; - - protected Explicit coerce; - - protected LegacyNumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - assert fieldType.tokenized() == false; - this.ignoreMalformed = ignoreMalformed; - this.coerce = coerce; - this.includeInAll = includeInAll; - } - - @Override - protected LegacyNumberFieldMapper clone() { - return (LegacyNumberFieldMapper) super.clone(); - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - RuntimeException e = null; - try { - innerParseCreateField(context, fields); - } catch (IllegalArgumentException e1) { - e = e1; - } catch (MapperParsingException e2) { - e = e2; - } - - if (e != null && !ignoreMalformed.value()) { - throw e; - } - } - - protected abstract void innerParseCreateField(ParseContext context, List fields) throws IOException; - - protected final void addDocValue(ParseContext context, List fields, long value) { - fields.add(new SortedNumericDocValuesField(fieldType().name(), value)); - } - - /** - * Converts an object value into a double - */ - public static double parseDoubleValue(Object value) { - if (value instanceof Number) { - return ((Number) value).doubleValue(); - } - - if (value instanceof BytesRef) { - return Double.parseDouble(((BytesRef) value).utf8ToString()); - } - - return Double.parseDouble(value.toString()); - } - - /** - * Converts an object value into a long - */ - public static long parseLongValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - - if (value instanceof BytesRef) { - return Long.parseLong(((BytesRef) value).utf8ToString()); - } - - return Long.parseLong(value.toString()); - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - LegacyNumberFieldMapper nfmMergeWith = (LegacyNumberFieldMapper) mergeWith; - - this.includeInAll = nfmMergeWith.includeInAll; - if (nfmMergeWith.ignoreMalformed.explicit()) { - this.ignoreMalformed = nfmMergeWith.ignoreMalformed; - } - if (nfmMergeWith.coerce.explicit()) { - this.coerce = nfmMergeWith.coerce; - } - } - - // used to we can use a numeric field in a document that is then parsed twice! 
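The CustomNumericField that follows exists so a numeric field can be tokenized more than once (a document may be parsed twice): instead of allocating a LegacyNumericTokenStream per use, getCachedStream() keeps one stream per thread for each common precision step. Reduced to plain Java, with a hypothetical ExpensiveStream standing in for the Lucene class (the real field also keeps a per-instance ThreadLocal as the fallback rather than allocating):

    // Sketch of the per-thread, per-precision-step stream cache;
    // ExpensiveStream is a hypothetical stand-in for LegacyNumericTokenStream.
    final class StreamCache {
        static final class ExpensiveStream {
            final int precisionStep;
            ExpensiveStream(int precisionStep) { this.precisionStep = precisionStep; }
        }

        // One lazily created instance per thread for each hot precision step.
        private static final ThreadLocal<ExpensiveStream> STREAM_4 =
                ThreadLocal.withInitial(() -> new ExpensiveStream(4));
        private static final ThreadLocal<ExpensiveStream> STREAM_8 =
                ThreadLocal.withInitial(() -> new ExpensiveStream(8));

        static ExpensiveStream get(int precisionStep) {
            switch (precisionStep) {
                case 4:  return STREAM_4.get(); // reused, never shared across threads
                case 8:  return STREAM_8.get();
                default: return new ExpensiveStream(precisionStep); // simplification
            }
        }
    }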
- public abstract static class CustomNumericField extends Field { - - private ThreadLocal tokenStream = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(fieldType().numericPrecisionStep()); - } - }; - - private static ThreadLocal tokenStream4 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(4); - } - }; - - private static ThreadLocal tokenStream8 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(8); - } - }; - - private static ThreadLocal tokenStream16 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(16); - } - }; - - private static ThreadLocal tokenStreamMax = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(Integer.MAX_VALUE); - } - }; - - public CustomNumericField(Number value, MappedFieldType fieldType) { - super(fieldType.name(), fieldType); - if (value != null) { - this.fieldsData = value; - } - } - - protected LegacyNumericTokenStream getCachedStream() { - if (fieldType().numericPrecisionStep() == 4) { - return tokenStream4.get(); - } else if (fieldType().numericPrecisionStep() == 8) { - return tokenStream8.get(); - } else if (fieldType().numericPrecisionStep() == 16) { - return tokenStream16.get(); - } else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) { - return tokenStreamMax.get(); - } - return tokenStream.get(); - } - - @Override - public String stringValue() { - return null; - } - - @Override - public Reader readerValue() { - return null; - } - - public abstract String numericAsString(); - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || ignoreMalformed.explicit()) { - builder.field("ignore_malformed", ignoreMalformed.value()); - } - if (includeDefaults || coerce.explicit()) { - builder.field("coerce", coerce.value()); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java deleted file mode 100644 index a7cb4b6386442..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeShortValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyShortFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "short"; - public static final int DEFAULT_PRECISION_STEP = 8; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new ShortFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP); - builder = this; - } - - @Override - public LegacyShortFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyShortFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyShortFieldMapper.Builder builder = new LegacyShortFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeShortValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - static final class ShortFieldType extends NumberFieldType { - - public ShortFieldType() { - super(LegacyNumericType.INT); - } - - protected ShortFieldType(ShortFieldType ref) { - super(ref); - } - - @Override - public 
NumberFieldType clone() { - return new ShortFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Short nullValue() { - return (Short)super.nullValue(); - } - - @Override - public Short valueForDisplay(Object value) { - if (value == null) { - return null; - } - return ((Number) value).shortValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : (int)parseValue(lowerTerm), - upperTerm == null ? null : (int)parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Long stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinInt(terms); - long maxValue = LegacyNumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.SHORT); - } - } - - protected LegacyShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public ShortFieldType fieldType() { - return (ShortFieldType) super.fieldType(); - } - - private static short parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).shortValue(); - } - if (value instanceof BytesRef) { - return Short.parseShort(((BytesRef) value).utf8ToString()); - } - return Short.parseShort(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - short value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Short.parseShort(sExternalValue); - } - } else { - value = ((Number) externalValue).shortValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Short.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING 
&& parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Short objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.shortValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.shortValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomShortNumericField field = new CustomShortNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != DEFAULT_PRECISION_STEP) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomShortNumericField extends CustomNumericField { - - private final short number; - - public CustomShortNumericField(short number, NumberFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setIntValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Short.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java deleted file mode 100644 index 5dfb93d4836f1..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * 
license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.StringFieldMapper.ValueAndBoost; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.apache.lucene.index.IndexOptions.NONE; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -/** - * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string - * to the index. In most ways the mapper acts just like an {@link LegacyIntegerFieldMapper}. 
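The counting logic of this mapper (countPositions, further down) does not count tokens directly; it sums position increments from the analyzer's token stream and adds the final increment reported after end(), so stopword gaps and trailing gaps are reflected in the count. A self-contained equivalent against a plain Lucene analyzer:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

    // Mirrors the deleted countPositions helper: the count is the sum of all
    // position increments, plus the final increment reported by end().
    public final class PositionCounter {
        static int countPositions(Analyzer analyzer, String field, String value) throws java.io.IOException {
            try (TokenStream stream = analyzer.tokenStream(field, value)) {
                PositionIncrementAttribute posInc = stream.addAttribute(PositionIncrementAttribute.class);
                int count = 0;
                stream.reset();
                while (stream.incrementToken()) {
                    count += posInc.getPositionIncrement();
                }
                stream.end(); // accounts for any trailing position gap
                count += posInc.getPositionIncrement();
                return count;
            }
        }

        public static void main(String[] args) throws Exception {
            // 3 whitespace tokens, each with a position increment of 1.
            System.out.println(countPositions(new WhitespaceAnalyzer(), "f", "the quick fox"));
        }
    }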
- */ -public class LegacyTokenCountFieldMapper extends LegacyIntegerFieldMapper { - public static final String CONTENT_TYPE = "token_count"; - - public static class Defaults extends LegacyIntegerFieldMapper.Defaults { - - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - private NamedAnalyzer analyzer; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); - builder = this; - } - - public Builder analyzer(NamedAnalyzer analyzer) { - this.analyzer = analyzer; - return this; - } - - public NamedAnalyzer analyzer() { - return analyzer; - } - - @Override - public LegacyTokenCountFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyTokenCountFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, context.indexSettings(), - analyzer, multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - @SuppressWarnings("unchecked") - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyTokenCountFieldMapper.Builder builder = new LegacyTokenCountFieldMapper.Builder(name); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - builder.nullValue(nodeIntegerValue(propNode)); - iterator.remove(); - } else if (propName.equals("analyzer")) { - NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString()); - if (analyzer == null) { - throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); - } - builder.analyzer(analyzer); - iterator.remove(); - } - } - parseNumberField(builder, name, node, parserContext); - if (builder.analyzer() == null) { - throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't."); - } - return builder; - } - } - - private NamedAnalyzer analyzer; - - protected LegacyTokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, - Explicit coerce, Boolean includeInAll, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - this.analyzer = analyzer; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, fieldType().boost()); - if (valueAndBoost.value() == null && fieldType().nullValue() == null) { - return; - } - - if (fieldType().indexOptions() != NONE || fieldType().stored() || fieldType().hasDocValues()) { - int count; - if (valueAndBoost.value() == null) { - count = fieldType().nullValue(); - } else { - count = countPositions(analyzer, simpleName(), valueAndBoost.value()); - } - addIntegerFields(context, fields, count, valueAndBoost.boost()); - } - } - - /** - * Count position 
increments in a token stream. Package private for testing. - * @param analyzer analyzer to create token stream - * @param fieldName field name to pass to analyzer - * @param fieldValue field value to pass to analyzer - * @return number of position increments in a token stream - * @throws IOException if tokenStream throws it - */ - static int countPositions(Analyzer analyzer, String fieldName, String fieldValue) throws IOException { - try (TokenStream tokenStream = analyzer.tokenStream(fieldName, fieldValue)) { - int count = 0; - PositionIncrementAttribute position = tokenStream.addAttribute(PositionIncrementAttribute.class); - tokenStream.reset(); - while (tokenStream.incrementToken()) { - count += position.getPositionIncrement(); - } - tokenStream.end(); - count += position.getPositionIncrement(); - return count; - } - } - - /** - * Name of analyzer. - * @return name of analyzer - */ - public String analyzer() { - return analyzer.name(); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - this.analyzer = ((LegacyTokenCountFieldMapper) mergeWith).analyzer; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - builder.field("analyzer", analyzer()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index aa3ca2385407d..d903ab109dfac 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -36,7 +36,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.settings.Setting; @@ -48,7 +47,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; @@ -67,6 +65,11 @@ public class NumberFieldMapper extends FieldMapper { static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + public static final Explicit COERCE = new Explicit<>(true, false); + } + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; @@ -131,24 +134,6 @@ public TypeParser(NumberType type) { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - switch (type) { - case BYTE: - return new LegacyByteFieldMapper.TypeParser().parse(name, node, parserContext); - case SHORT: - return new LegacyShortFieldMapper.TypeParser().parse(name, node, parserContext); - case INTEGER: - return new 
LegacyIntegerFieldMapper.TypeParser().parse(name, node, parserContext); - case LONG: - return new LegacyLongFieldMapper.TypeParser().parse(name, node, parserContext); - case FLOAT: - return new LegacyFloatFieldMapper.TypeParser().parse(name, node, parserContext); - case DOUBLE: - return new LegacyDoubleFieldMapper.TypeParser().parse(name, node, parserContext); - default: - throw new AssertionError(); - } - } Builder builder = new Builder(name, type); TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java index dc0ba197b15c1..eb42c3b964950 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java @@ -39,10 +39,6 @@ public class ParsedDocument { private final String routing; - private final long timestamp; - - private final long ttl; - private final List documents; private BytesReference source; @@ -57,8 +53,6 @@ public ParsedDocument( String id, String type, String routing, - long timestamp, - long ttl, List documents, BytesReference source, Mapping dynamicMappingsUpdate) { @@ -68,8 +62,6 @@ public ParsedDocument( this.type = type; this.uid = Uid.createUidAsBytes(type, id); this.routing = routing; - this.timestamp = timestamp; - this.ttl = ttl; this.documents = documents; this.source = source; this.dynamicMappingsUpdate = dynamicMappingsUpdate; @@ -99,14 +91,6 @@ public String routing() { return this.routing; } - public long timestamp() { - return this.timestamp; - } - - public long ttl() { - return this.ttl; - } - public Document rootDoc() { return documents.get(documents.size() - 1); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index d9271bc9bf6de..2a76aa1addd60 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -49,7 +49,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; +import org.elasticsearch.index.mapper.NumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.DocValueFormat; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java index 14f6e9a8587f0..0cafc50bbe2ab 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.unit.TimeValue; public class SourceToParse { @@ -49,10 +48,6 @@ public static SourceToParse source(Origin origin, String index, String type, Str private String parentId; - private long timestamp; - - private long ttl; - private SourceToParse(Origin origin, String index, 
String type, String id, BytesReference source) { this.origin = Objects.requireNonNull(origin); this.index = Objects.requireNonNull(index); @@ -101,38 +96,6 @@ public SourceToParse routing(String routing) { return this; } - public long timestamp() { - return this.timestamp; - } - - public SourceToParse timestamp(String timestamp) { - this.timestamp = Long.parseLong(timestamp); - return this; - } - - public SourceToParse timestamp(long timestamp) { - this.timestamp = timestamp; - return this; - } - - public long ttl() { - return this.ttl; - } - - public SourceToParse ttl(TimeValue ttl) { - if (ttl == null) { - this.ttl = -1; - return this; - } - this.ttl = ttl.millis(); - return this; - } - - public SourceToParse ttl(long ttl) { - this.ttl = ttl; - return this; - } - public enum Origin { PRIMARY, REPLICA diff --git a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java deleted file mode 100644 index 55da99a13f07c..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java +++ /dev/null @@ -1,695 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.SortedSetDocValuesField; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -import static org.apache.lucene.index.IndexOptions.NONE; -import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; - -public class StringFieldMapper extends FieldMapper { - - public static final String CONTENT_TYPE = "string"; - private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; - - // If a string field is created on 5.x and all parameters are in this list then we - // will automatically upgrade to a text/keyword field. Otherwise we will just fail - // saying that string fields are not supported anymore. - private static final Set SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_KEYWORD = new HashSet<>(Arrays.asList( - "type", - // common keyword parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to", - "fielddata", "include_in_all", "ignore_above")); - private static final Set SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_TEXT = new HashSet<>(Arrays.asList( - "type", - // common text parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to", - "fielddata", "include_in_all", "analyzer", "search_analyzer", "search_quote_analyzer", - "index_options", "position_increment_gap")); - - public static class Defaults { - public static double FIELDDATA_MIN_FREQUENCY = 0; - public static double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE; - public static int FIELDDATA_MIN_SEGMENT_SIZE = 0; - - public static final MappedFieldType FIELD_TYPE = new StringFieldType(); - - static { - FIELD_TYPE.freeze(); - } - - // NOTE, when adding defaults here, make sure you add them in the builder - public static final String NULL_VALUE = null; - - public static final int IGNORE_ABOVE = -1; - } - - public static class Builder extends FieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; - - /** - * The distance between tokens from different values in the same field. - * POSITION_INCREMENT_GAP_USE_ANALYZER means default to the analyzer's - * setting which in turn defaults to Defaults.POSITION_INCREMENT_GAP. 
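positionIncrementGap is the standard Lucene notion: an artificial gap added between consecutive values of a multi-valued field so phrase queries cannot match across value boundaries. Wrapping the index, search, and search-quote analyzers in NamedAnalyzers with a gap, as the build method below does, corresponds roughly to this plain-Lucene override (a sketch, not the ES wiring):

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;

    final class GapExample {
        // A 100-position gap between field values; only meaningful when
        // positions are indexed, which is why the builder rejects the setting
        // for fields indexed without positions.
        static final Analyzer GAPPED = new Analyzer() {
            @Override
            protected TokenStreamComponents createComponents(String fieldName) {
                return new TokenStreamComponents(new WhitespaceTokenizer());
            }

            @Override
            public int getPositionIncrementGap(String fieldName) {
                return 100;
            }
        };
    }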
- */ - protected int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER; - - protected int ignoreAbove = Defaults.IGNORE_ABOVE; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); - builder = this; - } - - @Override - public StringFieldType fieldType() { - return (StringFieldType) super.fieldType(); - } - - @Override - public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) { - super.searchAnalyzer(searchAnalyzer); - return this; - } - - public Builder positionIncrementGap(int positionIncrementGap) { - this.positionIncrementGap = positionIncrementGap; - return this; - } - - public Builder ignoreAbove(int ignoreAbove) { - this.ignoreAbove = ignoreAbove; - return this; - } - - public Builder fielddata(boolean fielddata) { - fieldType().setFielddata(fielddata); - return builder; - } - - public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) { - fieldType().setEagerGlobalOrdinals(eagerGlobalOrdinals); - return builder; - } - - public Builder fielddataFrequencyFilter(double minFreq, double maxFreq, int minSegmentSize) { - fieldType().setFielddataMinFrequency(minFreq); - fieldType().setFielddataMaxFrequency(maxFreq); - fieldType().setFielddataMinSegmentSize(minSegmentSize); - return builder; - } - - @Override - protected void setupFieldType(BuilderContext context) { - super.setupFieldType(context); - if (fieldType().hasDocValues() && ((StringFieldType) fieldType()).fielddata()) { - ((StringFieldType) fieldType()).setFielddata(false); - } - } - - @Override - public StringFieldMapper build(BuilderContext context) { - // if the field is not analyzed, then by default, we should omit norms and have docs only - // index options, as probably what the user really wants - // if they are set explicitly, we will use those values - // we also change the values on the default field type so that toXContent emits what - // differs from the defaults - if (fieldType.indexOptions() != IndexOptions.NONE && !fieldType.tokenized()) { - defaultFieldType.setOmitNorms(true); - defaultFieldType.setIndexOptions(IndexOptions.DOCS); - if (!omitNormsSet && fieldType.boost() == 1.0f) { - fieldType.setOmitNorms(true); - } - if (!indexOptionsSet) { - fieldType.setIndexOptions(IndexOptions.DOCS); - } - } - if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { - if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { - throw new IllegalArgumentException("Cannot set position_increment_gap on field [" - + name + "] without positions enabled"); - } - fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap)); - fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap)); - fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap)); - } - setupFieldType(context); - return new StringFieldMapper( - name, fieldType(), defaultFieldType, positionIncrementGap, ignoreAbove, includeInAll, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - } - - public static class TypeParser implements Mapper.TypeParser { - private final DeprecationLogger deprecationLogger; - - public TypeParser() { - Logger logger = Loggers.getLogger(getClass()); - this.deprecationLogger = new DeprecationLogger(logger); - } - - @Override - public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { - if 
(parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) { - final Object index = node.get("index"); - if (Arrays.asList(null, "no", "not_analyzed", "analyzed").contains(index) == false) { - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]"); - } - final boolean keyword = index != null && "analyzed".equals(index) == false; - - // Automatically upgrade simple mappings for ease of upgrade, otherwise fail - Set autoUpgradeParameters = keyword - ? SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_KEYWORD - : SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_TEXT; - if (autoUpgradeParameters.containsAll(node.keySet())) { - deprecationLogger.deprecated("The [string] field is deprecated, please use [text] or [keyword] instead on [{}]", - fieldName); - { - // upgrade the index setting - node.put("index", "no".equals(index) == false); - } - { - // upgrade norms settings - Object norms = node.remove("norms"); - if (norms instanceof Map) { - norms = ((Map) norms).get("enabled"); - } - if (norms != null) { - node.put("norms", TypeParsers.nodeBooleanValue("norms", norms, parserContext)); - } - Object omitNorms = node.remove("omit_norms"); - if (omitNorms != null) { - node.put("norms", TypeParsers.nodeBooleanValue("omit_norms", omitNorms, parserContext) == false); - } - } - { - // upgrade fielddata settings - Object fielddataO = node.get("fielddata"); - if (fielddataO instanceof Map) { - Map fielddata = (Map) fielddataO; - if (keyword == false) { - node.put("fielddata", "disabled".equals(fielddata.get("format")) == false); - Map fielddataFilter = (Map) fielddata.get("filter"); - if (fielddataFilter != null) { - Map frequencyFilter = (Map) fielddataFilter.get("frequency"); - frequencyFilter.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size")); - node.put("fielddata_frequency_filter", frequencyFilter); - } - } else { - node.remove("fielddata"); - } - final Object loading = fielddata.get("loading"); - if (loading != null) { - node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading)); - } - } - } - if (keyword) { - return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext); - } else { - return new TextFieldMapper.TypeParser().parse(fieldName, node, parserContext); - } - - } - Set unsupportedParameters = new HashSet<>(node.keySet()); - unsupportedParameters.removeAll(autoUpgradeParameters); - throw new IllegalArgumentException("The [string] type is removed in 5.0 and automatic upgrade failed because parameters " - + unsupportedParameters + " are not supported for automatic upgrades. 
You should now use either a [text] " - + "or [keyword] field instead for field [" + fieldName + "]"); - } - - StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName); - // hack for the fact that string can't just accept true/false for - // the index property and still accepts no/not_analyzed/analyzed - final Object index = node.remove("index"); - if (index != null) { - final String normalizedIndex = index.toString(); - switch (normalizedIndex) { - case "analyzed": - builder.tokenized(true); - node.put("index", true); - break; - case "not_analyzed": - builder.tokenized(false); - node.put("index", true); - break; - case "no": - node.put("index", false); - break; - default: - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]"); - } - } - final Object fielddataObject = node.get("fielddata"); - if (fielddataObject instanceof Map) { - Map fielddata = (Map) fielddataObject; - final Object loading = fielddata.get("loading"); - if (loading != null) { - node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading)); - } - Map fielddataFilter = (Map) fielddata.get("filter"); - if (fielddataFilter != null) { - Map frequencyFilter = (Map) fielddataFilter.get("frequency"); - frequencyFilter.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size")); - node.put("fielddata_frequency_filter", frequencyFilter); - } - node.put("fielddata", "disabled".equals(fielddata.get("format")) == false); - } - parseTextField(builder, fieldName, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(propNode.toString()); - iterator.remove(); - } else if (propName.equals("position_increment_gap")) { - int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1); - if (newPositionIncrementGap < 0) { - throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed."); - } - builder.positionIncrementGap(newPositionIncrementGap); - // we need to update to actual analyzers if they are not set in this case... - // so we can inject the position increment gap... 
- if (builder.fieldType().indexAnalyzer() == null) { - builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer()); - } - if (builder.fieldType().searchAnalyzer() == null) { - builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer()); - } - if (builder.fieldType().searchQuoteAnalyzer() == null) { - builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer()); - } - iterator.remove(); - } else if (propName.equals("ignore_above")) { - builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1)); - iterator.remove(); - } else if (propName.equals("fielddata")) { - builder.fielddata(XContentMapValues.nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("eager_global_ordinals")) { - builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("fielddata_frequency_filter")) { - Map frequencyFilter = (Map) propNode; - double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0); - double maxFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE); - int minSegmentSize = XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0); - builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize); - DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated()); - iterator.remove(); - } - } - return builder; - } - } - - public static final class StringFieldType extends org.elasticsearch.index.mapper.StringFieldType { - - private boolean fielddata; - private double fielddataMinFrequency; - private double fielddataMaxFrequency; - private int fielddataMinSegmentSize; - - public StringFieldType() { - fielddata = true; - fielddataMinFrequency = Defaults.FIELDDATA_MIN_FREQUENCY; - fielddataMaxFrequency = Defaults.FIELDDATA_MAX_FREQUENCY; - fielddataMinSegmentSize = Defaults.FIELDDATA_MIN_SEGMENT_SIZE; - } - - protected StringFieldType(StringFieldType ref) { - super(ref); - this.fielddata = ref.fielddata; - this.fielddataMinFrequency = ref.fielddataMinFrequency; - this.fielddataMaxFrequency = ref.fielddataMaxFrequency; - this.fielddataMinSegmentSize = ref.fielddataMinSegmentSize; - } - - @Override - public boolean equals(Object o) { - if (super.equals(o) == false) { - return false; - } - StringFieldType that = (StringFieldType) o; - return fielddata == that.fielddata - && fielddataMinFrequency == that.fielddataMinFrequency - && fielddataMaxFrequency == that.fielddataMaxFrequency - && fielddataMinSegmentSize == that.fielddataMinSegmentSize; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), fielddata, - fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); - } - - public StringFieldType clone() { - return new StringFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType other, - List conflicts, boolean strict) { - super.checkCompatibility(other, conflicts, strict); - StringFieldType otherType = (StringFieldType) other; - if (strict) { - if (fielddata() != otherType.fielddata()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. 
Set update_all_types to true to update [fielddata] " - + "across all types."); - } - if (fielddataMinFrequency() != otherType.fielddataMinFrequency()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update " - + "[fielddata_frequency_filter.min] across all types."); - } - if (fielddataMaxFrequency() != otherType.fielddataMaxFrequency()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update " - + "[fielddata_frequency_filter.max] across all types."); - } - if (fielddataMinSegmentSize() != otherType.fielddataMinSegmentSize()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update " - + "[fielddata_frequency_filter.min_segment_size] across all types."); - } - } - } - - public boolean fielddata() { - return fielddata; - } - - public void setFielddata(boolean fielddata) { - checkIfFrozen(); - this.fielddata = fielddata; - } - - public double fielddataMinFrequency() { - return fielddataMinFrequency; - } - - public void setFielddataMinFrequency(double fielddataMinFrequency) { - checkIfFrozen(); - this.fielddataMinFrequency = fielddataMinFrequency; - } - - public double fielddataMaxFrequency() { - return fielddataMaxFrequency; - } - - public void setFielddataMaxFrequency(double fielddataMaxFrequency) { - checkIfFrozen(); - this.fielddataMaxFrequency = fielddataMaxFrequency; - } - - public int fielddataMinSegmentSize() { - return fielddataMinSegmentSize; - } - - public void setFielddataMinSegmentSize(int fielddataMinSegmentSize) { - checkIfFrozen(); - this.fielddataMinSegmentSize = fielddataMinSegmentSize; - } - - @Override - public Query nullValueQuery() { - if (nullValue() == null) { - return null; - } - return termQuery(nullValue(), null); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - if (hasDocValues()) { - return new DocValuesIndexFieldData.Builder(); - } else if (fielddata) { - return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); - } else { - throw new IllegalArgumentException("Fielddata is disabled on analyzed string fields by default. Set fielddata=true on [" - + name() + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however " - + "use significant memory."); - } - } - } - - private Boolean includeInAll; - private int positionIncrementGap; - private int ignoreAbove; - - protected StringFieldMapper(String simpleName, StringFieldType fieldType, MappedFieldType defaultFieldType, - int positionIncrementGap, int ignoreAbove, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha1)) { - throw new IllegalArgumentException("The [string] type is removed in 5.0. 
You should now use either a [text] " - + "or [keyword] field instead for field [" + fieldType.name() + "]"); - } - if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { - throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values"); - } - if (fieldType.hasDocValues() && ( - fieldType.fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY - || fieldType.fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY - || fieldType.fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE)) { - throw new MapperParsingException("Field [" + fieldType.name() + "] cannot have doc values and use fielddata filtering"); - } - this.positionIncrementGap = positionIncrementGap; - this.ignoreAbove = ignoreAbove; - this.includeInAll = includeInAll; - } - - @Override - protected StringFieldMapper clone() { - return (StringFieldMapper) super.clone(); - } - - @Override - protected boolean customBoost() { - return true; - } - - public int getPositionIncrementGap() { - return this.positionIncrementGap; - } - - public int getIgnoreAbove() { - return ignoreAbove; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType().boost()); - if (valueAndBoost.value() == null) { - return; - } - if (ignoreAbove > 0 && valueAndBoost.value().length() > ignoreAbove) { - return; - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), valueAndBoost.value(), valueAndBoost.boost()); - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType()); - if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(valueAndBoost.boost()); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); - } - } - - /** - * Parse a field as though it were a string. 
- * @param context parse context used during parsing - * @param nullValue value to use for null - * @param defaultBoost default boost value returned unless overwritten in the field - * @return the parsed field and the boost either parsed or defaulted - * @throws IOException if thrown while parsing - */ - public static ValueAndBoost parseCreateFieldForString(ParseContext context, String nullValue, float defaultBoost) throws IOException { - if (context.externalValueSet()) { - return new ValueAndBoost(context.externalValue().toString(), defaultBoost); - } - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { - return new ValueAndBoost(nullValue, defaultBoost); - } - if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - String value = nullValue; - float boost = defaultBoost; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - value = parser.textOrNull(); - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - return new ValueAndBoost(value, boost); - } - return new ValueAndBoost(parser.textOrNull(), defaultBoost); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll; - this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; - } - - @Override - protected String indexTokenizeOption(boolean indexed, boolean tokenized) { - if (!indexed) { - return "no"; - } else if (tokenized) { - return "analyzed"; - } else { - return "not_analyzed"; - } - } - - @Override - public StringFieldType fieldType() { - return (StringFieldType) super.fieldType(); - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - doXContentAnalyzers(builder, includeDefaults); - - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { - builder.field("position_increment_gap", positionIncrementGap); - } - - if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) { - builder.field("ignore_above", ignoreAbove); - } - if (includeDefaults || fieldType().fielddata() != ((StringFieldType) defaultFieldType).fielddata()) { - builder.field("fielddata", fieldType().fielddata()); - } - if (fieldType().fielddata()) { - if (includeDefaults - || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY - || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY - || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) { - 
builder.startObject("fielddata_frequency_filter"); - if (includeDefaults || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY) { - builder.field("min", fieldType().fielddataMinFrequency()); - } - if (includeDefaults || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY) { - builder.field("max", fieldType().fielddataMaxFrequency()); - } - if (includeDefaults || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) { - builder.field("min_segment_size", fieldType().fielddataMinSegmentSize()); - } - builder.endObject(); - } - } - } - - /** - * Parsed value and boost to be returned from {@link #parseCreateFieldForString}. - */ - public static class ValueAndBoost { - private final String value; - private final float boost; - - public ValueAndBoost(String value, float boost) { - this.value = value; - this.boost = boost; - } - - /** - * Value of string field. - * @return value of string field - */ - public String value() { - return value; - } - - /** - * Boost either parsed from the document or defaulted. - * @return boost either parsed from the document or defaulted - */ - public float boost() { - return boost; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java deleted file mode 100644 index b9978ed5afee2..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.Version;
-import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.AlreadyExpiredException;
-
-import java.io.IOException;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue;
-
-public class TTLFieldMapper extends MetadataFieldMapper {
-
-    public static final String NAME = "_ttl";
-    public static final String CONTENT_TYPE = "_ttl";
-
-    public static class Defaults extends LegacyLongFieldMapper.Defaults {
-        public static final String NAME = TTLFieldMapper.CONTENT_TYPE;
-
-        public static final TTLFieldType TTL_FIELD_TYPE = new TTLFieldType();
-
-        static {
-            TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
-            TTL_FIELD_TYPE.setStored(true);
-            TTL_FIELD_TYPE.setTokenized(false);
-            TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
-            TTL_FIELD_TYPE.setName(NAME);
-            TTL_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
-            TTL_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
-            TTL_FIELD_TYPE.freeze();
-        }
-
-        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
-        public static final long DEFAULT = -1;
-    }
-
-    public static class Builder extends MetadataFieldMapper.Builder<Builder, TTLFieldMapper> {
-
-        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
-        private long defaultTTL = Defaults.DEFAULT;
-
-        public Builder() {
-            super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE);
-        }
-
-        public Builder enabled(EnabledAttributeMapper enabled) {
-            this.enabledState = enabled;
-            return builder;
-        }
-
-        public Builder defaultTTL(long defaultTTL) {
-            this.defaultTTL = defaultTTL;
-            return builder;
-        }
-
-        @Override
-        public TTLFieldMapper build(BuilderContext context) {
-            setupFieldType(context);
-            fieldType.setHasDocValues(false);
-            return new TTLFieldMapper(fieldType, enabledState, defaultTTL, context.indexSettings());
-        }
-    }
-
-    public static class TypeParser implements MetadataFieldMapper.TypeParser {
-        @Override
-        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
-                throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
-            }
-            Builder builder = new Builder();
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = entry.getKey();
-                Object fieldNode = entry.getValue();
-                if (fieldName.equals("enabled")) {
-                    EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ?
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; - builder.enabled(enabledState); - iterator.remove(); - } else if (fieldName.equals("default")) { - TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null); - if (ttlTimeValue != null) { - builder.defaultTTL(ttlTimeValue.millis()); - } - iterator.remove(); - } - } - return builder; - } - - @Override - public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { - return new TTLFieldMapper(indexSettings); - } - } - - public static final class TTLFieldType extends LegacyLongFieldMapper.LongFieldType { - - public TTLFieldType() { - } - - protected TTLFieldType(TTLFieldType ref) { - super(ref); - } - - @Override - public TTLFieldType clone() { - return new TTLFieldType(this); - } - - // Overrides valueForSearch to display live value of remaining ttl - @Override - public Object valueForDisplay(Object value) { - final long now = System.currentTimeMillis(); - Long val = (Long) super.valueForDisplay(value); - return val - now; - } - } - - private EnabledAttributeMapper enabledState; - private long defaultTTL; - - private TTLFieldMapper(Settings indexSettings) { - this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, indexSettings); - } - - private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, - Settings indexSettings) { - super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings); - if (enabled.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) { - throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field."); - } - this.enabledState = enabled; - this.defaultTTL = defaultTTL; - } - - public boolean enabled() { - return this.enabledState.enabled; - } - - public long defaultTTL() { - return this.defaultTTL; - } - - @Override - public void preParse(ParseContext context) throws IOException { - } - - @Override - public void postParse(ParseContext context) throws IOException { - super.parse(context); - } - - @Override - public Mapper parse(ParseContext context) throws IOException, MapperParsingException { - if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally - long ttl; - if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(context.parser().text(), null, "ttl").millis(); - } else { - ttl = context.parser().longValue(true); - } - if (ttl <= 0) { - throw new MapperParsingException("TTL value must be > 0. 
Illegal value provided [" + ttl + "]"); - } - context.sourceToParse().ttl(ttl); - } - return null; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException, AlreadyExpiredException { - if (enabledState.enabled) { - long ttl = context.sourceToParse().ttl(); - if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value - ttl = defaultTTL; - context.sourceToParse().ttl(ttl); - } - if (ttl > 0) { // a ttl has been provided either externally or in the _source - long timestamp = context.sourceToParse().timestamp(); - long expire = new Date(timestamp + ttl).getTime(); - long now = System.currentTimeMillis(); - // there is not point indexing already expired doc - if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) { - throw new AlreadyExpiredException(context.sourceToParse().index(), - context.sourceToParse().type(), context.sourceToParse().id(), timestamp, ttl, now); - } - // the expiration timestamp (timestamp + ttl) is set as field - fields.add(new LegacyLongFieldMapper.CustomLongNumericField(expire, fieldType())); - } - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && defaultTTL == Defaults.DEFAULT) { - return builder; - } - builder.startObject(CONTENT_TYPE); - if (includeDefaults || enabledState != Defaults.ENABLED_STATE) { - builder.field("enabled", enabledState.enabled); - } - if (includeDefaults || defaultTTL != Defaults.DEFAULT && enabledState.enabled) { - builder.field("default", defaultTTL); - } - builder.endObject(); - return builder; - } - - @Override - protected String contentType() { - return NAME; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith; - if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with - if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) { - throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled."); - } else { - this.enabledState = ttlMergeWith.enabledState; - } - } - if (ttlMergeWith.defaultTTL != -1) { - // we never build the default when the field is disabled so we should also not set it - // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster) - if (enabledState == EnabledAttributeMapper.ENABLED) { - this.defaultTTL = ttlMergeWith.defaultTTL; - } - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 48478e551951e..bb8c4d77a6311 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -32,15 +31,11 @@ import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; /** A {@link FieldMapper} for full-text fields. */ @@ -49,14 +44,6 @@ public class TextFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "text"; private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; - private static final List SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING = unmodifiableList(Arrays.asList( - "type", - // common text parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "boost", "fields", "copy_to", - "fielddata", "eager_global_ordinals", "fielddata_frequency_filter", "include_in_all", - "analyzer", "search_analyzer", "search_quote_analyzer", - "index_options", "position_increment_gap", "similarity")); - public static class Defaults { public static double FIELDDATA_MIN_FREQUENCY = 0; public static double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE; @@ -143,41 +130,6 @@ public TextFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { - // Downgrade "text" to "string" in indexes created in 2.x so you can use modern syntax against old indexes - Set unsupportedParameters = new HashSet<>(node.keySet()); - unsupportedParameters.removeAll(SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING); - if (false == SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING.containsAll(node.keySet())) { - throw new IllegalArgumentException("Automatic downgrade from [text] to [string] failed because parameters " - + unsupportedParameters + " are not supported for automatic downgrades."); - } - { // Downgrade "index" - Object index = node.get("index"); - if (index == null || Boolean.TRUE.equals(index)) { - index = "analyzed"; - } else if (Boolean.FALSE.equals(index)) { - index = "no"; - } else { - throw new IllegalArgumentException( - "Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]"); - } - node.put("index", index); - } - { // Downgrade "fielddata" (default in string is true, default in text is false) - Object fielddata = node.get("fielddata"); - if (fielddata == null || Boolean.FALSE.equals(fielddata)) { - fielddata = false; - } else if (Boolean.TRUE.equals(fielddata)) { - fielddata = true; - } else { - throw new IllegalArgumentException("can't parse [fielddata] value for [" + fielddata + "] for field [" - + fieldName + "], expected [true] or [false]"); - } - node.put("fielddata", fielddata); - } - - return new StringFieldMapper.TypeParser().parse(fieldName, node, parserContext); - } TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName); builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer()); builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer()); diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java deleted file mode 100644 index 3e5971796d8fb..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.action.TimestampParsingException; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; - -public class TimestampFieldMapper extends MetadataFieldMapper { - - public static final String NAME = "_timestamp"; - public static final String CONTENT_TYPE = "_timestamp"; - public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||strictDateOptionalTime"; - - public static class Defaults extends LegacyDateFieldMapper.Defaults { - public static final String NAME = "_timestamp"; - - // TODO: this should be removed - public static final TimestampFieldType FIELD_TYPE = new TimestampFieldType(); - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); - - static { - FIELD_TYPE.setStored(true); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); - FIELD_TYPE.setName(NAME); - FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER); - FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setHasDocValues(true); - FIELD_TYPE.freeze(); - } - - public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED; - public static final String DEFAULT_TIMESTAMP = "now"; - } - - public static class Builder extends MetadataFieldMapper.Builder { - - private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; - private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP; - private Boolean ignoreMissing = null; - - public Builder(MappedFieldType existing) { - 
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
-        }
-
-        @Override
-        public LegacyDateFieldMapper.DateFieldType fieldType() {
-            return (LegacyDateFieldMapper.DateFieldType)fieldType;
-        }
-
-        public Builder enabled(EnabledAttributeMapper enabledState) {
-            this.enabledState = enabledState;
-            return builder;
-        }
-
-        public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
-            fieldType().setDateTimeFormatter(dateTimeFormatter);
-            return this;
-        }
-
-        public Builder defaultTimestamp(String defaultTimestamp) {
-            this.defaultTimestamp = defaultTimestamp;
-            return builder;
-        }
-
-        public Builder ignoreMissing(boolean ignoreMissing) {
-            this.ignoreMissing = ignoreMissing;
-            return builder;
-        }
-
-        @Override
-        public Builder store(boolean store) {
-            return super.store(store);
-        }
-
-        @Override
-        public TimestampFieldMapper build(BuilderContext context) {
-            setupFieldType(context);
-            return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, defaultTimestamp,
-                ignoreMissing, context.indexSettings());
-        }
-    }
-
-    public static class TypeParser implements MetadataFieldMapper.TypeParser {
-        @Override
-        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
-                throw new IllegalArgumentException("[_timestamp] is removed in 5.0. As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents.");
-            }
-            Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
-            boolean defaultSet = false;
-            Boolean ignoreMissing = null;
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = entry.getKey();
-                Object fieldNode = entry.getValue();
-                if (fieldName.equals("enabled")) {
-                    EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ?
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; - builder.enabled(enabledState); - iterator.remove(); - } else if (fieldName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(fieldNode.toString())); - iterator.remove(); - } else if (fieldName.equals("default")) { - if (fieldNode == null) { - throw new TimestampParsingException("default timestamp can not be set to null"); - } else { - builder.defaultTimestamp(fieldNode.toString()); - defaultSet = true; - } - iterator.remove(); - } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = lenientNodeBooleanValue(fieldNode); - builder.ignoreMissing(ignoreMissing); - iterator.remove(); - } - } - - // We can not accept a default value and rejecting null values at the same time - if (defaultSet && (ignoreMissing != null && ignoreMissing == false)) { - throw new TimestampParsingException("default timestamp can not be set with ignore_missing set to false"); - } - - return builder; - } - - @Override - public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { - return new TimestampFieldMapper(indexSettings, fieldType); - } - } - - public static final class TimestampFieldType extends LegacyDateFieldMapper.DateFieldType { - - public TimestampFieldType() {} - - protected TimestampFieldType(TimestampFieldType ref) { - super(ref); - } - - @Override - public TimestampFieldType clone() { - return new TimestampFieldType(this); - } - - @Override - public Object valueForDisplay(Object value) { - return value; - } - } - - private EnabledAttributeMapper enabledState; - - private final String defaultTimestamp; - private final Boolean ignoreMissing; - - private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(existing != null ? existing : Defaults.FIELD_TYPE, Defaults.FIELD_TYPE, Defaults.ENABLED, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); - } - - private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, - String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) { - super(NAME, fieldType, defaultFieldType, indexSettings); - if (enabledState.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) { - throw new IllegalArgumentException("[_timestamp] is removed in 5.0. 
As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents."); - } - this.enabledState = enabledState; - this.defaultTimestamp = defaultTimestamp; - this.ignoreMissing = ignoreMissing; - } - - @Override - public TimestampFieldType fieldType() { - return (TimestampFieldType)super.fieldType(); - } - - public boolean enabled() { - return this.enabledState.enabled; - } - - public String defaultTimestamp() { - return this.defaultTimestamp; - } - - public Boolean ignoreMissing() { - return this.ignoreMissing; - } - - @Override - public void preParse(ParseContext context) throws IOException { - super.parse(context); - } - - @Override - public void postParse(ParseContext context) throws IOException { - } - - @Override - public Mapper parse(ParseContext context) throws IOException { - // nothing to do here, we call the parent in preParse - return null; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (enabledState.enabled) { - long timestamp = context.sourceToParse().timestamp(); - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - fields.add(new LegacyLongFieldMapper.CustomLongNumericField(timestamp, fieldType())); - } - if (fieldType().hasDocValues()) { - fields.add(new NumericDocValuesField(fieldType().name(), timestamp)); - } - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - if (!includeDefaults && enabledState == Defaults.ENABLED - && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format()) - && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { - return builder; - } - builder.startObject(CONTENT_TYPE); - if (includeDefaults || enabledState != Defaults.ENABLED) { - builder.field("enabled", enabledState.enabled); - } - // different format handling depending on index version - String defaultDateFormat = Defaults.DATE_TIME_FORMATTER.format(); - if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(defaultDateFormat)) { - builder.field("format", fieldType().dateTimeFormatter().format()); - } - if (includeDefaults || !Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { - builder.field("default", defaultTimestamp); - } - if (includeDefaults || ignoreMissing != null) { - builder.field("ignore_missing", ignoreMissing); - } - - builder.endObject(); - return builder; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith; - super.doMerge(mergeWith, updateAllTypes); - if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = timestampFieldMapperMergeWith.enabledState; - } - if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) { - return; - } - List conflicts = new ArrayList<>(); - if (defaultTimestamp == null) { - conflicts.add("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) { - conflicts.add("Cannot update default in _timestamp value. 
Value is \" + defaultTimestamp.toString() + \" now encountering null"); - } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { - conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } - if (conflicts.isEmpty() == false) { - throw new IllegalArgumentException("Conflicts: " + conflicts); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java index 1468d985acc52..a2d40cd08bdc9 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java @@ -40,7 +40,7 @@ /** * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string - * to the index. In most ways the mapper acts just like an {@link LegacyIntegerFieldMapper}. + * to the index. In most ways the mapper acts just like an {@link NumberFieldMapper}. */ public class TokenCountFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "token_count"; @@ -78,9 +78,6 @@ public static class TypeParser implements Mapper.TypeParser { @Override @SuppressWarnings("unchecked") public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - return new LegacyTokenCountFieldMapper.TypeParser().parse(name, node, parserContext); - } TokenCountFieldMapper.Builder builder = new TokenCountFieldMapper.Builder(name); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index f6e01be0b7f7c..57ce082fca81a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -34,7 +34,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -257,13 +256,7 @@ private TypeFieldMapper(MappedFieldType fieldType, Settings indexSettings) { private static MappedFieldType defaultFieldType(Settings indexSettings) { MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone(); - Version indexCreated = Version.indexCreated(indexSettings); - if (indexCreated.before(Version.V_2_1_0)) { - // enables fielddata loading, doc values was disabled on _type between 2.0 and 2.1. 
-            ((TypeFieldType) defaultFieldType).setFielddata(true);
-        } else {
-            defaultFieldType.setHasDocValues(true);
-        }
+        defaultFieldType.setHasDocValues(true);
         return defaultFieldType;
     }
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
index 97828e2bfab03..475848989d443 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
@@ -71,32 +71,6 @@ public static boolean nodeBooleanValue(String name, Object node, Mapper.TypePars
         }
     }
 
-    @Deprecated // for legacy ints only
-    public static void parseNumberField(LegacyNumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
-        parseField(builder, name, numberNode, parserContext);
-        for (Iterator<Map.Entry<String, Object>> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) {
-            Map.Entry<String, Object> entry = iterator.next();
-            String propName = entry.getKey();
-            Object propNode = entry.getValue();
-            if (propName.equals("precision_step")) {
-                builder.precisionStep(nodeIntegerValue(propNode));
-                iterator.remove();
-            } else if (propName.equals("ignore_malformed")) {
-                builder.ignoreMalformed(nodeBooleanValue("ignore_malformed", propNode, parserContext));
-                iterator.remove();
-            } else if (propName.equals("coerce")) {
-                builder.coerce(nodeBooleanValue("coerce", propNode, parserContext));
-                iterator.remove();
-            } else if (propName.equals("similarity")) {
-                SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
-                builder.similarity(similarityProvider);
-                iterator.remove();
-            } else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
-                iterator.remove();
-            }
-        }
-    }
-
     private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
         NamedAnalyzer indexAnalyzer = null;
         NamedAnalyzer searchAnalyzer = null;
@@ -270,23 +244,13 @@ && parseNorms(builder, propName, propNode, parserContext)) {
                 iterator.remove();
             } else if (propName.equals("copy_to")) {
                 if (parserContext.isWithinMultiField()) {
-                    if (indexVersionCreated.after(Version.V_2_1_0) ||
-                            (indexVersionCreated.after(Version.V_2_0_1) && indexVersionCreated.before(Version.V_2_1_0))) {
-                        throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field.");
-                    } else {
-                        ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [{}] which is within a multi field. This feature has been removed and the copy_to will be removed from the mapping.", name);
-                    }
+                    throw new MapperParsingException("copy_to in multi fields is not allowed.
Found the copy_to in field [" + name + "] which is within a multi field."); } else { parseCopyFields(propNode, builder); } iterator.remove(); } } - if (indexVersionCreated.before(Version.V_2_2_0)) { - // analyzer, search_analyzer, term_vectors were accepted on all fields - // before 2.2, even though it made little sense - parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext); - } } public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) { diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index d895debd267d4..cec8c59c56a52 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -23,8 +23,6 @@ import org.apache.lucene.geo.Rectangle; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Numbers; @@ -37,13 +35,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery; -import org.elasticsearch.index.search.geo.LegacyIndexedGeoBoundingBoxQuery; import java.io.IOException; import java.util.Objects; @@ -360,34 +353,8 @@ public Query doToQuery(QueryShardContext context) { } } - if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return LatLonPoint.newBoxQuery(fieldType.name(), luceneBottomRight.getLat(), luceneTopLeft.getLat(), - luceneTopLeft.getLon(), luceneBottomRight.getLon()); - } else if (indexVersionCreated.onOrAfter(Version.V_2_2_0)) { - // if index created V_2_2 use (soon to be legacy) numeric encoding postings format - // if index created V_2_3 > use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? - GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - return new GeoPointInBBoxQuery(fieldType.name(), encoding, luceneBottomRight.lat(), luceneTopLeft.lat(), - luceneTopLeft.lon(), luceneBottomRight.lon()); - } - - Query query; - switch(type) { - case INDEXED: - LegacyGeoPointFieldType geoFieldType = ((LegacyGeoPointFieldType) fieldType); - query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType, context); - break; - case MEMORY: - IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - query = new LegacyInMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData); - break; - default: - // Someone extended the type enum w/o adjusting this switch statement. 
- throw new IllegalStateException("geo bounding box type [" + type + "] not supported."); - } - - return query; + return LatLonPoint.newBoxQuery(fieldType.name(), luceneBottomRight.getLat(), luceneTopLeft.getLat(), + luceneTopLeft.getLon(), luceneBottomRight.getLon()); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index ea1160f88532b..9b81e4b3d2417 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -22,8 +22,6 @@ import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -36,13 +34,8 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import java.io.IOException; import java.util.Locale; @@ -299,25 +292,7 @@ protected Query doToQuery(QueryShardContext shardContext) throws IOException { double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT); - if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return LatLonPoint.newDistanceQuery(fieldType.name(), center.lat(), center.lon(), normDistance); - } else if (indexVersionCreated.before(Version.V_2_2_0)) { - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType geoFieldType = (LegacyGeoPointFieldMapper.LegacyGeoPointFieldType) fieldType; - IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType); - String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox; - return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, - geoFieldType, indexFieldData, bboxOptimization, shardContext); - } - - // if index created V_2_2 use (soon to be legacy) numeric encoding postings format - // if index created V_2_3 > use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? 
- GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - // Lucene 6.0 and earlier requires a radial restriction - if (indexVersionCreated.before(Version.V_5_0_0_alpha4)) { - normDistance = GeoUtils.maxRadialDistance(center, normDistance); - } - return new GeoPointDistanceQuery(fieldType.name(), encoding, center.lat(), center.lon(), normDistance); + return LatLonPoint.newDistanceQuery(fieldType.name(), center.lat(), center.lon(), normDistance); } @Override @@ -371,11 +346,11 @@ public static Optional fromXContent(QueryParseContext p if (token == XContentParser.Token.FIELD_NAME) { currentName = parser.currentName(); } else if (token.isValue()) { - if (currentName.equals(GeoPointFieldMapper.Names.LAT)) { + if (currentName.equals("lat")) { point.resetLat(parser.doubleValue()); - } else if (currentName.equals(GeoPointFieldMapper.Names.LON)) { + } else if (currentName.equals("lon")) { point.resetLon(parser.doubleValue()); - } else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) { + } else if (currentName.equals("geohash")) { point.resetFromGeoHash(parser.text()); } else { throw new ParsingException(parser.getTokenLocation(), @@ -394,12 +369,12 @@ public static Optional fromXContent(QueryParseContext p unit = DistanceUnit.fromString(parser.text()); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) { geoDistance = GeoDistance.fromString(parser.text()); - } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) { + } else if (currentFieldName.endsWith(".lat")) { point.resetLat(parser.doubleValue()); - fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length()); - } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) { + fieldName = currentFieldName.substring(0, currentFieldName.length() - ".lat".length()); + } else if (currentFieldName.endsWith(".lon")) { point.resetLon(parser.doubleValue()); - fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length()); + fieldName = currentFieldName.substring(0, currentFieldName.length() - ".lon".length()); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java deleted file mode 100644 index 50c8c1d5ef874..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java +++ /dev/null @@ -1,630 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery; -import org.elasticsearch.Version; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; -import java.util.Optional; - -public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "geo_distance_range"; - - public static final boolean DEFAULT_INCLUDE_LOWER = true; - public static final boolean DEFAULT_INCLUDE_UPPER = true; - public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT; - public static final DistanceUnit DEFAULT_UNIT = DistanceUnit.DEFAULT; - @Deprecated - public static final String DEFAULT_OPTIMIZE_BBOX = "memory"; - - /** - * The default value for ignore_unmapped. 
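With this class gone, doToQuery (further below) rejects geo_distance_range outright for 5.x indices and points users at geo_distance sort or aggregations. For pure filtering, one way to approximate the old from/to ring is to intersect two geo_distance queries; a sketch, assuming a hypothetical field "location" and the builder's point(lat, lon) and distance(String) setters:

import org.elasticsearch.index.query.BoolQueryBuilder;

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;

class DistanceRingSketch {
    // matches points no farther than 200km from the center, excluding
    // everything within 50km of it
    static BoolQueryBuilder ring(double lat, double lon) {
        return boolQuery()
                .must(geoDistanceQuery("location").point(lat, lon).distance("200km"))
                .mustNot(geoDistanceQuery("location").point(lat, lon).distance("50km"));
    }
}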
- */ - public static final boolean DEFAULT_IGNORE_UNMAPPED = false; - - private static final ParseField FROM_FIELD = new ParseField("from"); - private static final ParseField TO_FIELD = new ParseField("to"); - private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower"); - private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper"); - private static final ParseField GT_FIELD = new ParseField("gt"); - private static final ParseField GTE_FIELD = new ParseField("gte", "ge"); - private static final ParseField LT_FIELD = new ParseField("lt"); - private static final ParseField LTE_FIELD = new ParseField("lte", "le"); - private static final ParseField UNIT_FIELD = new ParseField("unit"); - private static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); - private static final ParseField NAME_FIELD = new ParseField("_name"); - private static final ParseField BOOST_FIELD = new ParseField("boost"); - @Deprecated - private static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox") - .withAllDeprecated("no replacement: `optimize_bbox` is no longer supported due to recent improvements"); - private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize") - .withAllDeprecated("use validation_method instead"); - private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed") - .withAllDeprecated("use validation_method instead"); - private static final ParseField VALIDATION_METHOD = new ParseField("validation_method"); - private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped"); - - private final String fieldName; - - private Object from; - private Object to; - private boolean includeLower = DEFAULT_INCLUDE_LOWER; - private boolean includeUpper = DEFAULT_INCLUDE_UPPER; - - private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - private final GeoPoint point; - - private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE; - - private DistanceUnit unit = DEFAULT_UNIT; - - private String optimizeBbox = null; - - private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT; - - public GeoDistanceRangeQueryBuilder(String fieldName, GeoPoint point) { - if (Strings.isEmpty(fieldName)) { - throw new IllegalArgumentException("fieldName must not be null"); - } - if (point == null) { - throw new IllegalArgumentException("point must not be null"); - } - this.fieldName = fieldName; - this.point = point; - } - - public GeoDistanceRangeQueryBuilder(String fieldName, double lat, double lon) { - this(fieldName, new GeoPoint(lat, lon)); - } - - public GeoDistanceRangeQueryBuilder(String fieldName, String geohash) { - this(fieldName, geohash == null ? null : new GeoPoint().resetFromGeoHash(geohash)); - } - - /** - * Read from a stream. 
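The constructor below follows the usual Writeable convention: every value is read back in exactly the order doWriteTo writes it. A minimal sketch of that contract, using a hypothetical one-field type:

import java.io.IOException;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

class FieldNameHolder implements Writeable {
    private final String fieldName;

    FieldNameHolder(StreamInput in) throws IOException {
        fieldName = in.readString(); // must mirror the write order below
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(fieldName);
    }
}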
- */ - public GeoDistanceRangeQueryBuilder(StreamInput in) throws IOException { - super(in); - fieldName = in.readString(); - point = in.readGeoPoint(); - from = in.readGenericValue(); - to = in.readGenericValue(); - includeLower = in.readBoolean(); - includeUpper = in.readBoolean(); - unit = DistanceUnit.valueOf(in.readString()); - geoDistance = GeoDistance.readFromStream(in); - optimizeBbox = in.readOptionalString(); - validationMethod = GeoValidationMethod.readFromStream(in); - ignoreUnmapped = in.readBoolean(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - out.writeGeoPoint(point); - out.writeGenericValue(from); - out.writeGenericValue(to); - out.writeBoolean(includeLower); - out.writeBoolean(includeUpper); - out.writeString(unit.name()); - geoDistance.writeTo(out);; - out.writeOptionalString(optimizeBbox); - validationMethod.writeTo(out); - out.writeBoolean(ignoreUnmapped); - } - - public String fieldName() { - return fieldName; - } - - public GeoPoint point() { - return point; - } - - public GeoDistanceRangeQueryBuilder from(String from) { - if (from == null) { - throw new IllegalArgumentException("[from] must not be null"); - } - this.from = from; - return this; - } - - public GeoDistanceRangeQueryBuilder from(Number from) { - if (from == null) { - throw new IllegalArgumentException("[from] must not be null"); - } - this.from = from; - return this; - } - - public Object from() { - return from; - } - - public GeoDistanceRangeQueryBuilder to(String to) { - if (to == null) { - throw new IllegalArgumentException("[to] must not be null"); - } - this.to = to; - return this; - } - - public GeoDistanceRangeQueryBuilder to(Number to) { - if (to == null) { - throw new IllegalArgumentException("[to] must not be null"); - } - this.to = to; - return this; - } - - public Object to() { - return to; - } - - public GeoDistanceRangeQueryBuilder includeLower(boolean includeLower) { - this.includeLower = includeLower; - return this; - } - - public boolean includeLower() { - return includeLower; - } - - public GeoDistanceRangeQueryBuilder includeUpper(boolean includeUpper) { - this.includeUpper = includeUpper; - return this; - } - - public boolean includeUpper() { - return includeUpper; - } - - public GeoDistanceRangeQueryBuilder geoDistance(GeoDistance geoDistance) { - if (geoDistance == null) { - throw new IllegalArgumentException("geoDistance calculation mode must not be null"); - } - this.geoDistance = geoDistance; - return this; - } - - public GeoDistance geoDistance() { - return geoDistance; - } - - public GeoDistanceRangeQueryBuilder unit(DistanceUnit unit) { - if (unit == null) { - throw new IllegalArgumentException("distance unit must not be null"); - } - this.unit = unit; - return this; - } - - public DistanceUnit unit() { - return unit; - } - - @Deprecated - public GeoDistanceRangeQueryBuilder optimizeBbox(String optimizeBbox) { - this.optimizeBbox = optimizeBbox; - return this; - } - - @Deprecated - public String optimizeBbox() { - return optimizeBbox; - } - - /** Set validation method for coordinates. */ - public GeoDistanceRangeQueryBuilder setValidationMethod(GeoValidationMethod method) { - this.validationMethod = method; - return this; - } - - /** Returns validation method for coordinates. 
*/ - public GeoValidationMethod getValidationMethod() { - return this.validationMethod; - } - - /** - * Sets whether the query builder should ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. - */ - public GeoDistanceRangeQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { - this.ignoreUnmapped = ignoreUnmapped; - return this; - } - - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. - */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType == null) { - if (ignoreUnmapped) { - return new MatchNoDocsQuery(); - } else { - throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]"); - } - } - if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) { - throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field"); - } - - final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); - final boolean indexCreatedBeforeV2_2 = context.indexVersionCreated().before(Version.V_2_2_0); - // validation was not available prior to 2.x, so to support bwc - // percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) { - if (!GeoUtils.isValidLatitude(point.lat())) { - throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), NAME); - } - if (!GeoUtils.isValidLongitude(point.lon())) { - throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(), NAME); - } - } - - GeoPoint point = new GeoPoint(this.point); - if (indexCreatedBeforeV2_2 == false || GeoValidationMethod.isCoerce(validationMethod)) { - GeoUtils.normalizePoint(point, true, true); - } - - Double fromValue; - Double toValue; - if (from != null) { - if (from instanceof Number) { - fromValue = unit.toMeters(((Number) from).doubleValue()); - } else { - fromValue = DistanceUnit.parse((String) from, unit, DistanceUnit.DEFAULT); - } - if (indexCreatedBeforeV2_2) { - fromValue = geoDistance.normalize(fromValue, DistanceUnit.DEFAULT); - } - } else { - fromValue = 0.0; - } - - if (to != null) { - if (to instanceof Number) { - toValue = unit.toMeters(((Number) to).doubleValue()); - } else { - toValue = DistanceUnit.parse((String) to, unit, DistanceUnit.DEFAULT); - } - if (indexCreatedBeforeV2_2) { - toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT); - } - } else { - toValue = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon()); - } - - final Version indexVersionCreated = context.indexVersionCreated(); - if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - throw new QueryShardException(context, "[{}] queries are no longer supported for geo_point field types. " - + "Use geo_distance sort or aggregations", NAME); - } else if (indexVersionCreated.before(Version.V_2_2_0)) { - LegacyGeoPointFieldType geoFieldType = (LegacyGeoPointFieldType) fieldType; - IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - String bboxOptimization = Strings.isEmpty(optimizeBbox) ? 
DEFAULT_OPTIMIZE_BBOX : optimizeBbox; - return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType, - indexFieldData, bboxOptimization, context); - } - - // if index created V_2_2 use (soon to be legacy) numeric encoding postings format - // if index created V_2_3 > use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? - GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - - return new XGeoPointDistanceRangeQuery(fieldType.name(), encoding, point.lat(), point.lon(), - (includeLower) ? fromValue : fromValue + GeoUtils.TOLERANCE, - (includeUpper) ? toValue : toValue - GeoUtils.TOLERANCE); - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.startArray(fieldName).value(point.lon()).value(point.lat()).endArray(); - builder.field(FROM_FIELD.getPreferredName(), from); - builder.field(TO_FIELD.getPreferredName(), to); - builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower); - builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper); - builder.field(UNIT_FIELD.getPreferredName(), unit); - builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT)); - if (Strings.isEmpty(optimizeBbox) == false) { - builder.field(OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox); - } - builder.field(VALIDATION_METHOD.getPreferredName(), validationMethod); - builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static Optional fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - - XContentParser.Token token; - - Float boost = null; - String queryName = null; - String currentFieldName = null; - GeoPoint point = null; - String fieldName = null; - Object vFrom = null; - Object vTo = null; - Boolean includeLower = null; - Boolean includeUpper = null; - DistanceUnit unit = null; - GeoDistance geoDistance = null; - String optimizeBbox = null; - boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; - boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; - GeoValidationMethod validationMethod = null; - boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parseContext.isDeprecatedSetting(currentFieldName)) { - // skip - } else if (token == XContentParser.Token.START_ARRAY) { - if (fieldName == null) { - if (point == null) { - point = new GeoPoint(); - } - GeoUtils.parseGeoPoint(parser, point); - fieldName = currentFieldName; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - } else if (token == XContentParser.Token.START_OBJECT) { - // the json in the format of -> field : { lat : 30, lon : 12 } - if (fieldName == null) { - fieldName = currentFieldName; - if (point == null) { - point = new GeoPoint(); - } - GeoUtils.parseGeoPoint(parser, point); - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found 
[" + currentFieldName + "]"); - } - } else if (token.isValue()) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, FROM_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vFrom = parser.text(); // a String - } else { - vFrom = parser.numberValue(); // a Number - } - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TO_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vTo = parser.text(); // a String - } else { - vTo = parser.numberValue(); // a Number - } - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) { - includeLower = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) { - includeUpper = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) { - ignoreUnmapped = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, GT_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vFrom = parser.text(); // a String - } else { - vFrom = parser.numberValue(); // a Number - } - includeLower = false; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, GTE_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vFrom = parser.text(); // a String - } else { - vFrom = parser.numberValue(); // a Number - } - includeLower = true; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LT_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vTo = parser.text(); // a String - } else { - vTo = parser.numberValue(); // a Number - } - includeUpper = false; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LTE_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vTo = parser.text(); // a String - } else { - vTo = parser.numberValue(); // a Number - } - includeUpper = true; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, UNIT_FIELD)) { - unit = DistanceUnit.fromString(parser.text()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) { - geoDistance = GeoDistance.fromString(parser.text()); - } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) { - String maybeFieldName = currentFieldName.substring(0, - currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length()); - if (fieldName == null || fieldName.equals(maybeFieldName)) { - fieldName = maybeFieldName; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - if (point == null) { - point = new GeoPoint(); - } - point.resetLat(parser.doubleValue()); - } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) { - String maybeFieldName = currentFieldName.substring(0, - currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length()); - if (fieldName == null || fieldName.equals(maybeFieldName)) { - fieldName = maybeFieldName; - } else { - throw new 
ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - if (point == null) { - point = new GeoPoint(); - } - point.resetLon(parser.doubleValue()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, NAME_FIELD)) { - queryName = parser.text(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOOST_FIELD)) { - boost = parser.floatValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) { - optimizeBbox = parser.textOrNull(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) { - coerce = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) { - ignoreMalformed = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) { - validationMethod = GeoValidationMethod.fromString(parser.text()); - } else { - if (fieldName == null) { - if (point == null) { - point = new GeoPoint(); - } - point.resetFromString(parser.text()); - fieldName = currentFieldName; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - } - } - } - - GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(fieldName, point); - if (boost != null) { - queryBuilder.boost(boost); - } - - if (queryName != null) { - queryBuilder.queryName(queryName); - } - - if (vFrom != null) { - if (vFrom instanceof Number) { - queryBuilder.from((Number) vFrom); - } else { - queryBuilder.from((String) vFrom); - } - } - - if (vTo != null) { - if (vTo instanceof Number) { - queryBuilder.to((Number) vTo); - } else { - queryBuilder.to((String) vTo); - } - } - - if (includeUpper != null) { - queryBuilder.includeUpper(includeUpper); - } - - if (includeLower != null) { - queryBuilder.includeLower(includeLower); - } - - if (unit != null) { - queryBuilder.unit(unit); - } - - if (geoDistance != null) { - queryBuilder.geoDistance(geoDistance); - } - - if (optimizeBbox != null) { - queryBuilder.optimizeBbox(optimizeBbox); - } - - if (validationMethod != null) { - // if validation method is set explicitly ignore deprecated coerce/ignore malformed fields if any - queryBuilder.setValidationMethod(validationMethod); - } else { - queryBuilder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed)); - } - queryBuilder.ignoreUnmapped(ignoreUnmapped); - return Optional.of(queryBuilder); - } - - @Override - protected boolean doEquals(GeoDistanceRangeQueryBuilder other) { - return ((Objects.equals(fieldName, other.fieldName)) && - (Objects.equals(point, other.point)) && - (Objects.equals(from, other.from)) && - (Objects.equals(to, other.to)) && - (Objects.equals(includeUpper, other.includeUpper)) && - (Objects.equals(includeLower, other.includeLower)) && - (Objects.equals(geoDistance, other.geoDistance)) && - (Objects.equals(optimizeBbox, other.optimizeBbox)) && - (Objects.equals(validationMethod, other.validationMethod))) && - Objects.equals(ignoreUnmapped, other.ignoreUnmapped); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, point, from, to, includeUpper, includeLower, geoDistance, optimizeBbox, validationMethod, - ignoreUnmapped); - } - - @Override - public String 
getWriteableName() { - return NAME; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index e5e8e69fd54e9..b86d28c9088c6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -23,9 +23,6 @@ import org.apache.lucene.geo.Polygon; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -36,11 +33,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.GeoPolygonQuery; import java.io.IOException; import java.util.ArrayList; @@ -181,10 +175,9 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } final int shellSize = shell.size(); - final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); // validation was not available prior to 2.x, so to support bwc // percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) { + if (!GeoValidationMethod.isIgnoreMalformed(validationMethod)) { for (GeoPoint point : shell) { if (!GeoUtils.isValidLatitude(point.lat())) { throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), @@ -197,18 +190,12 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } } - final Version indexVersionCreated = context.indexVersionCreated(); - if (indexVersionCreated.onOrAfter(Version.V_2_2_0) || GeoValidationMethod.isCoerce(validationMethod)) { + if (GeoValidationMethod.isCoerce(validationMethod)) { for (GeoPoint point : shell) { GeoUtils.normalizePoint(point, true, true); } } - if (indexVersionCreated.before(Version.V_2_2_0)) { - IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - return new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shellSize])); - } - double[] lats = new double[shellSize]; double[] lons = new double[shellSize]; GeoPoint p; @@ -218,14 +205,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { lons[i] = p.lon(); } - if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return LatLonPoint.newPolygonQuery(fieldType.name(), new Polygon(lats, lons)); - } - // if index created V_2_2 use (soon to be legacy) numeric encoding postings format - // if index created V_2_3 > use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? 
- GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - return new GeoPointInPolygonQuery(fieldType.name(), encoding, lats, lons); + return LatLonPoint.newPolygonQuery(fieldType.name(), new Polygon(lats, lons)); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java deleted file mode 100644 index ab3b23af0fc7a..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ /dev/null @@ -1,390 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -/** - * A geohash cell filter that filters {@link GeoPoint}s by their geohashes. Basically the a - * Geohash prefix is defined by the filter and all geohashes that are matching this - * prefix will be returned. The neighbors flag allows to filter - * geohashes that surround the given geohash. In general the neighborhood of a - * geohash is defined by its eight adjacent cells.
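The cell arithmetic described here survives the removal in GeoHashUtils. A sketch of resolving a point to its cell plus the eight neighbors, with the same precision truncation the builder below applies:

import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.common.geo.GeoHashUtils;

class GeohashCellsSketch {
    static List<String> cellAndNeighbors(double lat, double lon, int precision) {
        String hash = GeoHashUtils.stringEncode(lon, lat); // full-precision hash
        hash = hash.substring(0, Math.min(precision, hash.length()));
        List<String> cells = new ArrayList<>(9);
        cells.add(hash);
        return GeoHashUtils.addNeighbors(hash, cells); // adds the 8 adjacent cells
    }
}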
- * The structure of the {@link GeohashCellQuery} is defined as:
- * <pre>
- * "geohash_bbox" {
- *     "field":"location",
- *     "geohash":"u33d8u5dkx8k",
- *     "neighbors":false
- * }
- * </pre>
- */ -public class GeohashCellQuery { - public static final String NAME = "geohash_cell"; - - public static final boolean DEFAULT_NEIGHBORS = false; - - /** - * The default value for ignore_unmapped. - */ - public static final boolean DEFAULT_IGNORE_UNMAPPED = false; - - private static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors"); - private static final ParseField PRECISION_FIELD = new ParseField("precision"); - private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped"); - - /** - * Create a new geohash filter for a given set of geohashes. In general this method - * returns a boolean filter combining the geohashes OR-wise. - * - * @param context Context of the filter - * @param fieldType field mapper for geopoints - * @param geohash mandatory geohash - * @param geohashes optional array of additional geohashes - * @return a new GeoBoundinboxfilter - */ - public static Query create(QueryShardContext context, BaseGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, - String geohash, @Nullable List geohashes) { - MappedFieldType geoHashMapper = fieldType.geoHashFieldType(); - if (geoHashMapper == null) { - throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled"); - } - - if (geohashes == null || geohashes.size() == 0) { - return geoHashMapper.termQuery(geohash, context); - } else { - geohashes.add(geohash); - return geoHashMapper.termsQuery(geohashes, context); - } - } - - /** - * Builder for a geohashfilter. It needs the fields fieldname and - * geohash to be set. the default for a neighbor filteing is - * false. - */ - public static class Builder extends AbstractQueryBuilder { - // we need to store the geohash rather than the corresponding point, - // because a transformation from a geohash to a point an back to the - // geohash will extend the accuracy of the hash to max precision - // i.e. by filing up with z's. - private String fieldName; - private String geohash; - private Integer levels = null; - private boolean neighbors = DEFAULT_NEIGHBORS; - - private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - public Builder(String field, GeoPoint point) { - this(field, point == null ? null : point.geohash(), false); - } - - public Builder(String field, String geohash) { - this(field, geohash, false); - } - - public Builder(String field, String geohash, boolean neighbors) { - if (Strings.isEmpty(field)) { - throw new IllegalArgumentException("fieldName must not be null"); - } - if (Strings.isEmpty(geohash)) { - throw new IllegalArgumentException("geohash or point must be defined"); - } - this.fieldName = field; - this.geohash = geohash; - this.neighbors = neighbors; - } - - /** - * Read from a stream. 
- */ - public Builder(StreamInput in) throws IOException { - super(in); - fieldName = in.readString(); - geohash = in.readString(); - levels = in.readOptionalVInt(); - neighbors = in.readBoolean(); - ignoreUnmapped = in.readBoolean(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - out.writeString(geohash); - out.writeOptionalVInt(levels); - out.writeBoolean(neighbors); - out.writeBoolean(ignoreUnmapped); - } - - public Builder point(GeoPoint point) { - this.geohash = point.getGeohash(); - return this; - } - - public Builder point(double lat, double lon) { - this.geohash = GeoHashUtils.stringEncode(lon, lat); - return this; - } - - public Builder geohash(String geohash) { - this.geohash = geohash; - return this; - } - - public String geohash() { - return geohash; - } - - public Builder precision(int levels) { - if (levels <= 0) { - throw new IllegalArgumentException("precision must be greater than 0. Found [" + levels + "]"); - } - this.levels = levels; - return this; - } - - public Integer precision() { - return levels; - } - - public Builder precision(String precision) { - double meters = DistanceUnit.parse(precision, DistanceUnit.DEFAULT, DistanceUnit.METERS); - return precision(GeoUtils.geoHashLevelsForPrecision(meters)); - } - - public Builder neighbors(boolean neighbors) { - this.neighbors = neighbors; - return this; - } - - public boolean neighbors() { - return neighbors; - } - - public Builder fieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public String fieldName() { - return fieldName; - } - - /** - * Sets whether the query builder should ignore unmapped fields (and run - * a {@link MatchNoDocsQuery} in place of this query) or throw an - * exception if the field is unmapped. - */ - public GeohashCellQuery.Builder ignoreUnmapped(boolean ignoreUnmapped) { - this.ignoreUnmapped = ignoreUnmapped; - return this; - } - - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an - * exception if the field is unmapped. - */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType == null) { - if (ignoreUnmapped) { - return new MatchNoDocsQuery(); - } else { - throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME, - BaseGeoPointFieldMapper.CONTENT_TYPE, fieldName); - } - } - - if (fieldType instanceof LatLonPointFieldMapper.LatLonPointFieldType) { - throw new QueryShardException(context, "failed to parse [{}] query. " - + "geo_point field no longer supports geohash_cell queries", NAME); - } else if (!(fieldType instanceof BaseGeoPointFieldMapper.LegacyGeoPointFieldType)) { - throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName); - } - - BaseGeoPointFieldMapper.LegacyGeoPointFieldType geoFieldType = ((BaseGeoPointFieldMapper.LegacyGeoPointFieldType) fieldType); - if (!geoFieldType.isGeoHashPrefixEnabled()) { - throw new QueryShardException(context, "failed to parse [{}] query. 
[geohash_prefix] is not enabled for field [{}]", NAME, - fieldName); - } - - String geohash = this.geohash; - if (levels != null) { - int len = Math.min(levels, geohash.length()); - geohash = geohash.substring(0, len); - } - - Query query; - if (neighbors) { - query = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList(8))); - } else { - query = create(context, geoFieldType, geohash, null); - } - return query; - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.field(NEIGHBORS_FIELD.getPreferredName(), neighbors); - if (levels != null) { - builder.field(PRECISION_FIELD.getPreferredName(), levels); - } - builder.field(fieldName, geohash); - builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static Optional fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - - String fieldName = null; - String geohash = null; - Integer levels = null; - Boolean neighbors = null; - String queryName = null; - Float boost = null; - boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - XContentParser.Token token; - if ((token = parser.currentToken()) != Token.START_OBJECT) { - throw new ElasticsearchParseException("failed to parse [{}] query. expected an object but found [{}] instead", NAME, token); - } - - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == Token.FIELD_NAME) { - String field = parser.currentName(); - - if (parseContext.isDeprecatedSetting(field)) { - // skip - } else if (parseContext.getParseFieldMatcher().match(field, PRECISION_FIELD)) { - token = parser.nextToken(); - if (token == Token.VALUE_NUMBER) { - levels = parser.intValue(); - } else if (token == Token.VALUE_STRING) { - double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS); - levels = GeoUtils.geoHashLevelsForPrecision(meters); - } - } else if (parseContext.getParseFieldMatcher().match(field, NEIGHBORS_FIELD)) { - parser.nextToken(); - neighbors = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) { - parser.nextToken(); - queryName = parser.text(); - } else if (parseContext.getParseFieldMatcher().match(field, IGNORE_UNMAPPED_FIELD)) { - parser.nextToken(); - ignoreUnmapped = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) { - parser.nextToken(); - boost = parser.floatValue(); - } else { - if (fieldName == null) { - fieldName = field; - token = parser.nextToken(); - if (token == Token.VALUE_STRING) { - // A string indicates either a geohash or a - // lat/lon - // string - String location = parser.text(); - if (location.indexOf(",") > 0) { - geohash = GeoUtils.parseGeoPoint(parser).geohash(); - } else { - geohash = location; - } - } else { - geohash = GeoUtils.parseGeoPoint(parser).geohash(); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + NAME + - "] field name already set to [" + fieldName + "] but found [" + field + "]"); - } - } - } else { - throw new ElasticsearchParseException("failed to parse [{}] query. 
unexpected token [{}]", NAME, token); - } - } - Builder builder = new Builder(fieldName, geohash); - if (levels != null) { - builder.precision(levels); - } - if (neighbors != null) { - builder.neighbors(neighbors); - } - if (queryName != null) { - builder.queryName(queryName); - } - if (boost != null) { - builder.boost(boost); - } - builder.ignoreUnmapped(ignoreUnmapped); - return Optional.of(builder); - } - - @Override - protected boolean doEquals(Builder other) { - return Objects.equals(fieldName, other.fieldName) - && Objects.equals(geohash, other.geohash) - && Objects.equals(levels, other.levels) - && Objects.equals(neighbors, other.neighbors) - && Objects.equals(ignoreUnmapped, other.ignoreUnmapped); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, geohash, levels, neighbors, ignoreUnmapped); - } - - @Override - public String getWriteableName() { - return NAME; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 4ab98cb56dfb9..da3cc85ee98c5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -55,7 +55,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; @@ -93,7 +92,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder> SUPPORTED_FIELD_TYPES = new HashSet<>( - Arrays.asList(StringFieldType.class, TextFieldType.class, KeywordFieldType.class)); + Arrays.asList(TextFieldType.class, KeywordFieldType.class)); private interface Field { ParseField FIELDS = new ParseField("fields"); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 2765eaa38cabc..f97103c9fae90 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -651,37 +651,6 @@ public static GeoDistanceQueryBuilder geoDistanceQuery(String name) { return new GeoDistanceQueryBuilder(name); } - /** - * A filter to filter based on a specific range from a specific geo location / point. - * - * @param name The location field name. - * @param point The point - */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, GeoPoint point) { - return new GeoDistanceRangeQueryBuilder(name, point); - } - - /** - * A filter to filter based on a specific range from a specific geo location / point. - * - * @param name The location field name. - * @param geohash The point as geohash - */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, String geohash) { - return new GeoDistanceRangeQueryBuilder(name, geohash); - } - - /** - * A filter to filter based on a specific range from a specific geo location / point. - * - * @param name The location field name. 
- * @param lat The points latitude - * @param lon The points longitude - */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, double lat, double lon) { - return new GeoDistanceRangeQueryBuilder(name, lat, lon); - } - /** * A filter to filter based on a bounding box defined by top left and bottom right locations / points * @@ -691,43 +660,6 @@ public static GeoBoundingBoxQueryBuilder geoBoundingBoxQuery(String name) { return new GeoBoundingBoxQueryBuilder(name); } - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name. - * @param geohash The Geohash to filter - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name, String geohash) { - return new GeohashCellQuery.Builder(name, geohash); - } - - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name. - * @param point a geo point within the geohash bucket - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name, GeoPoint point) { - return new GeohashCellQuery.Builder(name, point); - } - - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name - * @param geohash The Geohash to filter - * @param neighbors should the neighbor cell also be filtered - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name, String geohash, boolean neighbors) { - return new GeohashCellQuery.Builder(name, geohash, neighbors); - } - /** * A filter to filter based on a polygon defined by a set of locations / points. * diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index c31ad0071789a..c0533ef856be9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -44,9 +44,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ScaledFloatFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.query.support.QueryParsers; import org.joda.time.DateTimeZone; @@ -129,9 +127,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder= inclusiveLowerPoint && d <= inclusiveUpperPoint) { - return true; - } - } - } - return false; - } - - @Override - public float matchCost() { - if (distanceBoundingCheck == GeoDistance.ALWAYS_INSTANCE) { - return 0.0f; - } else { - // TODO: is this right (up to 4 comparisons from GeoDistance.SimpleDistanceBoundingCheck)? 
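For context, the scorer being deleted here plugs its per-document distance check into Lucene's TwoPhaseIterator, and matchCost() (the value returned just below) tells Lucene how expensive matches() is relative to other clauses. The skeleton of that pattern, with the actual check elided:

import java.io.IOException;

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.TwoPhaseIterator;

final class DistanceCheckSketch extends TwoPhaseIterator {
    DistanceCheckSketch(DocIdSetIterator approximation) {
        super(approximation);
    }

    @Override
    public boolean matches() throws IOException {
        // the removed code loaded the document's points here and compared
        // their distance from the center against the inclusive bounds
        return true; // placeholder for the real per-document check
    }

    @Override
    public float matchCost() {
        return 4.0f; // rough comparisons per doc, matching the estimate above
    }
}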
- return 4.0f; - } - } - }; - return new ConstantScoreScorer(this, score(), twoPhaseIterator); - } - }; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (sameClassAs(o) == false) return false; - - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) o; - - if (Double.compare(filter.inclusiveLowerPoint, inclusiveLowerPoint) != 0) return false; - if (Double.compare(filter.inclusiveUpperPoint, inclusiveUpperPoint) != 0) return false; - if (Double.compare(filter.lat, lat) != 0) return false; - if (Double.compare(filter.lon, lon) != 0) return false; - if (!indexFieldData.getFieldName().equals(filter.indexFieldData.getFieldName())) - return false; - if (geoDistance != filter.geoDistance) return false; - - return true; - } - - @Override - public String toString(String field) { - return "GeoDistanceRangeQuery(" + indexFieldData.getFieldName() + ", " + geoDistance + ", [" - + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")"; - } - - @Override - public int hashCode() { - int result = classHash(); - long temp; - temp = lat != +0.0d ? Double.doubleToLongBits(lat) : 0L; - result = 31 * result + Long.hashCode(temp); - temp = lon != +0.0d ? Double.doubleToLongBits(lon) : 0L; - result = 31 * result + Long.hashCode(temp); - temp = inclusiveLowerPoint != +0.0d ? Double.doubleToLongBits(inclusiveLowerPoint) : 0L; - result = 31 * result + Long.hashCode(temp); - temp = inclusiveUpperPoint != +0.0d ? Double.doubleToLongBits(inclusiveUpperPoint) : 0L; - result = 31 * result + Long.hashCode(temp); - result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0); - result = 31 * result + indexFieldData.getFieldName().hashCode(); - return result; - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java deleted file mode 100644 index bb39d752c0e41..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomAccessWeight; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.Bits; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.io.IOException; -import java.util.Arrays; - -public class GeoPolygonQuery extends Query { - - private final GeoPoint[] points; - - private final IndexGeoPointFieldData indexFieldData; - - public GeoPolygonQuery(IndexGeoPointFieldData indexFieldData, GeoPoint...points) { - this.points = points; - this.indexFieldData = indexFieldData; - } - - public GeoPoint[] points() { - return points; - } - - public String fieldName() { - return indexFieldData.getFieldName(); - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { - @Override - protected Bits getMatchingDocs(LeafReaderContext context) throws IOException { - final int maxDoc = context.reader().maxDoc(); - final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); - return new Bits() { - - private boolean pointInPolygon(GeoPoint[] points, double lat, double lon) { - boolean inPoly = false; - - for (int i = 1; i < points.length; i++) { - if (points[i].lon() < lon && points[i-1].lon() >= lon - || points[i-1].lon() < lon && points[i].lon() >= lon) { - if (points[i].lat() + (lon - points[i].lon()) / - (points[i-1].lon() - points[i].lon()) * (points[i-1].lat() - points[i].lat()) < lat) { - inPoly = !inPoly; - } - } - } - return inPoly; - } - - @Override - public boolean get(int doc) { - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (pointInPolygon(points, point.lat(), point.lon())) { - return true; - } - } - return false; - } - - @Override - public int length() { - return maxDoc; - } - - }; - } - }; - } - - @Override - public String toString(String field) { - StringBuilder sb = new StringBuilder("GeoPolygonQuery("); - sb.append(indexFieldData.getFieldName()); - sb.append(", ").append(Arrays.toString(points)).append(')'); - return sb.toString(); - } - - @Override - public boolean equals(Object obj) { - if (sameClassAs(obj) == false) { - return false; - } - GeoPolygonQuery that = (GeoPolygonQuery) obj; - return indexFieldData.getFieldName().equals(that.indexFieldData.getFieldName()) - && Arrays.equals(points, that.points); - } - - @Override - public int hashCode() { - int h = classHash(); - h = 31 * h + indexFieldData.getFieldName().hashCode(); - h = 31 * h + Arrays.hashCode(points); - return h; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java deleted file mode 100644 index 2d8ea7af49d05..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomAccessWeight; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.Bits; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.io.IOException; -import java.util.Objects; - -/** - * - * @deprecated This query is no longer used for geo_point indexes created after version 2.1 - */ -@Deprecated -public class LegacyInMemoryGeoBoundingBoxQuery extends Query { - - private final GeoPoint topLeft; - private final GeoPoint bottomRight; - - private final IndexGeoPointFieldData indexFieldData; - - public LegacyInMemoryGeoBoundingBoxQuery(GeoPoint topLeft, GeoPoint bottomRight, IndexGeoPointFieldData indexFieldData) { - this.topLeft = topLeft; - this.bottomRight = bottomRight; - this.indexFieldData = indexFieldData; - } - - public GeoPoint topLeft() { - return topLeft; - } - - public GeoPoint bottomRight() { - return bottomRight; - } - - public String fieldName() { - return indexFieldData.getFieldName(); - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { - @Override - protected Bits getMatchingDocs(LeafReaderContext context) throws IOException { - final int maxDoc = context.reader().maxDoc(); - final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); - // checks to see if bounding box crosses 180 degrees - if (topLeft.lon() > bottomRight.lon()) { - return new Meridian180GeoBoundingBoxBits(maxDoc, values, topLeft, bottomRight); - } else { - return new GeoBoundingBoxBits(maxDoc, values, topLeft, bottomRight); - } - } - }; - } - - @Override - public String toString(String field) { - return "GeoBoundingBoxFilter(" + indexFieldData.getFieldName() + ", " + topLeft + ", " + bottomRight + ")"; - } - - @Override - public boolean equals(Object obj) { - if (sameClassAs(obj) == false) { - return false; - } - LegacyInMemoryGeoBoundingBoxQuery other = (LegacyInMemoryGeoBoundingBoxQuery) obj; - return fieldName().equalsIgnoreCase(other.fieldName()) - && topLeft.equals(other.topLeft) - && bottomRight.equals(other.bottomRight); - } - - @Override - public int hashCode() { - return Objects.hash(classHash(), fieldName(), topLeft, bottomRight); - } - - private static class Meridian180GeoBoundingBoxBits implements Bits { - private final int maxDoc; - private final MultiGeoPointValues values; - private final GeoPoint topLeft; - private final GeoPoint bottomRight; - - public Meridian180GeoBoundingBoxBits(int maxDoc, MultiGeoPointValues values, GeoPoint topLeft, GeoPoint bottomRight) { - this.maxDoc = maxDoc; - this.values = values; 
- this.topLeft = topLeft; - this.bottomRight = bottomRight; - } - - @Override - public boolean get(int doc) { - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (((topLeft.lon() <= point.lon() || bottomRight.lon() >= point.lon())) && - (topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat())) { - return true; - } - } - return false; - } - - @Override - public int length() { - return maxDoc; - } - } - - private static class GeoBoundingBoxBits implements Bits { - private final int maxDoc; - private final MultiGeoPointValues values; - private final GeoPoint topLeft; - private final GeoPoint bottomRight; - - public GeoBoundingBoxBits(int maxDoc, MultiGeoPointValues values, GeoPoint topLeft, GeoPoint bottomRight) { - this.maxDoc = maxDoc; - this.values = values; - this.topLeft = topLeft; - this.bottomRight = bottomRight; - } - - @Override - public boolean get(int doc) { - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (topLeft.lon() <= point.lon() && bottomRight.lon() >= point.lon() - && topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat()) { - return true; - } - } - return false; - } - - @Override - public int length() { - return maxDoc; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java deleted file mode 100644 index 7b44ac62edee9..0000000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
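The two Bits implementations above encode the one interesting rule of this query: when a bounding box crosses the dateline (left-edge longitude greater than right-edge longitude), the longitude check degrades from a conjunction to a disjunction. A stand-alone sketch of that rule, with hypothetical plain-double arguments rather than GeoPoints:

public final class DatelineAwareBox {
    static boolean contains(double top, double left, double bottom, double right,
                            double lat, double lon) {
        boolean latIn = lat <= top && lat >= bottom;
        boolean lonIn = left <= right
                ? lon >= left && lon <= right   // ordinary box: both bounds must hold
                : lon >= left || lon <= right;  // crosses 180: either side qualifies
        return latIn && lonIn;
    }

    public static void main(String[] args) {
        // a box from 170E across the dateline to 170W
        System.out.println(contains(10, 170, -10, -170, 0, 175));   // true
        System.out.println(contains(10, 170, -10, -170, 0, -175));  // true
        System.out.println(contains(10, 170, -10, -170, 0, 0));     // false
    }
}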
- */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; -import org.elasticsearch.index.query.QueryShardContext; - -/** - * - * @deprecated This query is no longer used for geo_point indexes created after version 2.1 - */ -@Deprecated -public class LegacyIndexedGeoBoundingBoxQuery { - - public static Query create(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) { - if (!fieldType.isLatLonEnabled()) { - throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name() - + "], can't use indexed filter on it"); - } - //checks to see if bounding box crosses 180 degrees - if (topLeft.lon() > bottomRight.lon()) { - return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context); - } else { - return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context); - } - } - - private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) { - BooleanQuery.Builder filter = new BooleanQuery.Builder(); - filter.setMinimumNumberShouldMatch(1); - filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, context), Occur.SHOULD); - filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true, context), Occur.SHOULD); - filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST); - return new ConstantScoreQuery(filter.build()); - } - - private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) { - BooleanQuery.Builder filter = new BooleanQuery.Builder(); - filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, context), Occur.MUST); - filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST); - return new ConstantScoreQuery(filter.build()); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 06ea008fdbb03..613fec5523c93 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexCommit; @@ -125,7 +124,6 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.search.suggest.completion.CompletionFieldStats; import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat; import org.elasticsearch.threadpool.ThreadPool; import java.io.FileNotFoundException; @@ -753,10 +751,6 @@ public CompletionStats completionStats(String... 
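The indexed variant removed just above expresses the same dateline split as range clauses: for a crossing box, two SHOULD longitude ranges with minimumNumberShouldMatch(1), plus a MUST latitude range. A sketch of that crossing case against modern Lucene point fields (hypothetical lat/lon field names indexed as DoublePoint, not the removed legacy field types):

import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;

public final class IndexedDatelineBox {
    // "West" case: the box wraps around 180, so a doc matches either lon half-range.
    public static Query westCrossingBox(double topLat, double leftLon,
                                        double bottomLat, double rightLon) {
        BooleanQuery.Builder b = new BooleanQuery.Builder();
        b.setMinimumNumberShouldMatch(1); // at least one lon half-range must match
        b.add(DoublePoint.newRangeQuery("lon", Double.NEGATIVE_INFINITY, rightLon), Occur.SHOULD);
        b.add(DoublePoint.newRangeQuery("lon", leftLon, Double.POSITIVE_INFINITY), Occur.SHOULD);
        b.add(DoublePoint.newRangeQuery("lat", bottomLat, topLat), Occur.MUST);
        return new ConstantScoreQuery(b.build()); // filter semantics, like the removed code
    }
}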
fields) { CompletionStats completionStats = new CompletionStats(); try (final Engine.Searcher currentSearcher = acquireSearcher("completion_stats")) { completionStats.add(CompletionFieldStats.completionStats(currentSearcher.reader(), fields)); - // Necessary for 2.x shards: - Completion090PostingsFormat postingsFormat = ((Completion090PostingsFormat) - PostingsFormat.forName(Completion090PostingsFormat.CODEC_NAME)); - completionStats.add(postingsFormat.completionStats(currentSearcher.reader(), fields)); } return completionStats; } diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index 13ea660b4f042..7dd5cebcb2c63 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -155,7 +155,7 @@ private void performRecoveryOperation(Engine engine, Translog.Operation operatio // we set canHaveDuplicates to true all the time such that we de-optimze the translog case and ensure that all // autoGeneratedID docs that are coming from the primary are updated correctly. Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), source(shardId.getIndexName(), index.type(), index.id(), index.source()) - .routing(index.routing()).parent(index.parent()).timestamp(index.timestamp()).ttl(index.ttl()), index.seqNo(), + .routing(index.routing()).parent(index.parent()), index.seqNo(), index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, index.getAutoGeneratedIdTimestamp(), true); maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates); if (logger.isTraceEnabled()) { diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 671178dfcc6a9..520cb13390f79 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -47,7 +47,6 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -163,8 +162,7 @@ private static void handleFieldWildcards(IndexShard indexShard, TermVectorsReque private static boolean isValidField(MappedFieldType fieldType) { // must be a string - if (fieldType instanceof StringFieldMapper.StringFieldType == false - && fieldType instanceof KeywordFieldMapper.KeywordFieldType == false + if (fieldType instanceof KeywordFieldMapper.KeywordFieldType == false && fieldType instanceof TextFieldMapper.TextFieldType == false) { return false; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index dd5a633e7fadc..92220fa1922de 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -808,15 +808,11 @@ public static class Source { public final BytesReference source; public final String routing; public 
final String parent; - public final long timestamp; - public final long ttl; - public Source(BytesReference source, String routing, String parent, long timestamp, long ttl) { + public Source(BytesReference source, String routing, String parent) { this.source = source; this.routing = routing; this.parent = parent; - this.timestamp = timestamp; - this.ttl = ttl; } } @@ -834,8 +830,6 @@ public static class Index implements Operation { private final BytesReference source; private final String routing; private final String parent; - private final long timestamp; - private final long ttl; public Index(StreamInput in) throws IOException { final int format = in.readVInt(); // SERIALIZATION_FORMAT @@ -846,8 +840,10 @@ public Index(StreamInput in) throws IOException { routing = in.readOptionalString(); parent = in.readOptionalString(); this.version = in.readLong(); - this.timestamp = in.readLong(); - this.ttl = in.readLong(); + if (format < FORMAT_SEQ_NO) { + in.readLong(); // timestamp + in.readLong(); // ttl + } this.versionType = VersionType.fromValue(in.readByte()); assert versionType.validateVersionForWrites(this.version); if (format >= FORMAT_AUTO_GENERATED_IDS) { @@ -868,8 +864,6 @@ public Index(Engine.Index index, Engine.IndexResult indexResult) { this.parent = index.parent(); this.seqNo = indexResult.getSeqNo(); this.version = indexResult.getVersion(); - this.timestamp = index.timestamp(); - this.ttl = index.ttl(); this.versionType = index.versionType(); this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } @@ -883,8 +877,6 @@ public Index(String type, String id, byte[] source) { versionType = VersionType.INTERNAL; routing = null; parent = null; - timestamp = 0; - ttl = 0; autoGeneratedIdTimestamp = -1; } @@ -914,14 +906,6 @@ public String parent() { return this.parent; } - public long timestamp() { - return this.timestamp; - } - - public long ttl() { - return this.ttl; - } - public BytesReference source() { return this.source; } @@ -940,7 +924,7 @@ public VersionType versionType() { @Override public Source getSource() { - return new Source(source, routing, parent, timestamp, ttl); + return new Source(source, routing, parent); } @Override @@ -952,8 +936,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(routing); out.writeOptionalString(parent); out.writeLong(version); - out.writeLong(timestamp); - out.writeLong(ttl); + out.writeByte(versionType.getValue()); out.writeLong(autoGeneratedIdTimestamp); out.writeVLong(seqNo); @@ -972,8 +955,6 @@ public boolean equals(Object o) { if (version != index.version || seqNo != index.seqNo || - timestamp != index.timestamp || - ttl != index.ttl || id.equals(index.id) == false || type.equals(index.type) == false || versionType != index.versionType || @@ -998,9 +979,7 @@ public int hashCode() { result = 31 * result + source.hashCode(); result = 31 * result + (routing != null ? routing.hashCode() : 0); result = 31 * result + (parent != null ? 
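The read side above shows the usual trick for retiring fields from a versioned wire format: old serializations still carry the two longs, so readers must consume and discard them to keep the stream aligned, while new serializations simply omit them. A compact sketch of the pattern, with an illustrative format constant (not the real value) and plain java.io streams standing in for StreamInput:

import java.io.DataInput;
import java.io.IOException;

final class IndexOp {
    static final int FORMAT_SEQ_NO = 6; // illustrative value, not the actual constant

    long version;
    byte versionTypeValue;

    static IndexOp read(DataInput in, int format) throws IOException {
        IndexOp op = new IndexOp();
        op.version = in.readLong();
        if (format < FORMAT_SEQ_NO) {
            in.readLong(); // timestamp: discarded, read only to keep the stream aligned
            in.readLong(); // ttl: discarded
        }
        op.versionTypeValue = in.readByte();
        return op;
    }
}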
parent.hashCode() : 0); - result = 31 * result + Long.hashCode(timestamp); result = 31 * result + Long.hashCode(autoGeneratedIdTimestamp); - result = 31 * result + Long.hashCode(ttl); return result; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index 02a50cae091e5..7e701ff28c033 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; @@ -47,10 +46,7 @@ import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.ScaledFloatFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TokenCountFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -65,7 +61,6 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.plugins.MapperPlugin; import java.util.ArrayList; @@ -110,14 +105,12 @@ private Map getMappers(List mapperPlugi mappers.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser()); mappers.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser()); mappers.put(ScaledFloatFieldMapper.CONTENT_TYPE, new ScaledFloatFieldMapper.TypeParser()); - mappers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser()); mappers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser()); mappers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser()); mappers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser()); mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); - mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); mappers.put(LatLonPointFieldMapper.CONTENT_TYPE, new LatLonPointFieldMapper.TypeParser()); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); @@ -147,8 +140,6 @@ private Map getMetadataMappers(List INDICES_TTL_INTERVAL_SETTING = - Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), - Property.Dynamic, Property.NodeScope); - - private final ClusterService clusterService; - private final IndicesService indicesService; - private final TransportBulkAction bulkAction; - - private final int bulkSize; - private PurgerThread purgerThread; - - @Inject - public 
IndicesTTLService(Settings settings, ClusterService clusterService, IndicesService indicesService, ClusterSettings clusterSettings, TransportBulkAction bulkAction) { - super(settings); - this.clusterService = clusterService; - this.indicesService = indicesService; - TimeValue interval = INDICES_TTL_INTERVAL_SETTING.get(settings); - this.bulkAction = bulkAction; - this.bulkSize = this.settings.getAsInt("indices.ttl.bulk_size", 10000); - this.purgerThread = new PurgerThread(EsExecutors.threadName(settings, "[ttl_expire]"), interval); - clusterSettings.addSettingsUpdateConsumer(INDICES_TTL_INTERVAL_SETTING, this.purgerThread::resetInterval); - } - - @Override - protected void doStart() { - this.purgerThread.start(); - } - - @Override - protected void doStop() { - try { - this.purgerThread.shutdown(); - } catch (InterruptedException e) { - // we intentionally do not want to restore the interruption flag, we're about to shutdown anyway - } - } - - @Override - protected void doClose() { - } - - private class PurgerThread extends Thread { - private final AtomicBoolean running = new AtomicBoolean(true); - private final Notifier notifier; - private final CountDownLatch shutdownLatch = new CountDownLatch(1); - - - public PurgerThread(String name, TimeValue interval) { - super(name); - setDaemon(true); - this.notifier = new Notifier(interval); - } - - public void shutdown() throws InterruptedException { - if (running.compareAndSet(true, false)) { - notifier.doNotify(); - shutdownLatch.await(); - } - - } - - public void resetInterval(TimeValue interval) { - notifier.setTimeout(interval); - } - - @Override - public void run() { - try { - while (running.get()) { - try { - List shardsToPurge = getShardsToPurge(); - purgeShards(shardsToPurge); - } catch (Exception e) { - if (running.get()) { - logger.warn("failed to execute ttl purge", e); - } - } - if (running.get()) { - notifier.await(); - } - } - } finally { - shutdownLatch.countDown(); - } - } - - /** - * Returns the shards to purge, i.e. 
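The PurgerThread above is a self-rescheduling daemon: run the purge, then park on a condition with the configured timeout, so that a settings update (resetInterval) or a shutdown can wake it early. A minimal stand-alone sketch of that machinery, assuming a hypothetical PeriodicWorker class rather than the removed service:

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

final class PeriodicWorker extends Thread {
    private final AtomicBoolean running = new AtomicBoolean(true);
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition wakeUp = lock.newCondition();
    private volatile long intervalMillis;
    private final Runnable task;

    PeriodicWorker(Runnable task, long intervalMillis) {
        this.task = task;
        this.intervalMillis = intervalMillis;
        setDaemon(true);
    }

    @Override
    public void run() {
        while (running.get()) {
            try {
                task.run();
            } catch (Exception e) {
                // log and keep going; one failed round must not kill the loop
            }
            lock.lock();
            try {
                wakeUp.await(intervalMillis, TimeUnit.MILLISECONDS);
            } catch (InterruptedException ignored) {
                // shutting down anyway
            } finally {
                lock.unlock();
            }
        }
    }

    void resetInterval(long millis) { // takes effect at the next wait
        intervalMillis = millis;
        signal();
    }

    void shutdown() {
        running.set(false);
        signal();
    }

    private void signal() {
        lock.lock();
        try {
            wakeUp.signalAll();
        } finally {
            lock.unlock();
        }
    }
}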
the local started primary shards that have ttl enabled and disable_purge to false - */ - private List getShardsToPurge() { - List shardsToPurge = new ArrayList<>(); - MetaData metaData = clusterService.state().metaData(); - for (IndexService indexService : indicesService) { - // check the value of disable_purge for this index - IndexMetaData indexMetaData = metaData.index(indexService.index()); - if (indexMetaData == null) { - continue; - } - if (indexService.getIndexSettings().isTTLPurgeDisabled()) { - continue; - } - - // check if ttl is enabled for at least one type of this index - boolean hasTTLEnabled = false; - for (String type : indexService.mapperService().types()) { - DocumentMapper documentType = indexService.mapperService().documentMapper(type); - if (documentType.TTLFieldMapper().enabled()) { - hasTTLEnabled = true; - break; - } - } - if (hasTTLEnabled) { - for (IndexShard indexShard : indexService) { - if (indexShard.state() == IndexShardState.STARTED && indexShard.routingEntry().primary() && indexShard.routingEntry().started()) { - shardsToPurge.add(indexShard); - } - } - } - } - return shardsToPurge; - } - - public TimeValue getInterval() { - return notifier.getTimeout(); - } - } - - private void purgeShards(List shardsToPurge) { - for (IndexShard shardToPurge : shardsToPurge) { - Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, - true, null); - Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl"); - try { - logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id()); - ExpiredDocsCollector expiredDocsCollector = new ExpiredDocsCollector(); - searcher.searcher().search(query, expiredDocsCollector); - List docsToPurge = expiredDocsCollector.getDocsToPurge(); - - BulkRequest bulkRequest = new BulkRequest(); - for (DocToPurge docToPurge : docsToPurge) { - - bulkRequest.add(new DeleteRequest().index(shardToPurge.routingEntry().getIndexName()).type(docToPurge.type).id(docToPurge.id).version(docToPurge.version).routing(docToPurge.routing)); - bulkRequest = processBulkIfNeeded(bulkRequest, false); - } - processBulkIfNeeded(bulkRequest, true); - } catch (Exception e) { - logger.warn("failed to purge", e); - } finally { - searcher.close(); - } - } - } - - private static class DocToPurge { - public final String type; - public final String id; - public final long version; - public final String routing; - - public DocToPurge(String type, String id, long version, String routing) { - this.type = type; - this.id = id; - this.version = version; - this.routing = routing; - } - } - - private class ExpiredDocsCollector extends SimpleCollector { - private LeafReaderContext context; - private List docsToPurge = new ArrayList<>(); - private NumericDocValues versions; - - public ExpiredDocsCollector() { - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public boolean needsScores() { - return false; - } - - @Override - public void collect(int doc) { - try { - FieldsVisitor fieldsVisitor = new FieldsVisitor(false); - context.reader().document(doc, fieldsVisitor); - Uid uid = fieldsVisitor.uid(); - final long version = versions == null ? 
Versions.NOT_FOUND : versions.get(doc); - docsToPurge.add(new DocToPurge(uid.type(), uid.id(), version, fieldsVisitor.routing())); - } catch (Exception e) { - logger.trace("failed to collect doc", e); - } - } - - @Override - public void doSetNextReader(LeafReaderContext context) throws IOException { - this.context = context; - this.versions = context.reader().getNumericDocValues(VersionFieldMapper.NAME); - } - - public List getDocsToPurge() { - return this.docsToPurge; - } - } - - private BulkRequest processBulkIfNeeded(BulkRequest bulkRequest, boolean force) { - if ((force && bulkRequest.numberOfActions() > 0) || bulkRequest.numberOfActions() >= bulkSize) { - try { - bulkAction.executeBulk(bulkRequest, new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - if (bulkResponse.hasFailures()) { - int failedItems = 0; - for (BulkItemResponse response : bulkResponse) { - if (response.isFailed()) failedItems++; - } - if (logger.isTraceEnabled()) { - logger.trace("bulk deletion failures for [{}]/[{}] items, failure message: [{}]", failedItems, bulkResponse.getItems().length, bulkResponse.buildFailureMessage()); - } else { - logger.error("bulk deletion failures for [{}]/[{}] items", failedItems, bulkResponse.getItems().length); - } - } else { - logger.trace("bulk deletion took {}ms", bulkResponse.getTookInMillis()); - } - } - - @Override - public void onFailure(Exception e) { - if (logger.isTraceEnabled()) { - logger.trace("failed to execute bulk", e); - } else { - logger.warn("failed to execute bulk: ", e); - } - } - }); - } catch (Exception e) { - logger.warn("failed to process bulk", e); - } - bulkRequest = new BulkRequest(); - } - return bulkRequest; - } - - private static final class Notifier { - - private final ReentrantLock lock = new ReentrantLock(); - private final Condition condition = lock.newCondition(); - private volatile TimeValue timeout; - - public Notifier(TimeValue timeout) { - assert timeout != null; - this.timeout = timeout; - } - - public void await() { - lock.lock(); - try { - condition.await(timeout.millis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - // we intentionally do not want to restore the interruption flag, we're about to shutdown anyway - } finally { - lock.unlock(); - } - - } - - public void setTimeout(TimeValue timeout) { - assert timeout != null; - this.timeout = timeout; - doNotify(); - } - - public TimeValue getTimeout() { - return timeout; - } - - public void doNotify() { - lock.lock(); - try { - condition.signalAll(); - } finally { - lock.unlock(); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 670ff19fdabf5..edb92b6e837e2 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -25,8 +25,6 @@ import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import java.text.DateFormat; @@ -56,8 +54,7 @@ public final class IngestDocument { private final Map sourceAndMetadata; private final Map ingestMetadata; - public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, - String 
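processBulkIfNeeded above implements flush-by-threshold batching: keep appending delete actions, dispatch whenever the buffer reaches bulkSize, and force one last flush for the remainder. A generic sketch of the same pattern (hypothetical helper, not the removed service):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

final class BulkBuffer<T> {
    private final int bulkSize;
    private final Consumer<List<T>> dispatcher;
    private List<T> buffer = new ArrayList<>();

    BulkBuffer(int bulkSize, Consumer<List<T>> dispatcher) {
        this.bulkSize = bulkSize;
        this.dispatcher = dispatcher;
    }

    void add(T item) {
        buffer.add(item);
        flushIfNeeded(false);
    }

    void close() {
        flushIfNeeded(true); // force the trailing partial batch out
    }

    private void flushIfNeeded(boolean force) {
        if ((force && buffer.isEmpty() == false) || buffer.size() >= bulkSize) {
            dispatcher.accept(buffer);
            buffer = new ArrayList<>(); // start a fresh request, like the removed code
        }
    }
}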
ttl, Map source) { + public IngestDocument(String index, String type, String id, String routing, String parent, Map source) { this.sourceAndMetadata = new HashMap<>(); this.sourceAndMetadata.putAll(source); this.sourceAndMetadata.put(MetaData.INDEX.getFieldName(), index); @@ -69,12 +66,6 @@ public IngestDocument(String index, String type, String id, String routing, Stri if (parent != null) { this.sourceAndMetadata.put(MetaData.PARENT.getFieldName(), parent); } - if (timestamp != null) { - this.sourceAndMetadata.put(MetaData.TIMESTAMP.getFieldName(), timestamp); - } - if (ttl != null) { - this.sourceAndMetadata.put(MetaData.TTL.getFieldName(), ttl); - } this.ingestMetadata = new HashMap<>(); DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); @@ -639,9 +630,7 @@ public enum MetaData { TYPE(TypeFieldMapper.NAME), ID(IdFieldMapper.NAME), ROUTING(RoutingFieldMapper.NAME), - PARENT(ParentFieldMapper.NAME), - TIMESTAMP(TimestampFieldMapper.NAME), - TTL(TTLFieldMapper.NAME); + PARENT(ParentFieldMapper.NAME); private final String fieldName; diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 6c701e59c90af..0ca89ea37b908 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -159,10 +159,8 @@ private void innerExecute(IndexRequest indexRequest, Pipeline pipeline) throws E String id = indexRequest.id(); String routing = indexRequest.routing(); String parent = indexRequest.parent(); - String timestamp = indexRequest.timestamp(); - String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString(); Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); + IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, sourceAsMap); pipeline.execute(ingestDocument); Map metadataMap = ingestDocument.extractMetadata(); @@ -173,8 +171,6 @@ private void innerExecute(IndexRequest indexRequest, Pipeline pipeline) throws E indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); - indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); - indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); indexRequest.source(ingestDocument.getSourceAndMetadata()); } catch (Exception e) { totalStats.ingestFailed(); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 43ab750edb61b..d5d65bd1fe66e 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -98,7 +98,6 @@ import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; @@ -542,7 +541,6 @@ public Node start() throws NodeValidationException { injector.getInstance(MappingUpdatedAction.class).setClient(client); 
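The innerExecute change above also documents the ingest metadata round trip: request-level metadata is merged into the map the pipeline sees, processors may rewrite any of it, and the surviving values are copied back onto the request, with timestamp and ttl no longer part of that set. A sketch of the round trip over plain maps, assuming hypothetical stand-ins for IndexRequest and IngestDocument:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

final class IngestRoundTrip {
    static Map<String, Object> execute(Map<String, Object> source,
                                       Map<String, String> requestMeta,
                                       Consumer<Map<String, Object>> pipeline) {
        Map<String, Object> doc = new HashMap<>(source);
        doc.putAll(requestMeta);            // _index, _type, _id, _routing, _parent
        pipeline.accept(doc);               // processors may rewrite any of it
        for (String key : requestMeta.keySet()) {
            Object value = doc.remove(key); // pull metadata back out of the source
            requestMeta.put(key, value == null ? null : value.toString());
        }
        return doc;                         // becomes the new request source
    }
}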
injector.getInstance(IndicesService.class).start(); injector.getInstance(IndicesClusterStateService.class).start(); - injector.getInstance(IndicesTTLService.class).start(); injector.getInstance(SnapshotsService.class).start(); injector.getInstance(SnapshotShardsService.class).start(); injector.getInstance(RoutingService.class).start(); @@ -666,7 +664,6 @@ private Node stop() { // This can confuse other nodes and delay things - mostly if we're the master and we're running tests. injector.getInstance(Discovery.class).stop(); // we close indices first, so operations won't be allowed on it - injector.getInstance(IndicesTTLService.class).stop(); injector.getInstance(RoutingService.class).stop(); injector.getInstance(ClusterService.class).stop(); injector.getInstance(NodeConnectionsService.class).stop(); @@ -717,7 +714,6 @@ public synchronized void close() throws IOException { toClose.add(() -> stopWatch.stop().start("indices_cluster")); toClose.add(injector.getInstance(IndicesClusterStateService.class)); toClose.add(() -> stopWatch.stop().start("indices")); - toClose.add(injector.getInstance(IndicesTTLService.class)); toClose.add(injector.getInstance(IndicesService.class)); // close filter/fielddata caches after indices toClose.add(injector.getInstance(IndicesStore.class)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 82b10361153ba..3880ec6ca9e3d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -66,10 +66,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC IndexRequest indexRequest = new IndexRequest(request.param("index"), request.param("type"), request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing - indexRequest.timestamp(request.param("timestamp")); - if (request.hasParam("ttl")) { - indexRequest.ttl(request.param("ttl")); - } indexRequest.setPipeline(request.param("pipeline")); indexRequest.source(request.content()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index e0211ccec2f44..feb2d39b8f6b1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -87,10 +87,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (upsertRequest != null) { upsertRequest.routing(request.param("routing")); upsertRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing - upsertRequest.timestamp(request.param("timestamp")); - if (request.hasParam("ttl")) { - upsertRequest.ttl(request.param("ttl")); - } upsertRequest.version(RestActions.parseVersion(request)); upsertRequest.versionType(VersionType.fromString(request.param("version_type"), upsertRequest.versionType())); } @@ -98,10 +94,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (doc != null) { doc.routing(request.param("routing")); doc.parent(request.param("parent")); // 
order is important, set it after routing, so it will set the routing - doc.timestamp(request.param("timestamp")); - if (request.hasParam("ttl")) { - doc.ttl(request.param("ttl")); - } doc.version(RestActions.parseVersion(request)); doc.versionType(VersionType.fromString(request.param("version_type"), doc.versionType())); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 0a66d6eefe00e..b72a10bcd705c 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -38,10 +38,8 @@ import org.elasticsearch.index.query.FuzzyQueryBuilder; import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder; import org.elasticsearch.index.query.GeoDistanceQueryBuilder; -import org.elasticsearch.index.query.GeoDistanceRangeQueryBuilder; import org.elasticsearch.index.query.GeoPolygonQueryBuilder; import org.elasticsearch.index.query.GeoShapeQueryBuilder; -import org.elasticsearch.index.query.GeohashCellQuery; import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.HasParentQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -775,11 +773,8 @@ private void registerQueryParsers(List plugins) { registerQuery(new QuerySpec<>(TypeQueryBuilder.NAME, TypeQueryBuilder::new, TypeQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ScriptQueryBuilder.NAME, ScriptQueryBuilder::new, ScriptQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(GeoDistanceQueryBuilder.NAME, GeoDistanceQueryBuilder::new, GeoDistanceQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(GeoDistanceRangeQueryBuilder.NAME, GeoDistanceRangeQueryBuilder::new, - GeoDistanceRangeQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(GeoBoundingBoxQueryBuilder.NAME, GeoBoundingBoxQueryBuilder::new, GeoBoundingBoxQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(GeohashCellQuery.NAME, GeohashCellQuery.Builder::new, GeohashCellQuery.Builder::fromXContent)); registerQuery(new QuerySpec<>(GeoPolygonQueryBuilder.NAME, GeoPolygonQueryBuilder::new, GeoPolygonQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ExistsQueryBuilder.NAME, ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(MatchNoneQueryBuilder.NAME, MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent)); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 84890857c793f..e89d87e4fbf8f 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -27,7 +27,6 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -89,8 +88,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) { // what they were doing and try to highlight anyway. 
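The highlighter keeps its lenient wildcard behavior: fields matched only through a wildcard pattern are silently skipped unless they are string-like, and with StringFieldMapper gone the allowlist shrinks to text and keyword. A one-method sketch of that filter (hypothetical helper, with the content type names as plain strings):

final class HighlightFieldFilter {
    static boolean highlightable(String typeName, boolean requestedViaWildcard) {
        if (requestedViaWildcard == false) {
            return true; // an explicitly named field is always attempted
        }
        return "text".equals(typeName) || "keyword".equals(typeName); // "string" dropped
    }
}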
if (fieldNameContainsWildcards) { if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && - fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false && - fieldMapper.fieldType().typeName().equals(StringFieldMapper.CONTENT_TYPE) == false) { + fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) { continue; } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java index 95612693f8b4c..c40b144100082 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -123,9 +123,8 @@ public void readFrom(StreamInput in) throws IOException { case CompletionSuggestion.TYPE: suggestion = new CompletionSuggestion(); break; - case org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.TYPE: - suggestion = new org.elasticsearch.search.suggest.completion2x.CompletionSuggestion(); - break; + case 2: // CompletionSuggestion.TYPE + throw new IllegalArgumentException("Completion suggester 2.x is not supported anymore"); case PhraseSuggestion.TYPE: suggestion = new PhraseSuggestion(); break; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index f471d91c3b33b..049883822de4d 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -18,11 +18,7 @@ */ package org.elasticsearch.search.suggest.completion; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.ReaderUtil; -import org.apache.lucene.index.Terms; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.IndexSearcher; @@ -32,28 +28,19 @@ import org.apache.lucene.search.suggest.document.TopSuggestDocs; import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector; import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.fielddata.AtomicFieldData; -import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.SuggestionBuilder; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -93,73 +80,11 @@ protected Suggest.Suggestion results = - new HashMap<>(indexReader.leaves().size() 
* suggestionContext.getSize()); - for (LeafReaderContext atomicReaderContext : indexReader.leaves()) { - LeafReader atomicReader = atomicReaderContext.reader(); - Terms terms = atomicReader.fields().terms(fieldName); - if (terms instanceof Completion090PostingsFormat.CompletionTerms) { - final Completion090PostingsFormat.CompletionTerms lookupTerms = (Completion090PostingsFormat.CompletionTerms) terms; - final Lookup lookup = lookupTerms.getLookup(suggestionContext.getFieldType2x(), suggestionContext); - if (lookup == null) { - // we don't have a lookup for this segment.. this might be possible if a merge dropped all - // docs from the segment that had a value in this segment. - continue; - } - List lookupResults = lookup.lookup(spare.get(), false, suggestionContext.getSize()); - for (Lookup.LookupResult res : lookupResults) { - - final String key = res.key.toString(); - final float score = res.value; - final org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option value = results.get(key); - if (value == null) { - final org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option option = - new org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option(new Text(key), score, - res.payload == null ? null : new BytesArray(res.payload)); - results.put(key, option); - } else if (value.getScore() < score) { - value.setScore(score); - value.setPayload(res.payload == null ? null : new BytesArray(res.payload)); - } - } - } - } - final List options = - new ArrayList<>(results.values()); - CollectionUtil.introSort(options, scoreComparator); - - int optionCount = Math.min(suggestionContext.getSize(), options.size()); - for (int i = 0; i < optionCount; i++) { - completionSuggestEntry.addOption(options.get(i)); - } - return completionSuggestion; } return null; } - private static final ScoreComparator scoreComparator = new ScoreComparator(); - public static class ScoreComparator implements - Comparator { - @Override - public int compare(org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option o1, - org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option o2) { - return Float.compare(o2.getScore(), o1.getScore()); - } - } - private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSuggestDocsCollector collector) throws IOException { query = (CompletionQuery) query.rewrite(searcher.getIndexReader()); Weight weight = query.createWeight(searcher, collector.needsScores()); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 3b216d9186ab0..09382d9aaffce 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; @@ -41,9 +40,6 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import 
org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import org.elasticsearch.search.suggest.completion2x.context.CategoryContextMapping; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextQuery; -import org.elasticsearch.search.suggest.completion2x.context.GeolocationContextMapping; import java.io.IOException; import java.util.ArrayList; @@ -215,105 +211,6 @@ private CompletionSuggestionBuilder contexts(XContentBuilder contextBuilder) { return this; } - public CompletionSuggestionBuilder contexts(Contexts2x contexts2x) { - Objects.requireNonNull(contexts2x, "contexts must not be null"); - try { - XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); - contentBuilder.startObject(); - for (ContextQuery contextQuery : contexts2x.contextQueries) { - contextQuery.toXContent(contentBuilder, EMPTY_PARAMS); - } - contentBuilder.endObject(); - return contexts(contentBuilder); - } catch (IOException e) { - throw new IllegalArgumentException(e); - } - } - - // for 2.x context support - public static class Contexts2x { - private List contextQueries = new ArrayList<>(); - - @SuppressWarnings("unchecked") - private Contexts2x addContextQuery(ContextQuery ctx) { - this.contextQueries.add(ctx); - return this; - } - - /** - * Setup a Geolocation for suggestions. See {@link GeolocationContextMapping}. - * @param lat Latitude of the location - * @param lon Longitude of the Location - * @return this - */ - @Deprecated - public Contexts2x addGeoLocation(String name, double lat, double lon, int ... precisions) { - return addContextQuery(GeolocationContextMapping.query(name, lat, lon, precisions)); - } - - /** - * Setup a Geolocation for suggestions. See {@link GeolocationContextMapping}. - * @param lat Latitude of the location - * @param lon Longitude of the Location - * @param precisions precisions as string var-args - * @return this - */ - @Deprecated - public Contexts2x addGeoLocationWithPrecision(String name, double lat, double lon, String ... precisions) { - return addContextQuery(GeolocationContextMapping.query(name, lat, lon, precisions)); - } - - /** - * Setup a Geolocation for suggestions. See {@link GeolocationContextMapping}. - * @param geohash Geohash of the location - * @return this - */ - @Deprecated - public Contexts2x addGeoLocation(String name, String geohash) { - return addContextQuery(GeolocationContextMapping.query(name, geohash)); - } - - /** - * Setup a Category for suggestions. See {@link CategoryContextMapping}. - * @param categories name of the category - * @return this - */ - @Deprecated - public Contexts2x addCategory(String name, CharSequence...categories) { - return addContextQuery(CategoryContextMapping.query(name, categories)); - } - - /** - * Setup a Category for suggestions. See {@link CategoryContextMapping}. - * @param categories name of the category - * @return this - */ - @Deprecated - public Contexts2x addCategory(String name, Iterable categories) { - return addContextQuery(CategoryContextMapping.query(name, categories)); - } - - /** - * Setup a Context Field for suggestions. See {@link CategoryContextMapping}. - * @param fieldvalues name of the category - * @return this - */ - @Deprecated - public Contexts2x addContextField(String name, CharSequence...fieldvalues) { - return addContextQuery(CategoryContextMapping.query(name, fieldvalues)); - } - - /** - * Setup a Context Field for suggestions. See {@link CategoryContextMapping}. 
- * @param fieldvalues name of the category - * @return this - */ - @Deprecated - public Contexts2x addContextField(String name, Iterable fieldvalues) { - return addContextQuery(CategoryContextMapping.query(name, fieldvalues)); - } - } - private static class InnerBuilder extends CompletionSuggestionBuilder { private String field; @@ -366,8 +263,7 @@ public SuggestionContext build(QueryShardContext context) throws IOException { suggestionContext.setRegexOptions(regexOptions); MappedFieldType mappedFieldType = mapperService.fullName(suggestionContext.getField()); if (mappedFieldType == null || - (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType == false - && mappedFieldType instanceof CompletionFieldMapper2x.CompletionFieldType == false)) { + mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType == false) { throw new IllegalArgumentException("Field [" + suggestionContext.getField() + "] is not a completion suggest field"); } if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) { @@ -395,23 +291,8 @@ public SuggestionContext build(QueryShardContext context) throws IOException { } else if (contextBytes != null) { throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); } - } else if (mappedFieldType instanceof CompletionFieldMapper2x.CompletionFieldType) { - CompletionFieldMapper2x.CompletionFieldType type = ((CompletionFieldMapper2x.CompletionFieldType) mappedFieldType); - suggestionContext.setFieldType2x(type); - if (type.requiresContext()) { - if (contextBytes != null) { - try (XContentParser contextParser = XContentFactory.xContent(contextBytes).createParser(contextBytes)) { - contextParser.nextToken(); - suggestionContext.setContextQueries(ContextQuery.parseQueries(type.getContextMapping(), contextParser)); - } - } else { - throw new IllegalArgumentException("suggester [completion] requires context to be setup"); - } - } else if (contextBytes != null) { - throw new IllegalArgumentException("suggester [completion] doesn't expect any context"); - } } - assert suggestionContext.getFieldType() != null || suggestionContext.getFieldType2x() != null : "no completion field type set"; + assert suggestionContext.getFieldType() != null : "no completion field type set"; return suggestionContext; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index 273aeb3171750..a4aeec8cb5833 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -21,12 +21,10 @@ import org.apache.lucene.search.suggest.document.CompletionQuery; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextQuery; import java.util.Collections; import java.util.List; @@ -42,17 +40,11 @@ protected CompletionSuggestionContext(QueryShardContext shardContext) { private 
FuzzyOptions fuzzyOptions; private RegexOptions regexOptions; private Map> queryContexts = Collections.emptyMap(); - private CompletionFieldMapper2x.CompletionFieldType fieldType2x; - private List contextQueries; CompletionFieldMapper.CompletionFieldType getFieldType() { return this.fieldType; } - CompletionFieldMapper2x.CompletionFieldType getFieldType2x() { - return this.fieldType2x; - } - void setFieldType(CompletionFieldMapper.CompletionFieldType fieldType) { this.fieldType = fieldType; } @@ -113,15 +105,4 @@ CompletionQuery toQuery() { return query; } - public void setFieldType2x(CompletionFieldMapper2x.CompletionFieldType type) { - this.fieldType2x = type; - } - - public void setContextQueries(List contextQueries) { - this.contextQueries = contextQueries; - } - - public List getContextQueries() { - return contextQueries; - } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index bd1449bbfe7a5..9d5838b4b2138 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -29,8 +29,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.query.QueryParseContext; @@ -140,7 +140,7 @@ protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params public Set parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException { if (fieldName != null) { FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName); - if (!(mapper instanceof GeoPointFieldMapper)) { + if (!(mapper instanceof BaseGeoPointFieldMapper)) { throw new ElasticsearchParseException("referenced field must be mapped to geo_point"); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java deleted file mode 100644 index e9095bfb7d576..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x; - -import com.carrotsearch.hppc.ObjectLongHashMap; - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.FieldsConsumer; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.suggest.Lookup; -import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; -import org.apache.lucene.search.suggest.analyzing.XFuzzySuggester; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.IntsRef; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.fst.ByteSequenceOutputs; -import org.apache.lucene.util.fst.FST; -import org.apache.lucene.util.fst.PairOutputs; -import org.apache.lucene.util.fst.PairOutputs.Pair; -import org.apache.lucene.util.fst.PositiveIntOutputs; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionContext; -import org.elasticsearch.search.suggest.completion.FuzzyOptions; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat.CompletionLookupProvider; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat.LookupFactory; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextQuery; - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; - -public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider { - - // for serialization - public static final int SERIALIZE_PRESERVE_SEPARATORS = 1; - public static final int SERIALIZE_HAS_PAYLOADS = 2; - public static final int SERIALIZE_PRESERVE_POSITION_INCREMENTS = 4; - - private static final int MAX_SURFACE_FORMS_PER_ANALYZED_FORM = 256; - private static final int MAX_GRAPH_EXPANSIONS = -1; - - public static final String CODEC_NAME = "analyzing"; - public static final int CODEC_VERSION_START = 1; - public static final int CODEC_VERSION_SERIALIZED_LABELS = 2; - public static final int CODEC_VERSION_CHECKSUMS = 3; - public static final int CODEC_VERSION_LATEST = CODEC_VERSION_CHECKSUMS; - - private final boolean preserveSep; - private final boolean preservePositionIncrements; - private final int maxSurfaceFormsPerAnalyzedForm; - private final int maxGraphExpansions; - private final boolean hasPayloads; - private final XAnalyzingSuggester prototype; - - public AnalyzingCompletionLookupProvider(boolean preserveSep, boolean preservePositionIncrements, boolean hasPayloads) { - this.preserveSep = preserveSep; - this.preservePositionIncrements = preservePositionIncrements; - this.hasPayloads = hasPayloads; - this.maxSurfaceFormsPerAnalyzedForm = 
MAX_SURFACE_FORMS_PER_ANALYZED_FORM; - this.maxGraphExpansions = MAX_GRAPH_EXPANSIONS; - int options = preserveSep ? XAnalyzingSuggester.PRESERVE_SEP : 0; - // needs to be fixed in the suggester first before it can be supported - //options |= exactFirst ? XAnalyzingSuggester.EXACT_FIRST : 0; - prototype = new XAnalyzingSuggester(null, null, null, options, maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, - preservePositionIncrements, null, false, 1, XAnalyzingSuggester.SEP_LABEL, XAnalyzingSuggester.PAYLOAD_SEP, - XAnalyzingSuggester.END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER); - } - - @Override - public String getName() { - return "analyzing"; - } - - public boolean getPreserveSep() { - return preserveSep; - } - - public boolean getPreservePositionsIncrements() { - return preservePositionIncrements; - } - - public boolean hasPayloads() { - return hasPayloads; - } - - @Override - public FieldsConsumer consumer(final IndexOutput output) throws IOException { - CodecUtil.writeHeader(output, CODEC_NAME, CODEC_VERSION_LATEST); - return new FieldsConsumer() { - private Map<String, Long> fieldOffsets = new HashMap<>(); - - @Override - public void close() throws IOException { - try { - /* - * write the offsets per field such that we know where - * we need to load the FSTs from - */ - long pointer = output.getFilePointer(); - output.writeVInt(fieldOffsets.size()); - for (Map.Entry<String, Long> entry : fieldOffsets.entrySet()) { - output.writeString(entry.getKey()); - output.writeVLong(entry.getValue()); - } - output.writeLong(pointer); - CodecUtil.writeFooter(output); - } finally { - IOUtils.close(output); - } - } - - @Override - public void write(Fields fields) throws IOException { - for (String field : fields) { - Terms terms = fields.terms(field); - if (terms == null) { - continue; - } - TermsEnum termsEnum = terms.iterator(); - PostingsEnum docsEnum = null; - final SuggestPayload spare = new SuggestPayload(); - int maxAnalyzedPathsForOneInput = 0; - final XAnalyzingSuggester.XBuilder builder = new XAnalyzingSuggester.XBuilder( - maxSurfaceFormsPerAnalyzedForm, hasPayloads, XAnalyzingSuggester.PAYLOAD_SEP); - int docCount = 0; - while (true) { - BytesRef term = termsEnum.next(); - if (term == null) { - break; - } - docsEnum = termsEnum.postings(docsEnum, PostingsEnum.PAYLOADS); - builder.startTerm(term); - int docFreq = 0; - while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - for (int i = 0; i < docsEnum.freq(); i++) { - final int position = docsEnum.nextPosition(); - AnalyzingCompletionLookupProvider.this.parsePayload(docsEnum.getPayload(), spare); - builder.addSurface(spare.surfaceForm.get(), spare.payload.get(), spare.weight); - // multi fields have the same surface form so we sum up here - maxAnalyzedPathsForOneInput = Math.max(maxAnalyzedPathsForOneInput, position + 1); - } - docFreq++; - docCount = Math.max(docCount, docsEnum.docID()+1); - } - builder.finishTerm(docFreq); - } - /* - * Here we are done processing the field and we can - * build the FST and write it to disk. - */ - FST<Pair<Long, BytesRef>> build = builder.build(); - assert build != null || docCount == 0: "the FST is null but docCount is != 0 actual value: [" + docCount + "]"; - /* - * it's possible that the FST is null if we have 2 segments that get merged - * and all docs that have a value in this field are deleted. This will cause - * a consumer to be created but it doesn't consume any values causing the FSTBuilder - * to return null.
- */ - if (build != null) { - fieldOffsets.put(field, output.getFilePointer()); - build.save(output); - /* write some more meta-info */ - output.writeVInt(maxAnalyzedPathsForOneInput); - output.writeVInt(maxSurfaceFormsPerAnalyzedForm); - output.writeInt(maxGraphExpansions); // can be negative - int options = 0; - options |= preserveSep ? SERIALIZE_PRESERVE_SEPARATORS : 0; - options |= hasPayloads ? SERIALIZE_HAS_PAYLOADS : 0; - options |= preservePositionIncrements ? SERIALIZE_PRESERVE_POSITION_INCREMENTS : 0; - output.writeVInt(options); - output.writeVInt(XAnalyzingSuggester.SEP_LABEL); - output.writeVInt(XAnalyzingSuggester.END_BYTE); - output.writeVInt(XAnalyzingSuggester.PAYLOAD_SEP); - output.writeVInt(XAnalyzingSuggester.HOLE_CHARACTER); - } - } - } - }; - } - - - @Override - public LookupFactory load(IndexInput input) throws IOException { - long sizeInBytes = 0; - int version = CodecUtil.checkHeader(input, CODEC_NAME, CODEC_VERSION_START, CODEC_VERSION_LATEST); - if (version >= CODEC_VERSION_CHECKSUMS) { - CodecUtil.checksumEntireFile(input); - } - final long metaPointerPosition = input.length() - (version >= CODEC_VERSION_CHECKSUMS? 8 + CodecUtil.footerLength() : 8); - final Map lookupMap = new HashMap<>(); - input.seek(metaPointerPosition); - long metaPointer = input.readLong(); - input.seek(metaPointer); - int numFields = input.readVInt(); - - Map meta = new TreeMap<>(); - for (int i = 0; i < numFields; i++) { - String name = input.readString(); - long offset = input.readVLong(); - meta.put(offset, name); - } - - for (Map.Entry entry : meta.entrySet()) { - input.seek(entry.getKey()); - FST> fst = new FST<>(input, new PairOutputs<>( - PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton())); - int maxAnalyzedPathsForOneInput = input.readVInt(); - int maxSurfaceFormsPerAnalyzedForm = input.readVInt(); - int maxGraphExpansions = input.readInt(); - int options = input.readVInt(); - boolean preserveSep = (options & SERIALIZE_PRESERVE_SEPARATORS) != 0; - boolean hasPayloads = (options & SERIALIZE_HAS_PAYLOADS) != 0; - boolean preservePositionIncrements = (options & SERIALIZE_PRESERVE_POSITION_INCREMENTS) != 0; - - // first version did not include these three fields, so fall back to old default (before the analyzingsuggester - // was updated in Lucene, so we cannot use the suggester defaults) - int sepLabel, payloadSep, endByte, holeCharacter; - switch (version) { - case CODEC_VERSION_START: - sepLabel = 0xFF; - payloadSep = '\u001f'; - endByte = 0x0; - holeCharacter = '\u001E'; - break; - default: - sepLabel = input.readVInt(); - endByte = input.readVInt(); - payloadSep = input.readVInt(); - holeCharacter = input.readVInt(); - } - - AnalyzingSuggestHolder holder = new AnalyzingSuggestHolder(preserveSep, preservePositionIncrements, - maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, hasPayloads, maxAnalyzedPathsForOneInput, - fst, sepLabel, payloadSep, endByte, holeCharacter); - sizeInBytes += fst.ramBytesUsed(); - lookupMap.put(entry.getValue(), holder); - } - final long ramBytesUsed = sizeInBytes; - return new LookupFactory() { - @Override - public Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType fieldType, CompletionSuggestionContext suggestionContext) { - AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(fieldType.name()); - if (analyzingSuggestHolder == null) { - return null; - } - int flags = analyzingSuggestHolder.getPreserveSeparator() ? 
XAnalyzingSuggester.PRESERVE_SEP : 0; - - final XAnalyzingSuggester suggester; - final Automaton queryPrefix = fieldType.requiresContext() ? - ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(), suggestionContext.getContextQueries()) : null; - - final FuzzyOptions fuzzyOptions = suggestionContext.getFuzzyOptions(); - if (fuzzyOptions != null) { - suggester = new XFuzzySuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - fuzzyOptions.getEditDistance(), fuzzyOptions.isTranspositions(), - fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(), fuzzyOptions.isUnicodeAware(), - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, - analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, - analyzingSuggestHolder.holeCharacter); - } else { - suggester = new XAnalyzingSuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, - analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, analyzingSuggestHolder.holeCharacter); - } - return suggester; - } - - @Override - public CompletionStats stats(String... fields) { - long sizeInBytes = 0; - ObjectLongHashMap completionFields = null; - if (fields != null && fields.length > 0) { - completionFields = new ObjectLongHashMap<>(fields.length); - } - - for (Map.Entry entry : lookupMap.entrySet()) { - sizeInBytes += entry.getValue().fst.ramBytesUsed(); - if (fields == null || fields.length == 0) { - continue; - } - if (Regex.simpleMatch(fields, entry.getKey())) { - long fstSize = entry.getValue().fst.ramBytesUsed(); - completionFields.addTo(entry.getKey(), fstSize); - } - } - - return new CompletionStats(sizeInBytes, completionFields); - } - - @Override - AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType) { - return lookupMap.get(fieldType.name()); - } - - @Override - public long ramBytesUsed() { - return ramBytesUsed; - } - - @Override - public Collection getChildResources() { - return Accountables.namedAccountables("field", lookupMap); - } - }; - } - - static class AnalyzingSuggestHolder implements Accountable { - final boolean preserveSep; - final boolean preservePositionIncrements; - final int maxSurfaceFormsPerAnalyzedForm; - final int maxGraphExpansions; - final boolean hasPayloads; - final int maxAnalyzedPathsForOneInput; - final FST> fst; - final int sepLabel; - final int payloadSep; - final int endByte; - final int holeCharacter; - - public AnalyzingSuggestHolder(boolean preserveSep, boolean preservePositionIncrements, - int maxSurfaceFormsPerAnalyzedForm, int maxGraphExpansions, - boolean hasPayloads, int maxAnalyzedPathsForOneInput, - FST> fst, int sepLabel, int payloadSep, - int endByte, int holeCharacter) { - this.preserveSep = preserveSep; - this.preservePositionIncrements = preservePositionIncrements; - this.maxSurfaceFormsPerAnalyzedForm = maxSurfaceFormsPerAnalyzedForm; - this.maxGraphExpansions = maxGraphExpansions; - this.hasPayloads = hasPayloads; - this.maxAnalyzedPathsForOneInput = 
maxAnalyzedPathsForOneInput; - this.fst = fst; - this.sepLabel = sepLabel; - this.payloadSep = payloadSep; - this.endByte = endByte; - this.holeCharacter = holeCharacter; - } - - public boolean getPreserveSeparator() { - return preserveSep; - } - - public boolean getPreservePositionIncrements() { - return preservePositionIncrements; - } - - public boolean hasPayloads() { - return hasPayloads; - } - - @Override - public long ramBytesUsed() { - if (fst != null) { - return fst.ramBytesUsed(); - } else { - return 0; - } - } - - @Override - public Collection getChildResources() { - if (fst != null) { - return Collections.singleton(Accountables.namedAccountable("fst", fst)); - } else { - return Collections.emptyList(); - } - } - } - - @Override - public Set toFiniteStrings(TokenStream stream) throws IOException { - return prototype.toFiniteStrings(stream); - } - - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java deleted file mode 100644 index 245f2416b40cb..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.suggest.completion2x; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.FieldsConsumer; -import org.apache.lucene.codecs.FieldsProducer; -import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.FilterLeafReader.FilterTerms; -import org.apache.lucene.index.IndexFileNames; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SegmentReadState; -import org.apache.lucene.index.SegmentWriteState; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.suggest.Lookup; -import org.apache.lucene.store.IOContext.Context; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.InputStreamDataInput; -import org.apache.lucene.store.OutputStreamDataOutput; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.Version; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionContext; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * This {@link PostingsFormat} is basically a T-Sink for a default postings - * format that is used to store postings on disk fitting the Lucene APIs and - * builds a suggest FST as an auxiliary data structure next to the actual - * postings format. It uses the delegate postings format for simplicity to - * handle all the merge operations. The auxiliary suggest FST data structure is - * only loaded if a FieldsProducer is requested for reading; for merging it uses - * the low-memory delegate postings format. - */ -public class Completion090PostingsFormat extends PostingsFormat { - - public static final String CODEC_NAME = "completion090"; - public static final int SUGGEST_CODEC_VERSION = 1; - public static final int SUGGEST_VERSION_CURRENT = SUGGEST_CODEC_VERSION; - public static final String EXTENSION = "cmp"; - - private static final Logger logger = Loggers.getLogger(Completion090PostingsFormat.class); - private PostingsFormat delegatePostingsFormat; - private static final Map<String, CompletionLookupProvider> providers; - private CompletionLookupProvider writeProvider; - - - static { - final CompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, true, false); - providers = Collections.singletonMap(provider.getName(), provider); - } - - public Completion090PostingsFormat(PostingsFormat delegatePostingsFormat, CompletionLookupProvider provider) { - super(CODEC_NAME); - this.delegatePostingsFormat = delegatePostingsFormat; - this.writeProvider = provider; - assert delegatePostingsFormat != null && writeProvider != null; - } - - /* - * Used only by core Lucene at read-time via Service Provider instantiation; - * do not use at write-time in application code.
- */ - public Completion090PostingsFormat() { - super(CODEC_NAME); - } - - @Override - public CompletionFieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - if (delegatePostingsFormat == null) { - throw new UnsupportedOperationException("Error - " + getClass().getName() - + " has been constructed without a choice of PostingsFormat"); - } - assert writeProvider != null; - return new CompletionFieldsConsumer(state); - } - - @Override - public CompletionFieldsProducer fieldsProducer(SegmentReadState state) throws IOException { - return new CompletionFieldsProducer(state); - } - - private class CompletionFieldsConsumer extends FieldsConsumer { - - private FieldsConsumer delegatesFieldsConsumer; - private FieldsConsumer suggestFieldsConsumer; - - public CompletionFieldsConsumer(SegmentWriteState state) throws IOException { - this.delegatesFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state); - String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION); - IndexOutput output = null; - boolean success = false; - try { - output = state.directory.createOutput(suggestFSTFile, state.context); - CodecUtil.writeIndexHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); - /* - * we write the delegate postings format name so we can load it - * without getting an instance in the ctor - */ - output.writeString(delegatePostingsFormat.getName()); - output.writeString(writeProvider.getName()); - this.suggestFieldsConsumer = writeProvider.consumer(output); - success = true; - } finally { - if (!success) { - IOUtils.closeWhileHandlingException(output); - } - } - } - - @Override - public void write(Fields fields) throws IOException { - delegatesFieldsConsumer.write(fields); - suggestFieldsConsumer.write(fields); - } - - @Override - public void close() throws IOException { - IOUtils.close(delegatesFieldsConsumer, suggestFieldsConsumer); - } - } - - private static class CompletionFieldsProducer extends FieldsProducer { - // TODO make this class lazyload all the things in order to take advantage of the new merge instance API - // today we just load everything up-front - private final FieldsProducer delegateProducer; - private final LookupFactory lookupFactory; - private final int version; - - public CompletionFieldsProducer(SegmentReadState state) throws IOException { - String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION); - IndexInput input = state.directory.openInput(suggestFSTFile, state.context); - if (state.segmentInfo.getVersion().onOrAfter(Version.LUCENE_6_2_0)) { - // Lucene 6.2.0+ requires all index files to use index header, but prior to that we used an ordinary codec header: - version = CodecUtil.checkIndexHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT, - state.segmentInfo.getId(), state.segmentSuffix); - } else { - version = CodecUtil.checkHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT); - } - FieldsProducer delegateProducer = null; - boolean success = false; - try { - PostingsFormat delegatePostingsFormat = PostingsFormat.forName(input.readString()); - String providerName = input.readString(); - CompletionLookupProvider completionLookupProvider = providers.get(providerName); - if (completionLookupProvider == null) { - throw new IllegalStateException("no provider with name [" + providerName + "] registered"); - } - // TODO: we could clone the ReadState and make it 
always forward IOContext.MERGE to prevent unnecessary heap usage? - delegateProducer = delegatePostingsFormat.fieldsProducer(state); - /* - * If we are merging we don't load the FSTs at all such that we - * don't consume so much memory during merge - */ - if (state.context.context != Context.MERGE) { - // TODO: maybe we can do this in a fully lazy fashion based on some configuration - // eventually we should have some kind of circuit breaker that prevents us from going OOM here - // with some configuration - this.lookupFactory = completionLookupProvider.load(input); - } else { - this.lookupFactory = null; - } - this.delegateProducer = delegateProducer; - success = true; - } finally { - if (!success) { - IOUtils.closeWhileHandlingException(delegateProducer, input); - } else { - IOUtils.close(input); - } - } - } - - @Override - public void close() throws IOException { - IOUtils.close(delegateProducer); - } - - @Override - public Iterator<String> iterator() { - return delegateProducer.iterator(); - } - - @Override - public Terms terms(String field) throws IOException { - final Terms terms = delegateProducer.terms(field); - if (terms == null || lookupFactory == null) { - return terms; - } - return new CompletionTerms(terms, lookupFactory); - } - - @Override - public int size() { - return delegateProducer.size(); - } - - @Override - public long ramBytesUsed() { - return (lookupFactory == null ? 0 : lookupFactory.ramBytesUsed()) + delegateProducer.ramBytesUsed(); - } - - @Override - public Collection<Accountable> getChildResources() { - List<Accountable> resources = new ArrayList<>(); - if (lookupFactory != null) { - resources.add(Accountables.namedAccountable("lookup", lookupFactory)); - } - resources.add(Accountables.namedAccountable("delegate", delegateProducer)); - return Collections.unmodifiableList(resources); - } - - @Override - public void checkIntegrity() throws IOException { - delegateProducer.checkIntegrity(); - } - - @Override - public FieldsProducer getMergeInstance() throws IOException { - return delegateProducer.getMergeInstance(); - } - } - - public static final class CompletionTerms extends FilterTerms { - private final LookupFactory lookup; - - public CompletionTerms(Terms delegate, LookupFactory lookup) { - super(delegate); - this.lookup = lookup; - } - - public Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType mapper, CompletionSuggestionContext suggestionContext) { - return lookup.getLookup(mapper, suggestionContext); - } - - public CompletionStats stats(String ...
fields) { - return lookup.stats(fields); - } - } - - public abstract static class CompletionLookupProvider implements PayloadProcessor, CompletionTokenStream.ToFiniteStrings { - - public static final char UNIT_SEPARATOR = '\u001f'; - - public abstract FieldsConsumer consumer(IndexOutput output) throws IOException; - - public abstract String getName(); - - public abstract LookupFactory load(IndexInput input) throws IOException; - - @Override - public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException { - if (weight < -1 || weight > Integer.MAX_VALUE) { - throw new IllegalArgumentException("weight must be >= -1 && <= Integer.MAX_VALUE"); - } - for (int i = 0; i < surfaceForm.length; i++) { - if (surfaceForm.bytes[i] == UNIT_SEPARATOR) { - throw new IllegalArgumentException( - "surface form cannot contain unit separator character U+001F; this character is reserved"); - } - } - ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - OutputStreamDataOutput output = new OutputStreamDataOutput(byteArrayOutputStream); - output.writeVLong(weight + 1); - output.writeVInt(surfaceForm.length); - output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length); - output.writeVInt(payload.length); - output.writeBytes(payload.bytes, 0, payload.length); - - output.close(); - return new BytesRef(byteArrayOutputStream.toByteArray()); - } - - @Override - public void parsePayload(BytesRef payload, SuggestPayload ref) throws IOException { - ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(payload.bytes, payload.offset, payload.length); - InputStreamDataInput input = new InputStreamDataInput(byteArrayInputStream); - ref.weight = input.readVLong() - 1; - int len = input.readVInt(); - ref.surfaceForm.grow(len); - ref.surfaceForm.setLength(len); - input.readBytes(ref.surfaceForm.bytes(), 0, ref.surfaceForm.length()); - len = input.readVInt(); - ref.payload.grow(len); - ref.payload.setLength(len); - input.readBytes(ref.payload.bytes(), 0, ref.payload.length()); - input.close(); - } - } - - /** - * Returns total in-heap bytes used by all suggesters. This method has CPU cost O(numIndexedFields). - * - * @param fieldNamePatterns if non-null, any completion field name matching any of these patterns will break out its in-heap bytes - * separately in the returned {@link CompletionStats} - */ - public CompletionStats completionStats(IndexReader indexReader, String ... fieldNamePatterns) { - CompletionStats completionStats = new CompletionStats(); - for (LeafReaderContext atomicReaderContext : indexReader.leaves()) { - LeafReader atomicReader = atomicReaderContext.reader(); - try { - Fields fields = atomicReader.fields(); - for (String fieldName : fields) { - Terms terms = fields.terms(fieldName); - if (terms instanceof CompletionTerms) { - CompletionTerms completionTerms = (CompletionTerms) terms; - completionStats.add(completionTerms.stats(fieldNamePatterns)); - } - } - } catch (IOException ioe) { - logger.error("Could not get completion stats", ioe); - } - } - - return completionStats; - } - - public abstract static class LookupFactory implements Accountable { - public abstract Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType fieldType, - CompletionSuggestionContext suggestionContext); - public abstract CompletionStats stats(String ... 
fields); - abstract AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java deleted file mode 100644 index 50518ee0eff9f..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest.completion2x; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.search.suggest.Suggest; - -import java.io.IOException; -import java.util.Map; - -public class CompletionSuggestion extends Suggest.Suggestion { - - public static final int TYPE = 2; - - public CompletionSuggestion() { - } - - public CompletionSuggestion(String name, int size) { - super(name, size); - } - - @Override - public int getType() { - return TYPE; - } - - @Override - protected Entry newEntry() { - return new Entry(); - } - - public static class Entry extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry { - - public Entry(Text text, int offset, int length) { - super(text, offset, length); - } - - protected Entry() { - super(); - } - - @Override - protected Option newOption() { - return new Option(); - } - - public static class Option extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option { - private BytesReference payload; - - public Option(Text text, float score, BytesReference payload) { - super(text, score); - this.payload = payload; - } - - - protected Option() { - super(); - } - - public void setPayload(BytesReference payload) { - this.payload = payload; - } - - public BytesReference getPayload() { - return payload; - } - - public String getPayloadAsString() { - return payload.utf8ToString(); - } - - public long getPayloadAsLong() { - return Long.parseLong(payload.utf8ToString()); - } - - public double getPayloadAsDouble() { - return Double.parseDouble(payload.utf8ToString()); - } - - public Map getPayloadAsMap() { - return XContentHelper.convertToMap(payload, false).v2(); - } - - @Override - public void setScore(float score) { - super.setScore(score); - } - - @Override - protected XContentBuilder innerToXContent(XContentBuilder 
builder, Params params) throws IOException { - super.innerToXContent(builder, params); - if (payload != null && payload.length() > 0) { - XContentType contentType = XContentFactory.xContentType(payload); - if (contentType == null) { - // must be a string or number - builder.field("payload", payload.utf8ToString()); - } else { - builder.rawField("payload", payload); - } - } - return builder; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - payload = in.readBytesReference(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBytesReference(payload); - } - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java deleted file mode 100644 index de81caa7e5d96..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.suggest.completion2x; - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; -import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; -import org.apache.lucene.util.AttributeImpl; -import org.apache.lucene.util.AttributeReflector; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.IntsRef; -import org.apache.lucene.util.fst.Util; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Set; - -public final class CompletionTokenStream extends TokenStream { - - private final PayloadAttribute payloadAttr = addAttribute(PayloadAttribute.class); - private final PositionIncrementAttribute posAttr = addAttribute(PositionIncrementAttribute.class); - private final ByteTermAttribute bytesAtt = addAttribute(ByteTermAttribute.class); - - - private final TokenStream input; - private BytesRef payload; - private Iterator<IntsRef> finiteStrings; - private ToFiniteStrings toFiniteStrings; - private int posInc = -1; - private static final int MAX_PATHS = 256; - private CharTermAttribute charTermAttribute; - - public CompletionTokenStream(TokenStream input, BytesRef payload, ToFiniteStrings toFiniteStrings) { - // Don't call the super(input) ctor - this is a true delegate and has a new attribute source since we consume - // the input stream entirely in toFiniteStrings(input) - this.input = input; - this.payload = payload; - this.toFiniteStrings = toFiniteStrings; - } - - @Override - public boolean incrementToken() throws IOException { - clearAttributes(); - if (finiteStrings == null) { - Set<IntsRef> strings = toFiniteStrings.toFiniteStrings(input); - - if (strings.size() > MAX_PATHS) { - throw new IllegalArgumentException("TokenStream expanded to " + strings.size() + " finite strings. Only <= " + MAX_PATHS - + " finite strings are supported"); - } - posInc = strings.size(); - finiteStrings = strings.iterator(); - } - if (finiteStrings.hasNext()) { - posAttr.setPositionIncrement(posInc); - /* - * this posInc encodes the number of paths that this surface form - * produced. Multi Fields have the same surface form and therefore sum up - */ - posInc = 0; - Util.toBytesRef(finiteStrings.next(), bytesAtt.builder()); // now we have UTF-8 - if (charTermAttribute != null) { - charTermAttribute.setLength(0); - charTermAttribute.append(bytesAtt.toUTF16()); - } - if (payload != null) { - payloadAttr.setPayload(this.payload); - } - return true; - } - - return false; - } - - @Override - public void end() throws IOException { - super.end(); - if (posInc == -1) { - input.end(); - } - } - - @Override - public void close() throws IOException { - input.close(); - } - - public interface ToFiniteStrings { - Set<IntsRef> toFiniteStrings(TokenStream stream) throws IOException; - } - - @Override - public void reset() throws IOException { - super.reset(); - if (hasAttribute(CharTermAttribute.class)) { - // we only create this if we really need it to save the UTF-8 to UTF-16 conversion - charTermAttribute = getAttribute(CharTermAttribute.class); - } - finiteStrings = null; - posInc = -1; - } - - public interface ByteTermAttribute extends TermToBytesRefAttribute { - // marker interface - - /** - * Return the builder from which the term is derived.
- */ - BytesRefBuilder builder(); - - CharSequence toUTF16(); - } - - public static final class ByteTermAttributeImpl extends AttributeImpl implements ByteTermAttribute, TermToBytesRefAttribute { - private final BytesRefBuilder bytes = new BytesRefBuilder(); - private CharsRefBuilder charsRef; - - @Override - public BytesRefBuilder builder() { - return bytes; - } - - @Override - public BytesRef getBytesRef() { - return bytes.get(); - } - - @Override - public void clear() { - bytes.clear(); - } - - @Override - public void reflectWith(AttributeReflector reflector) { - - } - - @Override - public void copyTo(AttributeImpl target) { - ByteTermAttributeImpl other = (ByteTermAttributeImpl) target; - other.bytes.copyBytes(bytes); - } - - @Override - public CharSequence toUTF16() { - if (charsRef == null) { - charsRef = new CharsRefBuilder(); - } - charsRef.copyUTF8Bytes(getBytesRef()); - return charsRef.get(); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java deleted file mode 100644 index eb857ce61e6c7..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; - -import java.io.IOException; - -interface PayloadProcessor { - - BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException; - - void parsePayload(BytesRef payload, SuggestPayload ref) throws IOException; - - static class SuggestPayload { - final BytesRefBuilder payload = new BytesRefBuilder(); - long weight = 0; - final BytesRefBuilder surfaceForm = new BytesRefBuilder(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java deleted file mode 100644 index 775d8b031ad64..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java +++ /dev/null @@ -1,374 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import org.apache.lucene.analysis.PrefixAnalyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParseContext.Document; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * The {@link CategoryContextMapping} is used to define a {@link ContextMapping} that - * references a field within a document. The value of the field in turn will be - * used to setup the suggestions made by the completion suggester. - */ -public class CategoryContextMapping extends ContextMapping { - - protected static final String TYPE = "category"; - - private static final String FIELD_FIELDNAME = "path"; - private static final String DEFAULT_FIELDNAME = "_type"; - - private static final Iterable EMPTY_VALUES = Collections.emptyList(); - - private final String fieldName; - private final Iterable defaultValues; - private final FieldConfig defaultConfig; - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name) { - this(name, DEFAULT_FIELDNAME, EMPTY_VALUES); - } - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name, String fieldName) { - this(name, fieldName, EMPTY_VALUES); - } - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name, Iterable defaultValues) { - this(name, DEFAULT_FIELDNAME, defaultValues); - } - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name, String fieldName, Iterable defaultValues) { - super(TYPE, name); - this.fieldName = fieldName; - this.defaultValues = defaultValues; - this.defaultConfig = new FieldConfig(fieldName, defaultValues, null); - } - - /** - * Name of the field used by this {@link CategoryContextMapping} - */ - public String getFieldName() { - return fieldName; - } - - public Iterable getDefaultValues() { - return defaultValues; - } - - @Override - public FieldConfig defaultConfig() { - return defaultConfig; - } - - /** - * Load the specification of a {@link CategoryContextMapping} - * - * @param name - * name of the field to use. 
If null, the default field - * will be used - * @return new {@link CategoryContextMapping} - */ - protected static CategoryContextMapping load(String name, Map<String, Object> config) throws ElasticsearchParseException { - CategoryContextMapping.Builder mapping = new CategoryContextMapping.Builder(name); - - Object fieldName = config.get(FIELD_FIELDNAME); - Object defaultValues = config.get(FIELD_MISSING); - - if (fieldName != null) { - mapping.fieldName(fieldName.toString()); - config.remove(FIELD_FIELDNAME); - } - - if (defaultValues != null) { - if (defaultValues instanceof Iterable) { - for (Object value : (Iterable) defaultValues) { - mapping.addDefaultValue(value.toString()); - } - } else { - mapping.addDefaultValue(defaultValues.toString()); - } - config.remove(FIELD_MISSING); - } - - return mapping.build(); - } - - @Override - protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException { - if (fieldName != null) { - builder.field(FIELD_FIELDNAME, fieldName); - } - builder.startArray(FIELD_MISSING); - for (CharSequence value : defaultValues) { - builder.value(value); - } - builder.endArray(); - return builder; - } - - @Override - public ContextConfig parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException { - Token token = parser.currentToken(); - if (token == Token.VALUE_NULL) { - return new FieldConfig(fieldName, defaultValues, null); - } else if (token == Token.VALUE_STRING) { - return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); - } else if (token == Token.VALUE_NUMBER) { - return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); - } else if (token == Token.VALUE_BOOLEAN) { - return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); - } else if (token == Token.START_ARRAY) { - ArrayList values = new ArrayList<>(); - while ((token = parser.nextToken()) != Token.END_ARRAY) { - values.add(parser.text()); - } - if (values.isEmpty()) { - throw new ElasticsearchParseException("FieldConfig must contain at least one category"); - } - return new FieldConfig(fieldName, null, values); - } else { - throw new ElasticsearchParseException("FieldConfig must be either [null], a string or a list of strings"); - } - } - - @Override - public FieldQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException { - Iterable values; - Token token = parser.currentToken(); - if (token == Token.START_ARRAY) { - ArrayList list = new ArrayList<>(); - while ((token = parser.nextToken()) != Token.END_ARRAY) { - list.add(parser.text()); - } - values = list; - } else if (token == Token.VALUE_NULL) { - values = defaultValues; - } else { - values = Collections.singleton(parser.text()); - } - - return new FieldQuery(name, values); - } - - public static FieldQuery query(String name, CharSequence...
fieldvalues) { - return query(name, Arrays.asList(fieldvalues)); - } - - public static FieldQuery query(String name, Iterable fieldvalues) { - return new FieldQuery(name, fieldvalues); - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof CategoryContextMapping) { - CategoryContextMapping other = (CategoryContextMapping) obj; - if (this.fieldName.equals(other.fieldName)) { - return Objects.deepEquals(this.defaultValues, other.defaultValues); - } - } - return false; - } - - @Override - public int hashCode() { - int hashCode = fieldName.hashCode(); - for (CharSequence seq : defaultValues) { - hashCode = 31 * hashCode + seq.hashCode(); - } - return hashCode; - } - - private static class FieldConfig extends ContextConfig { - - private final String fieldname; - private final Iterable defaultValues; - private final Iterable values; - - public FieldConfig(String fieldname, Iterable defaultValues, Iterable values) { - this.fieldname = fieldname; - this.defaultValues = defaultValues; - this.values = values; - } - - @Override - protected TokenStream wrapTokenStream(Document doc, TokenStream stream) { - if (values != null) { - return new PrefixAnalyzer.PrefixTokenFilter(stream, ContextMapping.SEPARATOR, values); - // if fieldname is default, BUT our default values are set, we take that one - } else if ((doc.getFields(fieldname).length == 0 - || fieldname.equals(DEFAULT_FIELDNAME)) && defaultValues.iterator().hasNext()) { - return new PrefixAnalyzer.PrefixTokenFilter(stream, ContextMapping.SEPARATOR, defaultValues); - } else { - IndexableField[] fields = doc.getFields(fieldname); - ArrayList values = new ArrayList<>(fields.length); - for (int i = 0; i < fields.length; i++) { - values.add(fields[i].stringValue()); - } - - return new PrefixAnalyzer.PrefixTokenFilter(stream, ContextMapping.SEPARATOR, values); - } - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder("FieldConfig(" + fieldname + " = ["); - if (this.values != null && this.values.iterator().hasNext()) { - final Iterator valuesIterator = this.values.iterator(); - sb.append("("); - while (valuesIterator.hasNext()) { - sb.append(valuesIterator.next()); - if (valuesIterator.hasNext()) { - sb.append(", "); - } - } - sb.append(")"); - } - if (this.defaultValues != null && this.defaultValues.iterator().hasNext()) { - final Iterator defaultValuesIterator = this.defaultValues.iterator(); - sb.append(" default("); - while (defaultValuesIterator.hasNext()) { - sb.append(defaultValuesIterator.next()); - if (defaultValuesIterator.hasNext()) { - sb.append(", "); - } - } - sb.append(")"); - } - return sb.append("])").toString(); - } - - } - - private static class FieldQuery extends ContextQuery { - - private final Iterable values; - - public FieldQuery(String name, Iterable values) { - super(name); - this.values = values; - } - - @Override - public Automaton toAutomaton() { - List automatons = new ArrayList<>(); - for (CharSequence value : values) { - automatons.add(Automata.makeString(value.toString())); - } - return Operations.union(automatons); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray(name); - for (CharSequence value : values) { - builder.value(value); - } - builder.endArray(); - return builder; - } - } - - public static class Builder extends ContextBuilder { - - private String fieldname; - private List defaultValues = new ArrayList<>(); - - public Builder(String name) { - this(name, DEFAULT_FIELDNAME); - 
} - - public Builder(String name, String fieldname) { - super(name); - this.fieldname = fieldname; - } - - /** - * Set the name of the field to use - */ - public Builder fieldName(String fieldname) { - this.fieldname = fieldname; - return this; - } - - /** - * Add value to the default values of the mapping - */ - public Builder addDefaultValue(String defaultValue) { - this.defaultValues.add(defaultValue); - return this; - } - - /** - * Add set of default values to the mapping - */ - public Builder addDefaultValues(String... defaultValues) { - Collections.addAll(this.defaultValues, defaultValues); - return this; - } - - /** - * Add set of default values to the mapping - */ - public Builder addDefaultValues(Iterable defaultValues) { - for (String defaultValue : defaultValues) { - this.defaultValues.add(defaultValue); - } - return this; - } - - @Override - public CategoryContextMapping build() { - return new CategoryContextMapping(name, fieldname, defaultValues); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java deleted file mode 100644 index 08917637f6952..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; - -import java.util.Map; -import java.util.Map.Entry; -import java.util.SortedMap; -import java.util.TreeMap; - -public abstract class ContextBuilder<E extends ContextMapping> { - - protected String name; - - public ContextBuilder(String name) { - this.name = name; - } - - public abstract E build(); - - /** - * Create a new {@link GeolocationContextMapping} - */ - public static GeolocationContextMapping.Builder location(String name) { - return new GeolocationContextMapping.Builder(name); - } - - /** - * Create a new {@link GeolocationContextMapping} with given precision and - * neighborhood usage - * - * @param precision geohash length - * @param neighbors use neighbor cells - */ - public static GeolocationContextMapping.Builder location(String name, int precision, boolean neighbors) { - return new GeolocationContextMapping.Builder(name, neighbors, precision); - } - - /** - * Create a new {@link CategoryContextMapping} - */ - public static CategoryContextMapping.Builder category(String name) { - return new CategoryContextMapping.Builder(name, null); - } - - /** - * Create a new {@link CategoryContextMapping} with default category - * - * @param defaultCategory category to use, if it is not provided - */ - public static CategoryContextMapping.Builder category(String name, String defaultCategory) { - return new CategoryContextMapping.Builder(name, null).addDefaultValue(defaultCategory); - } - - /** - * Create a new {@link CategoryContextMapping} - * - * @param fieldname - * name of the field to use - */ - public static CategoryContextMapping.Builder reference(String name, String fieldname) { - return new CategoryContextMapping.Builder(name, fieldname); - } - - /** - * Create a new {@link CategoryContextMapping} - * - * @param fieldname name of the field to use - * @param defaultValues values to use, if the document does not provide - * a field with the given name - */ - public static CategoryContextMapping.Builder reference(String name, String fieldname, Iterable<String> defaultValues) { - return new CategoryContextMapping.Builder(name, fieldname).addDefaultValues(defaultValues); - } - - public static SortedMap<String, ContextMapping> loadMappings(Object configuration, Version indexVersionCreated) - throws ElasticsearchParseException { - if (configuration instanceof Map) { - Map<String, Object> configurations = (Map<String, Object>) configuration; - SortedMap<String, ContextMapping> mappings = new TreeMap<>(); - for (Entry<String, Object> config : configurations.entrySet()) { - String name = config.getKey(); - mappings.put(name, loadMapping(name, (Map<String, Object>) config.getValue(), indexVersionCreated)); - } - return mappings; - } else if (configuration == null) { - return ContextMapping.EMPTY_MAPPING; - } else { - throw new ElasticsearchParseException("no valid context configuration"); - } - } - - protected static ContextMapping loadMapping(String name, Map<String, Object> config, Version indexVersionCreated) - throws ElasticsearchParseException { - final Object argType = config.get(ContextMapping.FIELD_TYPE); - - if (argType == null) { - throw new ElasticsearchParseException("missing [{}] in context mapping", ContextMapping.FIELD_TYPE); - } - - final String type = argType.toString(); - ContextMapping contextMapping; - if (GeolocationContextMapping.TYPE.equals(type)) { - contextMapping = GeolocationContextMapping.load(name, config); - } else if
(CategoryContextMapping.TYPE.equals(type)) { - contextMapping = CategoryContextMapping.load(name, config); - } else { - throw new ElasticsearchParseException("unknown context type [{}]", type); - } - config.remove(ContextMapping.FIELD_TYPE); - DocumentMapperParser.checkNoRemainingFields(name, config, indexVersionCreated); - - return contextMapping; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java deleted file mode 100644 index b92d2e1b614c5..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.apache.lucene.util.fst.FST; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParseContext.Document; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.SortedMap; - -/** - * A {@link ContextMapping} is used to define a context that may be used - * in conjunction with a suggester. To define a suggester that depends on a - * specific context, a derived class of {@link ContextMapping} will be - * used to specify the kind of additional information required in order to make - * suggestions.
- */ -public abstract class ContextMapping implements ToXContent { - - /** Character used to separate several contexts */ - public static final char SEPARATOR = '\u001D'; - - /** Dummy Context Mapping that should be used if no context is used */ - public static final SortedMap<String, ContextMapping> EMPTY_MAPPING = Collections.emptySortedMap(); - - /** Dummy Context Config matching the Dummy Mapping by providing an empty context */ - public static final SortedMap<String, ContextConfig> EMPTY_CONFIG = Collections.emptySortedMap(); - - /** Dummy Context matching the Dummy Mapping by not wrapping a {@link TokenStream} */ - public static final Context EMPTY_CONTEXT = new Context(EMPTY_CONFIG, null); - - public static final String FIELD_VALUE = "value"; - public static final String FIELD_MISSING = "default"; - public static final String FIELD_TYPE = "type"; - - protected final String type; // Type of the ContextMapping - protected final String name; - - /** - * Define a new context mapping of a specific type - * - * @param type - * name of the new context mapping - */ - protected ContextMapping(String type, String name) { - super(); - this.type = type; - this.name = name; - } - - /** - * @return the type name of the context - */ - protected String type() { - return type; - } - - /** - * @return the name/id of the context - */ - public String name() { - return name; - } - - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(name); - builder.field(FIELD_TYPE, type); - toInnerXContent(builder, params); - builder.endObject(); - return builder; - } - - /** - * A {@link ContextMapping} combined with the information provided by a document - * forms a {@link ContextConfig} which is used to build the underlying FST. - * - * @param parseContext context of parsing phase - * @param parser {@link XContentParser} used to read and set up the configuration - * @return A {@link ContextConfig} related to this mapping - * - */ - public abstract ContextConfig parseContext(ParseContext parseContext, XContentParser parser) - throws IOException, ElasticsearchParseException; - - public abstract ContextConfig defaultConfig(); - - /** - * Parse a query according to the context.
Parsing starts at parsers current position - * - * @param name name of the context - * @param parser {@link XContentParser} providing the data of the query - * - * @return {@link ContextQuery} according to this mapping - * - */ - public abstract ContextQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException; - - /** - * Since every context mapping is assumed to have a name given by the field name of an context object, this - * method is used to build the value used to serialize the mapping - * - * @param builder builder to append the mapping to - * @param params parameters passed to the builder - * - * @return the builder used - * - */ - protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException; - - /** - * Test equality of two mapping - * - * @param thisMappings first mapping - * @param otherMappings second mapping - * - * @return true if both arguments are equal - */ - public static boolean mappingsAreEqual(SortedMap thisMappings, - SortedMap otherMappings) { - return Objects.equals(thisMappings, otherMappings); - } - - @Override - public String toString() { - try { - return toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string(); - } catch (IOException e) { - return super.toString(); - } - } - - /** - * A collection of {@link ContextMapping}s, their {@link ContextConfig}uration and a - * Document form a complete {@link Context}. Since this Object provides all information used - * to setup a suggestion, it can be used to wrap the entire {@link TokenStream} used to build a - * path within the {@link FST}. - */ - public static class Context { - - final SortedMap contexts; - final Document doc; - - public Context(SortedMap contexts, Document doc) { - super(); - this.contexts = contexts; - this.doc = doc; - } - - /** - * Wrap the {@link TokenStream} according to the provided informations of {@link ContextConfig} - * and a related {@link Document}. - * - * @param tokenStream {@link TokenStream} to wrap - * - * @return wrapped token stream - */ - public TokenStream wrapTokenStream(TokenStream tokenStream) { - for (ContextConfig context : contexts.values()) { - tokenStream = context.wrapTokenStream(doc, tokenStream); - } - return tokenStream; - } - } - - /** - * A {@link ContextMapping} combined with the information provided by a document - * form a {@link ContextConfig} which is used to build the underlying {@link FST}. This class hold - * a simple method wrapping a {@link TokenStream} by provided document informations. - */ - public abstract static class ContextConfig { - - /** - * Wrap a {@link TokenStream} for building suggestions to use context informations - * provided by a document or a {@link ContextMapping} - * - * @param doc document related to the stream - * @param stream original stream used to build the underlying {@link FST} - * - * @return A new {@link TokenStream} providing additional context information - */ - protected abstract TokenStream wrapTokenStream(Document doc, TokenStream stream); - - } - - /** - * A {@link ContextQuery} defines the context information for a specific {@link ContextMapping} - * defined within a suggestion request. 
According to the parameters set in the request and the - * {@link ContextMapping} such a query is used to wrap the {@link TokenStream} of the actual - * suggestion request into a {@link TokenStream} with the context settings - */ - public abstract static class ContextQuery implements ToXContent { - - protected final String name; - - protected ContextQuery(String name) { - this.name = name; - } - - public String name() { - return name; - } - - /** - * Create a automaton for a given context query this automaton will be used - * to find the matching paths with the fst - * - * @param preserveSep set an additional char (XAnalyzingSuggester.SEP_LABEL) between each context query - * @param queries list of {@link ContextQuery} defining the lookup context - * - * @return Automaton matching the given Query - */ - public static Automaton toAutomaton(boolean preserveSep, Iterable queries) { - Automaton a = Automata.makeEmptyString(); - - Automaton gap = Automata.makeChar(ContextMapping.SEPARATOR); - if (preserveSep) { - // if separators are preserved the fst contains a SEP_LABEL - // behind each gap. To have a matching automaton, we need to - // include the SEP_LABEL in the query as well - gap = Operations.concatenate(gap, Automata.makeChar(XAnalyzingSuggester.SEP_LABEL)); - } - - for (ContextQuery query : queries) { - a = Operations.concatenate(Arrays.asList(query.toAutomaton(), gap, a)); - } - - // TODO: should we limit this? Do any of our ContextQuery impls really create exponential regexps? - // GeoQuery looks safe (union of strings). - return Operations.determinize(a, Integer.MAX_VALUE); - } - - /** - * Build a LookUp Automaton for this context. - * @return LookUp Automaton - */ - protected abstract Automaton toAutomaton(); - - /** - * Parse a set of {@link ContextQuery} according to a given mapping - * @param mappings List of mapping defined y the suggest field - * @param parser parser holding the settings of the queries. The parsers - * current token is assumed hold an array. The number of elements - * in this array must match the number of elements in the mappings. 
- * @return List of context queries - * - * @throws IOException if something unexpected happened on the underlying stream - * @throws ElasticsearchParseException if the list of queries could not be parsed - */ - public static List parseQueries(Map mappings, XContentParser parser) - throws IOException, ElasticsearchParseException { - - Map querySet = new HashMap<>(); - Token token = parser.currentToken(); - if(token == Token.START_OBJECT) { - while ((token = parser.nextToken()) != Token.END_OBJECT) { - String name = parser.currentName(); - ContextMapping mapping = mappings.get(name); - if (mapping == null) { - throw new ElasticsearchParseException("no mapping defined for [{}]", name); - } - parser.nextToken(); - querySet.put(name, mapping.parseQuery(name, parser)); - } - } - - List queries = new ArrayList<>(mappings.size()); - for (ContextMapping mapping : mappings.values()) { - queries.add(querySet.get(mapping.name)); - } - return queries; - } - - @Override - public String toString() { - try { - return toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string(); - } catch (IOException e) { - return super.toString(); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java deleted file mode 100644 index 5eed19ca00ac3..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java +++ /dev/null @@ -1,750 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
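Before the geolocation mapping below, one detail of the deleted ContextQuery.toAutomaton is worth spelling out: each context query contributes an automaton, and the queries are chained right-to-left with the 0x1D separator before a final determinization. A minimal, self-contained sketch against Lucene's automaton utilities (the preserveSep handling, which appends XAnalyzingSuggester.SEP_LABEL to the gap, is omitted here):

    import java.util.Arrays;

    import org.apache.lucene.util.automaton.Automata;
    import org.apache.lucene.util.automaton.Automaton;
    import org.apache.lucene.util.automaton.Operations;

    class ContextAutomatonSketch {
        // Same gap character as ContextMapping.SEPARATOR.
        static final char SEPARATOR = '\u001D';

        static Automaton chain(Iterable<Automaton> queryAutomata) {
            Automaton a = Automata.makeEmptyString();
            final Automaton gap = Automata.makeChar(SEPARATOR);
            // Each iteration prepends [query][gap] to the automaton built so far,
            // matching the [context][SEP]...[suggestion] layout of the FST paths.
            for (Automaton query : queryAutomata) {
                a = Operations.concatenate(Arrays.asList(query, gap, a));
            }
            return Operations.determinize(a, Integer.MAX_VALUE);
        }
    }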
- */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import com.carrotsearch.hppc.IntHashSet; -import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.apache.lucene.util.fst.FST; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.geo.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParseContext.Document; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; - -/** - * The {@link GeolocationContextMapping} allows to take GeoInfomation into account - * during building suggestions. The mapping itself works with geohashes - * explicitly and is configured by three parameters: - *
- * <ul>
- *   <li>precision: length of the geohash indexed as prefix of the
- *   completion field</li>
- *   <li>neighbors: Should the neighbor cells of the deepest geohash
- *   level also be indexed as alternatives to the actual geohash</li>
- *   <li>location: (optional) location assumed if it is not provided</li>
- * </ul>
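Taken together, precision and neighbors control which geohash prefixes get indexed for every suggestion. An illustrative sketch of that expansion, reusing the same GeoHashUtils calls the deleted GeoConfig.wrapTokenStream makes (the helper class itself is hypothetical):

    import java.util.HashSet;
    import java.util.Set;

    import org.elasticsearch.common.geo.GeoHashUtils;

    class GeoContextCellsSketch {
        // Truncate the point's geohash to each configured precision and,
        // if requested, add the neighboring cells at that level as well.
        static Set<String> cells(double lat, double lon, int[] precisions, boolean neighbors) {
            final String geohash = GeoHashUtils.stringEncode(lon, lat);
            final Set<String> cells = new HashSet<>();
            for (int p : precisions) {
                final int precision = Math.min(p, geohash.length());
                final String truncated = geohash.substring(0, precision);
                if (neighbors) {
                    GeoHashUtils.addNeighbors(truncated, precision, cells);
                }
                cells.add(truncated);
            }
            return cells;
        }
    }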
- * Internally this mapping wraps the suggestions into a form - * [geohash][suggestion]. If the neighbor option is set the cells - * next to the cell on the deepest geohash level ( precision) will - * be indexed as well. The {@link TokenStream} used to build the {@link FST} for - * suggestion will be wrapped into a {@link PrefixTokenFilter} managing these - * geohases as prefixes. - */ -public class GeolocationContextMapping extends ContextMapping { - - public static final String TYPE = "geo"; - - public static final String FIELD_PRECISION = "precision"; - public static final String FIELD_NEIGHBORS = "neighbors"; - public static final String FIELD_FIELDNAME = "path"; - - private final Collection defaultLocations; - private final int[] precision; - private final boolean neighbors; - private final String fieldName; - private final GeoConfig defaultConfig; - - /** - * Create a new {@link GeolocationContextMapping} with a given precision - * - * @param precision - * length of the geohashes - * @param neighbors - * should neighbors be indexed - * @param defaultLocations - * location to use, if it is not provided by the document - */ - protected GeolocationContextMapping(String name, int[] precision, boolean neighbors, - Collection defaultLocations, String fieldName) { - super(TYPE, name); - this.precision = precision; - this.neighbors = neighbors; - this.defaultLocations = defaultLocations; - this.fieldName = fieldName; - this.defaultConfig = new GeoConfig(this, defaultLocations); - } - - /** - * load a {@link GeolocationContextMapping} by configuration. Such a configuration - * can set the parameters - *
- * <ul>
- *   <li>precision [String, Double,
- *   Float or Integer] defines the length of the
- *   underlying geohash</li>
- *   <li>defaultLocation [String] defines the location to use if
- *   it is not provided by the document</li>
- *   <li>neighbors [Boolean] defines if the last level of the
- *   geohash should be extended by neighbor cells</li>
- * </ul>
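Note that precision accepts either a geohash length or a distance; distances go through the ceiling conversion implemented in the deleted Builder. A short sketch of that coercion using the same GeoUtils helpers (the wrapper class name is illustrative):

    import org.elasticsearch.common.geo.GeoUtils;
    import org.elasticsearch.common.unit.DistanceUnit;

    class GeoPrecisionSketch {
        // Convert a distance such as "5km" to a geohash length, stepping back to
        // a coarser level when the cell at the computed level would be smaller
        // than the requested distance (prefer extra results over missed ones).
        static int levelFor(String precision) {
            final double meters = DistanceUnit.parse(precision, DistanceUnit.METERS, DistanceUnit.METERS);
            int level = GeoUtils.geoHashLevelsForPrecision(meters);
            if (GeoUtils.geoHashCellSize(level) < meters) {
                level = Math.max(1, level - 1);
            }
            return level;
        }
    }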
- * - * @param config - * Configuration for {@link GeolocationContextMapping} - * @return new {@link GeolocationContextMapping} configured by the parameters of - * config - */ - protected static GeolocationContextMapping load(String name, Map config) { - if (!config.containsKey(FIELD_PRECISION)) { - throw new ElasticsearchParseException("field [precision] is missing"); - } - - final GeolocationContextMapping.Builder builder = new GeolocationContextMapping.Builder(name); - - if (config != null) { - final Object configPrecision = config.get(FIELD_PRECISION); - if (configPrecision == null) { - // ignore precision - } else if (configPrecision instanceof Integer) { - builder.precision((Integer) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Long) { - builder.precision((Long) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Double) { - builder.precision((Double) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Float) { - builder.precision((Float) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Iterable) { - for (Object precision : (Iterable)configPrecision) { - if (precision instanceof Integer) { - builder.precision((Integer) precision); - } else if (precision instanceof Long) { - builder.precision((Long) precision); - } else if (precision instanceof Double) { - builder.precision((Double) precision); - } else if (precision instanceof Float) { - builder.precision((Float) precision); - } else { - builder.precision(precision.toString()); - } - } - config.remove(FIELD_PRECISION); - } else { - builder.precision(configPrecision.toString()); - config.remove(FIELD_PRECISION); - } - - final Object configNeighbors = config.get(FIELD_NEIGHBORS); - if (configNeighbors != null) { - builder.neighbors((Boolean) configNeighbors); - config.remove(FIELD_NEIGHBORS); - } - - final Object def = config.get(FIELD_MISSING); - if (def != null) { - if (def instanceof Iterable) { - for (Object location : (Iterable)def) { - builder.addDefaultLocation(location.toString()); - } - } else if (def instanceof String) { - builder.addDefaultLocation(def.toString()); - } else if (def instanceof Map) { - Map latlonMap = (Map) def; - if (!latlonMap.containsKey("lat") || !(latlonMap.get("lat") instanceof Double)) { - throw new ElasticsearchParseException( - "field [{}] map must have field lat and a valid latitude", FIELD_MISSING); - } - if (!latlonMap.containsKey("lon") || !(latlonMap.get("lon") instanceof Double)) { - throw new ElasticsearchParseException( - "field [{}] map must have field lon and a valid longitude", FIELD_MISSING); - } - builder.addDefaultLocation( - Double.valueOf(latlonMap.get("lat").toString()), Double.valueOf(latlonMap.get("lon").toString())); - } else { - throw new ElasticsearchParseException("field [{}] must be of type string or list", FIELD_MISSING); - } - config.remove(FIELD_MISSING); - } - - final Object fieldName = config.get(FIELD_FIELDNAME); - if (fieldName != null) { - builder.field(fieldName.toString()); - config.remove(FIELD_FIELDNAME); - } - } - return builder.build(); - } - - @Override - protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException { - builder.array(FIELD_PRECISION, precision); - builder.field(FIELD_NEIGHBORS, neighbors); - if (defaultLocations != null) { - builder.startArray(FIELD_MISSING); - for (String defaultLocation : defaultLocations) { - 
builder.value(defaultLocation); - } - builder.endArray(); - } - if (fieldName != null) { - builder.field(FIELD_FIELDNAME, fieldName); - } - return builder; - } - - protected static Collection parseSinglePointOrList(XContentParser parser) throws IOException { - Token token = parser.currentToken(); - if(token == Token.START_ARRAY) { - token = parser.nextToken(); - // Test if value is a single point in [lon, lat] format - if(token == Token.VALUE_NUMBER) { - double lon = parser.doubleValue(); - if(parser.nextToken() == Token.VALUE_NUMBER) { - double lat = parser.doubleValue(); - if(parser.nextToken() == Token.END_ARRAY) { - return Collections.singleton(GeoHashUtils.stringEncode(lon, lat)); - } else { - throw new ElasticsearchParseException("only two values expected"); - } - } else { - throw new ElasticsearchParseException("latitue must be a numeric value"); - } - } else { - // otherwise it's a list of locations - ArrayList result = new ArrayList<>(); - while (token != Token.END_ARRAY) { - result.add(GeoUtils.parseGeoPoint(parser).geohash()); - token = parser.nextToken(); //infinite loop without this line - } - return result; - } - } else { - // or a single location - return Collections.singleton(GeoUtils.parseGeoPoint(parser).geohash()); - } - } - - @Override - public ContextConfig defaultConfig() { - return defaultConfig; - } - - @Override - public ContextConfig parseContext(ParseContext parseContext, XContentParser parser) - throws IOException, ElasticsearchParseException { - - if(fieldName != null) { - FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName); - if(!(mapper instanceof GeoPointFieldMapper)) { - throw new ElasticsearchParseException("referenced field must be mapped to geo_point"); - } - } - - Collection locations; - if(parser.currentToken() == Token.VALUE_NULL) { - locations = null; - } else { - locations = parseSinglePointOrList(parser); - } - return new GeoConfig(this, locations); - } - - /** - * Create a new geolocation query from a given GeoPoint - * - * @param point - * query location - * @return new geolocation query - */ - public static GeoQuery query(String name, GeoPoint point) { - return query(name, point.getGeohash()); - } - - /** - * Create a new geolocation query from a given geocoordinate - * - * @param lat - * latitude of the location - * @param lon - * longitude of the location - * @return new geolocation query - */ - public static GeoQuery query(String name, double lat, double lon, int ... precisions) { - return query(name, GeoHashUtils.stringEncode(lon, lat), precisions); - } - - public static GeoQuery query(String name, double lat, double lon, String ... precisions) { - int precisionInts[] = new int[precisions.length]; - for (int i = 0 ; i < precisions.length; i++) { - precisionInts[i] = GeoUtils.geoHashLevelsForPrecision(precisions[i]); - } - return query(name, GeoHashUtils.stringEncode(lon, lat), precisionInts); - } - - /** - * Create a new geolocation query from a given geohash - * - * @param geohash - * geohash of the location - * @return new geolocation query - */ - public static GeoQuery query(String name, String geohash, int ... 
precisions) { - return new GeoQuery(name, geohash, precisions); - } - - private static int parsePrecision(XContentParser parser) throws IOException, ElasticsearchParseException { - switch (parser.currentToken()) { - case VALUE_STRING: - return GeoUtils.geoHashLevelsForPrecision(parser.text()); - case VALUE_NUMBER: - switch (parser.numberType()) { - case INT: - case LONG: - return parser.intValue(); - default: - return GeoUtils.geoHashLevelsForPrecision(parser.doubleValue()); - } - default: - throw new ElasticsearchParseException("invalid precision value"); - } - } - - @Override - public GeoQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException { - if (parser.currentToken() == Token.START_OBJECT) { - double lat = Double.NaN; - double lon = Double.NaN; - GeoPoint point = null; - int[] precision = null; - - while (parser.nextToken() != Token.END_OBJECT) { - final String fieldName = parser.currentName(); - if("lat".equals(fieldName)) { - if(point == null) { - parser.nextToken(); - switch (parser.currentToken()) { - case VALUE_NUMBER: - case VALUE_STRING: - lat = parser.doubleValue(true); - break; - default: - throw new ElasticsearchParseException("latitude must be a number"); - } - } else { - throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE); - } - } else if ("lon".equals(fieldName)) { - if(point == null) { - parser.nextToken(); - switch (parser.currentToken()) { - case VALUE_NUMBER: - case VALUE_STRING: - lon = parser.doubleValue(true); - break; - default: - throw new ElasticsearchParseException("longitude must be a number"); - } - } else { - throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE); - } - } else if (FIELD_PRECISION.equals(fieldName)) { - if(parser.nextToken() == Token.START_ARRAY) { - IntHashSet precisions = new IntHashSet(); - while(parser.nextToken() != Token.END_ARRAY) { - precisions.add(parsePrecision(parser)); - } - precision = precisions.toArray(); - } else { - precision = new int[] { parsePrecision(parser) }; - } - } else if (FIELD_VALUE.equals(fieldName)) { - if(Double.isNaN(lon) && Double.isNaN(lat)) { - parser.nextToken(); - point = GeoUtils.parseGeoPoint(parser); - } else { - throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE); - } - } else { - throw new ElasticsearchParseException("unexpected fieldname [{}]", fieldName); - } - } - - if (point == null) { - if (Double.isNaN(lat) || Double.isNaN(lon)) { - throw new ElasticsearchParseException("location is missing"); - } else { - point = new GeoPoint(lat, lon); - } - } - - if (precision == null || precision.length == 0) { - precision = this.precision; - } - - return new GeoQuery(name, point.geohash(), precision); - } else { - return new GeoQuery(name, GeoUtils.parseGeoPoint(parser).getGeohash(), precision); - } - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((defaultLocations == null) ? 0 : defaultLocations.hashCode()); - result = prime * result + ((fieldName == null) ? 0 : fieldName.hashCode()); - result = prime * result + (neighbors ? 
1231 : 1237); - result = prime * result + Arrays.hashCode(precision); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - GeolocationContextMapping other = (GeolocationContextMapping) obj; - if (defaultLocations == null) { - if (other.defaultLocations != null) - return false; - } else if (!defaultLocations.equals(other.defaultLocations)) - return false; - if (fieldName == null) { - if (other.fieldName != null) - return false; - } else if (!fieldName.equals(other.fieldName)) - return false; - if (neighbors != other.neighbors) - return false; - if (!Arrays.equals(precision, other.precision)) - return false; - return true; - } - - - - - public static class Builder extends ContextBuilder { - - private IntHashSet precisions = new IntHashSet(); - private boolean neighbors; // take neighbor cell on the lowest level into account - private HashSet defaultLocations = new HashSet<>(); - private String fieldName = null; - - protected Builder(String name) { - this(name, true, null); - } - - protected Builder(String name, boolean neighbors, int...levels) { - super(name); - neighbors(neighbors); - if (levels != null) { - for (int level : levels) { - precision(level); - } - } - } - - /** - * Set the precision use o make suggestions - * - * @param precision - * precision as distance with {@link DistanceUnit}. Default: - * meters - * @return this - */ - public Builder precision(String precision) { - return precision(DistanceUnit.parse(precision, DistanceUnit.METERS, DistanceUnit.METERS)); - } - - /** - * Set the precision use o make suggestions - * - * @param precision - * precision value - * @param unit - * {@link DistanceUnit} to use - * @return this - */ - public Builder precision(double precision, DistanceUnit unit) { - return precision(unit.toMeters(precision)); - } - - /** - * Set the precision use o make suggestions - * - * @param meters - * precision as distance in meters - * @return this - */ - public Builder precision(double meters) { - int level = GeoUtils.geoHashLevelsForPrecision(meters); - // Ceiling precision: we might return more results - if (GeoUtils.geoHashCellSize(level) < meters) { - level = Math.max(1, level - 1); - } - return precision(level); - } - - /** - * Set the precision use o make suggestions - * - * @param level - * maximum length of geohashes - * @return this - */ - public Builder precision(int level) { - this.precisions.add(level); - return this; - } - - /** - * Set neighborhood usage - * - * @param neighbors - * should neighbor cells also be valid - * @return this - */ - public Builder neighbors(boolean neighbors) { - this.neighbors = neighbors; - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param geohash - * geohash of the default location - * @return this - */ - public Builder addDefaultLocation(String geohash) { - this.defaultLocations.add(geohash); - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param geohashes - * geohash of the default location - * @return this - */ - public Builder addDefaultLocations(Collection geohashes) { - this.defaultLocations.addAll(geohashes); - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param lat - * latitude of the default location - * @param lon - * longitude 
of the default location - * @return this - */ - public Builder addDefaultLocation(double lat, double lon) { - this.defaultLocations.add(GeoHashUtils.stringEncode(lon, lat)); - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param point - * location - * @return this - */ - public Builder defaultLocation(GeoPoint point) { - this.defaultLocations.add(point.geohash()); - return this; - } - - /** - * Set the name of the field containing a geolocation to use - * @param fieldName name of the field - * @return this - */ - public Builder field(String fieldName) { - this.fieldName = fieldName; - return this; - } - - @Override - public GeolocationContextMapping build() { - if(precisions.isEmpty()) { - precisions.add(GeoHashUtils.PRECISION); - } - int[] precisionArray = precisions.toArray(); - Arrays.sort(precisionArray); - return new GeolocationContextMapping(name, precisionArray, neighbors, defaultLocations, fieldName); - } - - } - - private static class GeoConfig extends ContextConfig { - - private final GeolocationContextMapping mapping; - private final Collection locations; - - public GeoConfig(GeolocationContextMapping mapping, Collection locations) { - this.locations = locations; - this.mapping = mapping; - } - - @Override - protected TokenStream wrapTokenStream(Document doc, TokenStream stream) { - Collection geohashes; - - if (locations == null || locations.size() == 0) { - if(mapping.fieldName != null) { - IndexableField[] fields = doc.getFields(mapping.fieldName); - if(fields.length == 0) { - IndexableField[] lonFields = doc.getFields(mapping.fieldName + ".lon"); - IndexableField[] latFields = doc.getFields(mapping.fieldName + ".lat"); - if (lonFields.length > 0 && latFields.length > 0) { - geohashes = new ArrayList<>(fields.length); - GeoPoint spare = new GeoPoint(); - for (int i = 0 ; i < lonFields.length ; i++) { - IndexableField lonField = lonFields[i]; - IndexableField latField = latFields[i]; - assert lonField.fieldType().docValuesType() == latField.fieldType().docValuesType(); - // we write doc values fields differently: one field for all values, - // so we need to only care about indexed fields - if (lonField.fieldType().docValuesType() == DocValuesType.NONE) { - spare.reset(latField.numericValue().doubleValue(), lonField.numericValue().doubleValue()); - geohashes.add(spare.geohash()); - } - } - } else { - geohashes = mapping.defaultLocations; - } - } else { - geohashes = new ArrayList<>(fields.length); - GeoPoint spare = new GeoPoint(); - for (IndexableField field : fields) { - if (field instanceof StringField) { - spare.resetFromString(field.stringValue()); - } else if (field instanceof GeoPointField) { - GeoPointField geoPointField = (GeoPointField) field; - spare.reset(geoPointField.getLat(), geoPointField.getLon()); - } else { - spare.resetFromString(field.stringValue()); - } - geohashes.add(spare.geohash()); - } - } - } else { - geohashes = mapping.defaultLocations; - } - } else { - geohashes = locations; - } - - Collection locations = new HashSet<>(); - for (String geohash : geohashes) { - for (int p : mapping.precision) { - int precision = Math.min(p, geohash.length()); - String truncatedGeohash = geohash.substring(0, precision); - if(mapping.neighbors) { - GeoHashUtils.addNeighbors(truncatedGeohash, precision, locations); - } - locations.add(truncatedGeohash); - } - } - - return new PrefixTokenFilter(stream, ContextMapping.SEPARATOR, locations); - } - - @Override - public String toString() { - 
StringBuilder sb = new StringBuilder("GeoConfig(location = ["); - Iterator location = this.locations.iterator(); - if (location.hasNext()) { - sb.append(location.next()); - while (location.hasNext()) { - sb.append(", ").append(location.next()); - } - } - return sb.append("])").toString(); - } - } - - private static class GeoQuery extends ContextQuery { - private final String location; - private final int[] precisions; - - public GeoQuery(String name, String location, int...precisions) { - super(name); - this.location = location; - this.precisions = precisions; - } - - @Override - public Automaton toAutomaton() { - Automaton automaton; - if(precisions == null || precisions.length == 0) { - automaton = Automata.makeString(location); - } else { - automaton = Automata.makeString( - location.substring(0, Math.max(1, Math.min(location.length(), precisions[0])))); - for (int i = 1; i < precisions.length; i++) { - final String cell = location.substring(0, Math.max(1, Math.min(location.length(), precisions[i]))); - automaton = Operations.union(automaton, Automata.makeString(cell)); - } - } - return automaton; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if(precisions == null || precisions.length == 0) { - builder.field(name, location); - } else { - builder.startObject(name); - builder.field(FIELD_VALUE, location); - builder.array(FIELD_PRECISION, precisions); - builder.endObject(); - } - return builder; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java deleted file mode 100644 index 0d9a9e7196333..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Support for completion suggesters with contexts built on 2.x indices. - */ -package org.elasticsearch.search.suggest.completion2x.context; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java deleted file mode 100644 index b8b14aa7c3f0c..0000000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Support for completion suggesters built on 2.x indices. - */ -package org.elasticsearch.search.suggest.completion2x; diff --git a/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat b/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat index 9220317572762..2c92f0ecd3f51 100644 --- a/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat +++ b/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat @@ -1,2 +1 @@ org.apache.lucene.search.suggest.document.Completion50PostingsFormat -org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat \ No newline at end of file diff --git a/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json b/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json index 4863cdb539b3a..0f1a32e1bef81 100644 --- a/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json +++ b/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json @@ -36,7 +36,7 @@ "enabled" : false }, "description": { - "type": "string" + "type": "text" } } }, diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 7d36ae14739a8..9779ce83a652d 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -111,7 +111,7 @@ public void testIndexTemplateWithValidateMapping() throws Exception { PutRequest request = new PutRequest("api", "validate_template"); request.patterns(Collections.singletonList("te*")); request.putMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "string").field("analyzer", "custom_1").endObject() + .startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() .endObject().endObject().endObject().string()); List errors = putTemplateDetail(request); diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index 76447268c7a9b..1d1532c49196f 100644 --- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.replication.ReplicationResponse; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.VersionType; import 
org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.shard.ShardId; @@ -101,45 +100,6 @@ public void testIndexingRejectsLongIds() { assertThat(validate, notNullValue()); assertThat(validate.getMessage(), containsString("id is too long, must be no longer than 512 bytes but was: 513")); -} - - public void testSetTTLAsTimeValue() { - IndexRequest indexRequest = new IndexRequest(); - TimeValue ttl = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); - indexRequest.ttl(ttl); - assertThat(indexRequest.ttl(), equalTo(ttl)); - } - - public void testSetTTLAsString() { - IndexRequest indexRequest = new IndexRequest(); - String ttlAsString = randomTimeValue(); - TimeValue ttl = TimeValue.parseTimeValue(ttlAsString, null, "ttl"); - indexRequest.ttl(ttlAsString); - assertThat(indexRequest.ttl(), equalTo(ttl)); - } - - public void testSetTTLAsLong() { - IndexRequest indexRequest = new IndexRequest(); - String ttlAsString = randomTimeValue(); - TimeValue ttl = TimeValue.parseTimeValue(ttlAsString, null, "ttl"); - indexRequest.ttl(ttl.millis()); - assertThat(indexRequest.ttl(), equalTo(ttl)); - } - - public void testValidateTTL() { - IndexRequest indexRequest = new IndexRequest("index", "type"); - if (randomBoolean()) { - indexRequest.ttl(randomIntBetween(Integer.MIN_VALUE, -1)); - } else { - if (randomBoolean()) { - indexRequest.ttl(new TimeValue(randomIntBetween(Integer.MIN_VALUE, -1))); - } else { - indexRequest.ttl(randomIntBetween(Integer.MIN_VALUE, -1) + "ms"); - } - } - ActionRequestValidationException validate = indexRequest.validate(); - assertThat(validate, notNullValue()); - assertThat(validate.getMessage(), containsString("ttl must not be negative")); } public void testWaitForActiveShards() { diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 3b27bbff9ce20..cdf6a225620c6 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -181,48 +181,6 @@ public void testUpdateRequest() throws Exception { assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); } - // Related to issue 3256 - public void testUpdateRequestWithTTL() throws Exception { - TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); - Settings settings = settings(Version.CURRENT).build(); - - UpdateHelper updateHelper = new UpdateHelper(settings, null); - - // We just upsert one document with ttl - IndexRequest indexRequest = new IndexRequest("test", "type1", "1") - .source(jsonBuilder().startObject().field("foo", "bar").endObject()) - .ttl(providedTTLValue); - UpdateRequest updateRequest = new UpdateRequest("test", "type1", "1") - .doc(jsonBuilder().startObject().field("fooz", "baz").endObject()) - .upsert(indexRequest); - - long nowInMillis = randomPositiveLong(); - // We simulate that the document is not existing yet - GetResult getResult = new GetResult("test", "type1", "1", 0, false, null, null); - UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0),updateRequest, getResult, () -> nowInMillis); - Streamable action = result.action(); - assertThat(action, instanceOf(IndexRequest.class)); - IndexRequest indexAction = (IndexRequest) action; - assertThat(indexAction.ttl(), is(providedTTLValue)); - - // We just upsert one document with ttl using a script - indexRequest = new 
IndexRequest("test", "type1", "2") - .source(jsonBuilder().startObject().field("foo", "bar").endObject()) - .ttl(providedTTLValue); - updateRequest = new UpdateRequest("test", "type1", "2") - .upsert(indexRequest) - .script(new Script(";")) - .scriptedUpsert(true); - - // We simulate that the document is not existing yet - getResult = new GetResult("test", "type1", "2", 0, false, null, null); - result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis); - action = result.action(); - assertThat(action, instanceOf(IndexRequest.class)); - indexAction = (IndexRequest) action; - assertThat(indexAction.ttl(), is(providedTTLValue)); - } - // Related to issue #15822 public void testInvalidBodyThrowsParseException() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); @@ -312,15 +270,13 @@ public void testNowInScript() throws IOException { ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); ScriptService scriptService = new ScriptService(baseSettings, environment, new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings); - TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); Settings settings = settings(Version.CURRENT).build(); UpdateHelper updateHelper = new UpdateHelper(settings, scriptService); // We just upsert one document with now() using a script IndexRequest indexRequest = new IndexRequest("test", "type1", "2") - .source(jsonBuilder().startObject().field("foo", "bar").endObject()) - .ttl(providedTTLValue); + .source(jsonBuilder().startObject().field("foo", "bar").endObject()); { UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2") @@ -341,14 +297,11 @@ public void testNowInScript() throws IOException { .upsert(indexRequest) .script(new Script(ScriptType.INLINE, "mock", "ctx._timestamp = ctx._now", Collections.emptyMap())) .scriptedUpsert(true); - long nowInMillis = randomPositiveLong(); // We simulate that the document is not existing yet GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null); - UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis); + UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L); Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); - IndexRequest indexAction = (IndexRequest) action; - assertEquals(indexAction.timestamp(), Long.toString(nowInMillis)); } } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index cea041d77772d..a7e3583a5b395 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RecoverySource; @@ -49,7 +50,6 @@ import 
org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.index.mapper.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.node.Node; @@ -82,6 +82,7 @@ import java.util.SortedSet; import java.util.TreeSet; +import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.test.OldIndexUtils.assertUpgradeWorks; import static org.elasticsearch.test.OldIndexUtils.getIndexDir; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -188,7 +189,7 @@ public void testAllVersionsTested() throws Exception { for (Version v : VersionUtils.allReleasedVersions()) { if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet if (v.isRelease() == false) continue; // no guarantees for prereleases - if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version + if (v.before(Version.V_5_0_0)) continue; // we can only support one major version backward if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself expectedVersions.add("index-" + v.toString() + ".zip"); } @@ -425,11 +426,31 @@ void assertDeleteByQueryWorked(String indexName, Version version) throws Excepti } void assertPositionIncrementGapDefaults(String indexName, Version version) throws Exception { - if (version.before(Version.V_2_0_0_beta1)) { - StringFieldMapperPositionIncrementGapTests.assertGapIsZero(client(), indexName, "doc"); - } else { - StringFieldMapperPositionIncrementGapTests.assertGapIsOneHundred(client(), indexName, "doc"); - } + client().prepareIndex(indexName, "doc", "position_gap_test").setSource("string", Arrays.asList("one", "two three")) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + + // Baseline - phrase query finds matches in the same field value + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1); + + // No match across gaps when slop < position gap + assertHitCount( + client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(99)).get(), + 0); + + // Match across gaps when slop >= position gap + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(100)).get(), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(101)).get(), + 1); + + // No match across gap using default slop with default positionIncrementGap + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0); + + // Nor with small-ish values + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0); + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0); + + // But huge-ish values still match + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1); } private static final Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0"); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 
16321dbd9fec7..8bae0cbe635e5 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -45,7 +45,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -98,7 +97,7 @@ public void testRestoreOldSnapshots() throws Exception { for (Version v : VersionUtils.allReleasedVersions()) { if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet if (v.isRelease() == false) continue; // no guarantees for prereleases - if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version + if (v.before(Version.V_5_0_0)) continue; // we only support versions N and N-1 if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself expectedVersions.add(v.toString()); } @@ -128,44 +127,6 @@ public void testRestoreUnsupportedSnapshots() throws Exception { } } - public void testRestoreSnapshotWithMissingChecksum() throws Exception { - final String repo = "test_repo"; - final String snapshot = "test_1"; - final String indexName = "index-2.3.4"; - final String repoFileId = "missing-checksum-repo-2.3.4"; - Path repoFile = getBwcIndicesPath().resolve(repoFileId + ".zip"); - URI repoFileUri = repoFile.toUri(); - URI repoJarUri = new URI("jar:" + repoFileUri.toString() + "!/repo/"); - logger.info("--> creating repository [{}] for repo file [{}]", repo, repoFileId); - assertAcked(client().admin().cluster().preparePutRepository(repo) - .setType("url") - .setSettings(Settings.builder().put("url", repoJarUri.toString()))); - - logger.info("--> get snapshot and check its indices"); - GetSnapshotsResponse getSnapshotsResponse = client().admin().cluster().prepareGetSnapshots(repo).setSnapshots(snapshot).get(); - assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1)); - SnapshotInfo snapshotInfo = getSnapshotsResponse.getSnapshots().get(0); - assertThat(snapshotInfo.indices(), equalTo(Arrays.asList(indexName))); - - logger.info("--> restoring snapshot"); - RestoreSnapshotResponse response = client().admin().cluster().prepareRestoreSnapshot(repo, snapshot).setRestoreGlobalState(true).setWaitForCompletion(true).get(); - assertThat(response.status(), equalTo(RestStatus.OK)); - RestoreInfo restoreInfo = response.getRestoreInfo(); - assertThat(restoreInfo.successfulShards(), greaterThan(0)); - assertThat(restoreInfo.successfulShards(), equalTo(restoreInfo.totalShards())); - assertThat(restoreInfo.failedShards(), equalTo(0)); - String index = restoreInfo.indices().get(0); - assertThat(index, equalTo(indexName)); - - logger.info("--> check search"); - SearchResponse searchResponse = client().prepareSearch(index).get(); - assertThat(searchResponse.getHits().totalHits(), greaterThan(0L)); - - logger.info("--> cleanup"); - cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()])); - cluster().wipeTemplates(); - } - private List repoVersions() throws Exception { return listRepoVersions("repo"); } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index 376feb305a171..508c93284fe37 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -83,8 +83,8 @@ public void testFailUpgrade() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); final IndexMetaData metaData = newIndexMeta("foo", Settings.builder() - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("1.7.0")) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("2.4.0")) .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, Version.CURRENT.luceneVersion.toString()).build()); String message = expectThrows(IllegalStateException.class, () -> service.upgradeIndexMetaData(metaData)).getMessage(); @@ -92,8 +92,8 @@ public void testFailUpgrade() { "before upgrading to " + Version.CURRENT.toString() + "."); IndexMetaData goodMeta = newIndexMeta("foo", Settings.builder() - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("2.1.0")) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("5.1.0")) .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, Version.CURRENT.luceneVersion.toString()).build()); service.upgradeIndexMetaData(goodMeta); @@ -105,7 +105,7 @@ public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) .put(indexSettings) .build(); IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java deleted file mode 100644 index dac83d1dee823..0000000000000 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
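The updated expectations above encode the new import rule: an index is only accepted if it was created on the previous major version or later, otherwise it must be reindexed before the upgrade. Condensed as a hypothetical predicate (the actual MetaDataIndexUpgradeService performs additional checks beyond this):

    import org.elasticsearch.Version;

    class UpgradeRuleSketch {
        // Rule asserted by the updated tests: indices created on 5.0.0 or later
        // can be imported; anything older (e.g. 2.4.0) fails the upgrade.
        static boolean importable(Version created) {
            return created.onOrAfter(Version.V_5_0_0);
        }
    }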
- */ - -package org.elasticsearch.codecs; - -import org.apache.lucene.codecs.Codec; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.junit.Assert; - -import java.io.IOException; -import java.util.Collection; - -import static org.hamcrest.Matchers.containsString; - -public class CodecTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testAcceptPostingsFormat() throws IOException { - int i = 0; - for (Version v : VersionUtils.allReleasedVersions()) { - if (v.onOrAfter(Version.V_2_0_0) == false) { - // no need to test, we don't support upgrading from these versions - continue; - } - IndexService indexService = createIndex("test-" + i++, - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - try { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", v.onOrAfter(Version.V_5_0_0_alpha1) ? "keyword" : "string") - .field("postings_format", Codec.getDefault().postingsFormat().getName()) - .endObject() - .endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - if (v.onOrAfter(Version.V_2_0_0_beta1)) { - fail("Elasticsearch 2.0 should not support custom postings formats"); - } - } catch (MapperParsingException e) { - if (v.before(Version.V_2_0_0_beta1)) { - // Elasticsearch 1.x should ignore custom postings formats - throw e; - } - Assert.assertThat(e.getMessage(), containsString("unsupported parameters: [postings_format")); - } - } - } - - public void testAcceptDocValuesFormat() throws IOException { - int i = 0; - for (Version v : VersionUtils.allReleasedVersions()) { - if (v.onOrAfter(Version.V_2_0_0) == false) { - // no need to test, we don't support upgrading from these versions - continue; - } - IndexService indexService = createIndex("test-" + i++, - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", v.onOrAfter(Version.V_5_0_0_alpha1) ? 
"keyword" : "string") - .field("doc_values_format", Codec.getDefault().docValuesFormat().getName()) - .endObject() - .endObject() - .endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mapping)); - if (v.onOrAfter(Version.V_2_0_0_beta1)) { - fail("Elasticsearch 2.0 should not support custom postings formats"); - } - } catch (MapperParsingException e) { - if (v.before(Version.V_2_0_0_beta1)) { - // Elasticsearch 1.x should ignore custom postings formats - throw e; - } - Assert.assertThat(e.getMessage(), containsString("unsupported parameters: [doc_values_format")); - } - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java index 30ae9a7b6f7a3..6d8e1a41c5b94 100644 --- a/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java +++ b/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -138,7 +137,6 @@ public void testExplainWithFields() throws Exception { assertThat(response.getGetResult().isExists(), equalTo(true)); assertThat(response.getGetResult().getId(), equalTo("1")); Set fields = new HashSet<>(response.getGetResult().getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly added via templates assertThat(fields, equalTo(singleton("obj1.field1"))); assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1")); assertThat(response.getGetResult().isSourceEmpty(), equalTo(true)); @@ -155,7 +153,6 @@ public void testExplainWithFields() throws Exception { assertThat(response.getGetResult().isExists(), equalTo(true)); assertThat(response.getGetResult().getId(), equalTo("1")); fields = new HashSet<>(response.getGetResult().getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly added via templates assertThat(fields, equalTo(singleton("obj1.field1"))); assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1")); assertThat(response.getGetResult().isSourceEmpty(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index a998b56f64090..de510eb293f01 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -414,7 +414,7 @@ public void testRecoverBrokenIndexMetadata() throws Exception { IndexMetaData metaData = state.getMetaData().index("test"); for (NodeEnvironment services : internalCluster().getInstances(NodeEnvironment.class)) { IndexMetaData brokenMeta = IndexMetaData.builder(metaData).settings(Settings.builder().put(metaData.getSettings()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_beta1.id) // this is invalid but should be archived .put("index.similarity.BM25.type", "classic") // this one is not validated ahead of time and breaks allocation diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java 
b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 434536ac8d9d7..2a0505b273bfb 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; @@ -88,7 +87,6 @@ public void testSimpleGet() { assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); Set<String> fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(Collections.emptySet())); assertThat(response.getSourceAsBytes(), nullValue()); @@ -97,7 +95,6 @@ public void testSimpleGet() { assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(Collections.emptySet())); assertThat(response.getSourceAsBytes(), nullValue()); @@ -276,7 +273,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { assertThat(response.getId(), equalTo("1")); assertThat(response.getType(), equalTo("type1")); Set<String> fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -288,7 +284,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { assertThat(response.getType(), equalTo("type2")); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -300,7 +295,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -310,7 +304,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -540,8 +533,6 @@ public void testGetFieldsMetaData() throws Exception {
client().prepareIndex("test", "my-type1", "1") .setRouting("1") - .setTimestamp("205097") - .setTTL(10000000000000L) .setParent("parent_1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) .get(); @@ -773,7 +764,7 @@ public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); ensureGreen(); - client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").setTTL(TimeValue.timeValueHours(1).getMillis()).get(); + client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").get(); String[] fieldsList = {"_parent"}; // before refresh - document is only in translog @@ -900,7 +891,7 @@ void indexSingleDocumentWithNumericFieldsGeneratedFromText(boolean stored, boole " \"store\": \"" + storedString + "\"" + " },\n" + " \"text\": {\n" + - " \"type\": \"string\",\n" + + " \"type\": \"text\",\n" + " \"fields\": {\n" + " \"token_count\": {\n" + " \"type\": \"token_count\",\n" + diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index e1d8a878c1453..b068d00b6a22a 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -40,7 +40,7 @@ public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1), new NumericDocValuesField("seqNo", 1), "id", - "test", null, 0, -1, null, source, null); + "test", null, null, source, null); Index index = new Index("foo", "123"); // Turning off document logging doesn't log source[] SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 0); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 5d5a2e7b1ceaf..2be63bc2f2b9e 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -265,14 +265,14 @@ private Document testDocument() { } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, BytesReference source, Mapping mappingUpdate) { + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, Document document, BytesReference source, Mapping mappingUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field versionField = new NumericDocValuesField("_version", 0); Field seqNoField = new NumericDocValuesField("_seq_no", 0); document.add(uidField); document.add(versionField); document.add(seqNoField); - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate); + return new ParsedDocument(versionField, seqNoField, id, type, routing, Arrays.asList(document), source, mappingUpdate); } protected Store createStore() throws IOException { @@ -377,10 +377,10 @@ public void testSegments() throws Exception { assertThat(engine.segmentsStats(false).getMemoryInBytes(), 
equalTo(0L)); // create two docs and refresh - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); Engine.Index first = new Engine.Index(newUid("1"), doc); Engine.IndexResult firstResult = engine.index(first); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); Engine.Index second = new Engine.Index(newUid("2"), doc2); Engine.IndexResult secondResult = engine.index(second); assertThat(secondResult.getTranslogLocation(), greaterThan(firstResult.getTranslogLocation())); @@ -413,7 +413,7 @@ public void testSegments() throws Exception { assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); assertThat(segments.get(0).isCompound(), equalTo(true)); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("3"), doc3)); engine.refresh("test"); @@ -460,7 +460,7 @@ public void testSegments() throws Exception { assertThat(segments.get(1).isCompound(), equalTo(true)); engine.onSettingsChanged(); - ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("4"), doc4)); engine.refresh("test"); @@ -494,7 +494,7 @@ public void testVerboseSegments() throws Exception { List<Segment> segments = engine.segments(true); assertThat(segments.isEmpty(), equalTo(true)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.refresh("test"); @@ -502,10 +502,10 @@ public void testVerboseSegments() throws Exception { assertThat(segments.size(), equalTo(1)); assertThat(segments.get(0).ramTree, notNullValue()); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); engine.index(new Engine.Index(newUid("2"), doc2)); engine.refresh("test"); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("3"), doc3)); engine.refresh("test"); @@ -520,7 +520,7 @@ public void testSegmentsWithMergeFlag() throws Exception { try (Store store = createStore(); Engine engine = createEngine(defaultSettings, store, createTempDir(), new TieredMergePolicy())) { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); engine.index(index); engine.flush(); @@ -574,7 +574,7 @@ public void
testSegmentsStatsIncludingFileSizes() throws Exception { Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { assertThat(engine.segmentsStats(true).getFileSizes().size(), equalTo(0)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.refresh("test"); @@ -584,7 +584,7 @@ public void testSegmentsStatsIncludingFileSizes() throws Exception { ObjectObjectCursor<String, Long> firstEntry = stats.getFileSizes().iterator().next(); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); engine.index(new Engine.Index(newUid("2"), doc2)); engine.refresh("test"); @@ -685,7 +685,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { assertFalse(engine.isRecovering()); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.close(); @@ -694,7 +694,7 @@ public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { assertTrue(engine.isRecovering()); engine.recoverFromTranslog(); assertFalse(engine.isRecovering()); - doc = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + doc = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("2"), doc)); engine.flush(); } @@ -706,7 +706,7 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { try { initialEngine = engine; for (int i = 0; i < ops; i++) { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), SOURCE, null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), SOURCE, null); if (randomBoolean()) { final Engine.Index operation = new Engine.Index(newUid("test#1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false); operations.add(operation); @@ -742,7 +742,7 @@ public void testTranslogRecoveryDoesNotReplayIntoTranslog() throws IOException { initialEngine = engine; for (int i = 0; i < docs; i++) { final String id = Integer.toString(i); - final ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocumentWithTextField(), SOURCE, null); + final ParsedDocument doc = testParsedDocument(id, id, "test", null, testDocumentWithTextField(), SOURCE, null); initialEngine.index(new Engine.Index(newUid(id), doc)); } } finally { @@ -771,7 +771,7 @@ public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineExcepti } public void testConcurrentGetAndFlush() throws Exception { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); final AtomicReference<Engine.GetResult>
latestGetResult = new AtomicReference<>(); @@ -815,7 +815,7 @@ public void testSimpleOperations() throws Exception { // create a document Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -854,7 +854,7 @@ public void testSimpleOperations() throws Exception { document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); + doc = testParsedDocument("1", "1", "test", null, document, B_2, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... @@ -906,7 +906,7 @@ public void testSimpleOperations() throws Exception { // add it back document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED)); // its not there... @@ -939,7 +939,7 @@ public void testSimpleOperations() throws Exception { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... @@ -965,7 +965,7 @@ public void testSearchResultRelease() throws Exception { searchResult.close(); // create a document - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not there... 
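
[Reviewer note] The hunks in this file are almost entirely mechanical: every testParsedDocument call drops the two unused long placeholders (timestamp and ttl, typically "-1, -1") that sat between the routing argument and the document. For orientation, a minimal before/after sketch of one call site, plus an annotated form of the long Engine.Index constructor that repeats below; the argument roles are inferred from these call sites, since the patch itself never names them:

    // Before this patch: unused timestamp/ttl placeholders threaded through every test.
    // ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
    // After: the helper mirrors the slimmed-down ParsedDocument constructor.
    ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null);

    // The long-form Engine.Index constructor used throughout these tests,
    // annotated with each argument's inferred role (an assumption, not from the patch):
    Engine.Index index = new Engine.Index(
            newUid("1"),                              // uid term of the target document
            doc,                                      // the ParsedDocument built above
            SequenceNumbersService.UNASSIGNED_SEQ_NO, // sequence number (unassigned here)
            12,                                       // version
            VersionType.EXTERNAL,                     // version type
            PRIMARY,                                  // operation origin
            0,                                        // operation start time in nanos
            -1,                                       // auto-generated id timestamp (unset)
            false);                                   // whether this operation is a retry
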
@@ -1001,7 +1001,7 @@ public void testSyncedFlush() throws IOException { Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); Engine.CommitId commitID = engine.flush(); assertThat(commitID, equalTo(new Engine.CommitId(store.readLastCommittedSegmentsInfo().getId()))); @@ -1028,7 +1028,7 @@ public void testRenewSyncFlush() throws Exception { InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogDocMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); Engine.Index doc1 = new Engine.Index(newUid("1"), doc); engine.index(doc1); assertEquals(engine.getLastWriteNanos(), doc1.startTime()); @@ -1081,7 +1081,7 @@ public void testRenewSyncFlush() throws Exception { public void testSyncedFlushSurvivesEngineRestart() throws IOException { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); final Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), @@ -1104,14 +1104,14 @@ public void testSyncedFlushSurvivesEngineRestart() throws IOException { public void testSyncedFlushVanishesOnReplay() throws IOException { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); final Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), Engine.SyncedFlushResult.SUCCESS); assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); - doc = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), new BytesArray("{}"), null); + doc = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), new BytesArray("{}"), null); engine.index(new Engine.Index(newUid("2"), doc)); EngineConfig config = engine.config(); engine.close(); @@ -1121,7 +1121,7 @@ public void testSyncedFlushVanishesOnReplay() throws IOException { } public void testVersioningNewCreate() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = 
testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1132,7 +1132,7 @@ public void testVersioningNewCreate() { } public void testVersioningNewIndex() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1143,7 +1143,7 @@ public void testVersioningNewIndex() { } public void testExternalVersioningNewIndex() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 12, VersionType.EXTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(12L)); @@ -1154,7 +1154,7 @@ public void testExternalVersioningNewIndex() { } public void testVersioningIndexConflict() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1176,7 +1176,7 @@ public void testVersioningIndexConflict() { } public void testExternalVersioningIndexConflict() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 12, VersionType.EXTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(12L)); @@ -1192,7 +1192,7 @@ public void testExternalVersioningIndexConflict() { } public void testForceVersioningNotAllowedExceptForOlderIndices() throws Exception { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 42, VersionType.FORCE, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); @@ -1219,7 +1219,7 @@ public void testForceVersioningNotAllowedExceptForOlderIndices() throws Exceptio } public void testVersioningIndexConflictWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1243,7 +1243,7 @@ public void testVersioningIndexConflictWithFlush() { } public void testExternalVersioningIndexConflictWithFlush() { - ParsedDocument doc = 
testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 12, VersionType.EXTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(12L)); @@ -1266,7 +1266,7 @@ public void testForceMerge() throws IOException { new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { // use log MP here we test some behavior in ESMP int numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid(Integer.toString(i)), doc); engine.index(index); engine.refresh("test"); @@ -1277,7 +1277,7 @@ public void testForceMerge() throws IOException { engine.forceMerge(true, 1, false, false, false); assertEquals(engine.segments(true).size(), 1); - ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid(Integer.toString(0)), doc); engine.delete(new Engine.Delete(index.type(), index.id(), index.uid())); engine.forceMerge(true, 10, true, false, false); //expunge deletes @@ -1288,7 +1288,7 @@ public void testForceMerge() throws IOException { assertEquals(engine.config().getMergePolicy().toString(), numDocs - 1, test.reader().maxDoc()); } - doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, -1, -1, testDocument(), B_1, null); + doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, testDocument(), B_1, null); index = new Engine.Index(newUid(Integer.toString(1)), doc); engine.delete(new Engine.Delete(index.type(), index.id(), index.uid())); engine.forceMerge(true, 10, false, false, false); //expunge deletes @@ -1323,7 +1323,7 @@ public void run() { int numDocs = randomIntBetween(1, 20); for (int j = 0; j < numDocs; j++) { i++; - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid(Integer.toString(i)), doc); engine.index(index); } @@ -1356,7 +1356,7 @@ public void run() { } public void testVersioningDeleteConflict() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1389,7 +1389,7 @@ public void testVersioningDeleteConflict() { } public void testVersioningDeleteConflictWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, 
testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1428,7 +1428,7 @@ public void testVersioningDeleteConflictWithFlush() { } public void testVersioningCreateExistsException() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1440,7 +1440,7 @@ public void testVersioningCreateExistsException() { } public void testVersioningCreateExistsExceptionWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1454,7 +1454,7 @@ public void testVersioningCreateExistsExceptionWithFlush() { } public void testVersioningReplicaConflict1() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1484,7 +1484,7 @@ public void testVersioningReplicaConflict1() { } public void testVersioningReplicaConflict2() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1526,7 +1526,7 @@ public void testVersioningReplicaConflict2() { } public void testBasicCreatedFlag() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertTrue(indexResult.isCreated()); @@ -1543,7 +1543,7 @@ public void testBasicCreatedFlag() { } public void testCreatedFlagAfterFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertTrue(indexResult.isCreated()); @@ -1597,7 +1597,7 @@ public void testIndexWriterInfoStream() throws IllegalAccessException { try { // First, with DEBUG, which should NOT log IndexWriter output: - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + 
ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.flush(); assertFalse(mockAppender.sawIndexWriterMessage); @@ -1653,7 +1653,7 @@ public void testSeqNoAndCheckpoints() throws IOException { } else { // index a document id = randomFrom(ids); - ParsedDocument doc = testParsedDocument("test#" + id, id, "test", null, -1, -1, testDocumentWithTextField(), SOURCE, null); + ParsedDocument doc = testParsedDocument("test#" + id, id, "test", null, testDocumentWithTextField(), SOURCE, null); final Engine.Index index = new Engine.Index(newUid("test#" + id), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, rarely() ? 100 : Versions.MATCH_ANY, VersionType.INTERNAL, @@ -1755,7 +1755,7 @@ public void run() { // index random number of docs for (int i = 0; i < numDocsPerThread; i++) { final String id = "thread" + threadIdx + "#" + i; - ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(id, id, "test", null, testDocument(), B_1, null); engine.index(new Engine.Index(newUid(id), doc)); } } catch (Exception e) { @@ -1857,7 +1857,7 @@ public void testIndexWriterIFDInfoStream() throws IllegalAccessException { try { // First, with DEBUG, which should NOT log IndexWriter output: - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.flush(); assertFalse(mockAppender.sawIndexWriterMessage); @@ -1886,7 +1886,7 @@ public void testEnableGcDeletes() throws Exception { Document document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_2, null); engine.index(new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false)); // Delete document we just added: @@ -2018,7 +2018,7 @@ public void testMissingTranslog() throws IOException { public void testTranslogReplayWithFailure() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2068,7 +2068,7 @@ public void testTranslogReplayWithFailure() throws IOException { public void testSkipTranslogReplay() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = 
testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2161,7 +2161,7 @@ public void testUpgradeOldIndex() throws IOException { } final int numExtraDocs = randomIntBetween(1, 10); for (int i = 0; i < numExtraDocs; i++) { - ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "extra" + Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "extra" + Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2190,7 +2190,7 @@ private Path[] filterExtraFSFiles(Path[] files) { public void testTranslogReplay() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2233,7 +2233,7 @@ public void testTranslogReplay() throws IOException { final boolean flush = randomBoolean(); int randomId = randomIntBetween(numDocs + 1, numDocs + 10); String uuidValue = "test#" + Integer.toString(randomId); - ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2241,7 +2241,7 @@ public void testTranslogReplay() throws IOException { engine.flush(); } - doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult result = engine.index(idxRequest); engine.refresh("test"); @@ -2307,7 +2307,7 
+2307,7 @@ protected void operationProcessed() { public void testRecoverFromForeignTranslog() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult index = engine.index(firstIndexRequest); assertThat(index.getVersion(), equalTo(1L)); @@ -2395,7 +2395,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { // create { - ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG))){ @@ -2455,7 +2455,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { } public void testCheckDocumentFailure() throws Exception { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); Exception documentFailure = engine.checkIfDocumentFailureOrThrow(new Engine.Index(newUid("1"), doc), new IOException("simulated document failure")); assertThat(documentFailure, instanceOf(IOException.class)); try { @@ -2498,7 +2498,7 @@ public void setThrowDocumentFailure(boolean throwDocumentFailure) { public void testHandleDocumentFailure() throws Exception { try (Store store = createStore()) { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); ThrowingIndexWriter throwingIndexWriter = new ThrowingIndexWriter(store.directory(), new IndexWriterConfig()); try (Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, () -> throwingIndexWriter)) { // test document failure while indexing @@ -2522,7 +2522,7 @@ public void testHandleDocumentFailure() throws Exception { public void testDocStats() throws IOException { final int numDocs = randomIntBetween(2, 10); // at least 2 documents otherwise we don't see any deletes below for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY,
System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2532,7 +2532,7 @@ public void testDocStats() throws IOException { assertEquals(0, docStats.getDeleted()); engine.forceMerge(randomBoolean(), 1, false, false, false); - ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult index = engine.index(firstIndexRequest); assertThat(index.getVersion(), equalTo(2L)); @@ -2547,7 +2547,7 @@ public void testDocStats() throws IOException { } public void testDoubleDelivery() throws IOException { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, 100, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index operation = randomAppendOnly(1, doc, false); Engine.Index retry = randomAppendOnly(1, doc, true); if (randomBoolean()) { @@ -2603,7 +2603,7 @@ public void testDoubleDelivery() throws IOException { public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOException { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, 100, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); boolean isRetry = false; long autoGeneratedIdTimestamp = 0; @@ -2637,7 +2637,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() throws IOException { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, 100, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); boolean isRetry = true; long autoGeneratedIdTimestamp = 0; @@ -2680,7 +2680,7 @@ public void testRetryConcurrently() throws InterruptedException, IOException { int numDocs = randomIntBetween(1000, 10000); List<Engine.Index> docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, i, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index originalIndex = randomAppendOnly(i, doc, false); Engine.Index retryIndex = randomAppendOnly(i, doc, true); docs.add(originalIndex); @@ -2743,7 +2743,7 @@ public void testAppendConcurrently() throws InterruptedException, IOException {
assertEquals(0, engine.getNumIndexVersionsLookups()); List<Engine.Index> docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, i, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index index = randomAppendOnly(i, doc, false); docs.add(index); } @@ -2821,7 +2821,7 @@ public void afterRefresh(boolean didRefresh) throws IOException { }); InternalEngine internalEngine = new InternalEngine(config); int docId = 0; - final ParsedDocument doc = testParsedDocument(Integer.toString(docId), Integer.toString(docId), "test", null, docId, -1, + final ParsedDocument doc = testParsedDocument(Integer.toString(docId), Integer.toString(docId), "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index index = randomAppendOnly(docId, doc, false); @@ -2882,7 +2882,7 @@ public boolean incrementToken() throws IOException { })); final Document document = testDocument(); document.add(new TextField("value", "test", Field.Store.YES)); - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); final Engine.Index first = new Engine.Index(newUid("1"), doc); expectThrows(error.getClass(), () -> engine.index(first)); failWithFatalError.set(false); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 0ab65825bc8b5..50ea9c0871612 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -171,14 +171,14 @@ private ParseContext.Document testDocument() { } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, ParseContext.Document document, BytesReference source, Mapping mappingsUpdate) { + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, ParseContext.Document document, BytesReference source, Mapping mappingsUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field versionField = new NumericDocValuesField("_version", 0); Field seqNoField = new NumericDocValuesField("_seq_no", 0); document.add(uidField); document.add(versionField); document.add(new LongPoint("point_field", 42)); // so that points report memory/disk usage - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate); + return new ParsedDocument(versionField, seqNoField, id, type, routing, Arrays.asList(document), source, mappingsUpdate); } protected Store createStore(Path p) throws IOException { @@ -260,7 +260,7 @@ protected Term newUid(String id) { public void testCommitStats() { // create a doc and refresh - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc));
CommitStats stats1 = replicaEngine.commitStats(); @@ -292,10 +292,10 @@ public void testSegments() throws Exception { assertThat(primaryEngine.segmentsStats(false).getMemoryInBytes(), equalTo(0L)); // create a doc and refresh - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); primaryEngine.index(new Engine.Index(newUid("2"), doc2)); primaryEngine.refresh("test"); @@ -354,7 +354,7 @@ public void testSegments() throws Exception { assertThat(segments.get(0).isCompound(), equalTo(true)); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("3"), doc3)); primaryEngine.refresh("test"); @@ -426,7 +426,7 @@ public void testSegments() throws Exception { primaryEngine.flush(); replicaEngine.refresh("test"); - ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("4"), doc4)); primaryEngine.refresh("test"); @@ -459,7 +459,7 @@ public void testVerboseSegments() throws Exception { List<Segment> segments = primaryEngine.segments(true); assertThat(segments.isEmpty(), equalTo(true)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.refresh("test"); @@ -467,10 +467,10 @@ public void testVerboseSegments() throws Exception { assertThat(segments.size(), equalTo(1)); assertThat(segments.get(0).ramTree, notNullValue()); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); primaryEngine.index(new Engine.Index(newUid("2"), doc2)); primaryEngine.refresh("test"); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("3"), doc3)); primaryEngine.refresh("test"); @@ -496,7 +496,7 @@ public void testShadowEngineIgnoresWriteOperations() throws Exception { // create a document ParseContext.Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); try { replicaEngine.index(new Engine.Index(newUid("1"), doc)); fail("should have thrown an exception"); @@ -515,7
+515,7 @@ public void testShadowEngineIgnoresWriteOperations() throws Exception { // index a document document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); try { replicaEngine.index(new Engine.Index(newUid("1"), doc)); fail("should have thrown an exception"); @@ -534,7 +534,7 @@ public void testShadowEngineIgnoresWriteOperations() throws Exception { // Now, add a document to the primary so we can test shadow engine deletes document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); replicaEngine.refresh("test"); @@ -589,7 +589,7 @@ public void testSimpleOperations() throws Exception { // create a document ParseContext.Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -642,7 +642,7 @@ public void testSimpleOperations() throws Exception { document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); + doc = testParsedDocument("1", "1", "test", null, document, B_2, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... @@ -712,7 +712,7 @@ public void testSimpleOperations() throws Exception { // add it back document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -757,7 +757,7 @@ public void testSimpleOperations() throws Exception { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... @@ -793,7 +793,7 @@ public void testSearchResultRelease() throws Exception { searchResult.close(); // create a document - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not there...
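
[Reviewer note] One detail of the shadow-engine variant worth calling out: unlike the InternalEngineTests helper, its testParsedDocument adds a Lucene LongPoint to each document so that the points codec shows up in segment memory/disk accounting. A condensed sketch of the updated helper, restating the hunk at the top of this file's diff rather than introducing anything new:

    private ParsedDocument testParsedDocument(String uid, String id, String type, String routing,
                                              ParseContext.Document document, BytesReference source, Mapping mappingsUpdate) {
        Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
        Field versionField = new NumericDocValuesField("_version", 0);
        Field seqNoField = new NumericDocValuesField("_seq_no", 0);
        document.add(uidField);
        document.add(versionField);
        document.add(new LongPoint("point_field", 42)); // so that points report memory/disk usage
        return new ParsedDocument(versionField, seqNoField, id, type, routing,
                Arrays.asList(document), source, mappingsUpdate);
    }
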
@@ -838,7 +838,7 @@ public void testSearchResultRelease() throws Exception { } public void testFailEngineOnCorruption() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); MockDirectoryWrapper leaf = DirectoryUtils.getLeaf(replicaEngine.config().getStore().directory(), MockDirectoryWrapper.class); @@ -875,7 +875,7 @@ public void testExtractShardId() { */ public void testFailStart() throws IOException { // Need a commit point for this - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); @@ -961,7 +961,7 @@ public void run() { // create a document ParseContext.Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); pEngine.index(new Engine.Index(newUid("1"), doc)); pEngine.flush(true, true); @@ -983,7 +983,7 @@ public void testNoTranslog() { public void testDocStats() throws IOException { final int numDocs = randomIntBetween(2, 10); // at least 2 documents otherwise we don't see any deletes below for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = primaryEngine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 1fa7272d2d9c1..5e26f26978e20 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -41,19 +41,14 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LegacyByteFieldMapper; -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper; -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper; -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper; -import org.elasticsearch.index.mapper.LegacyShortFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import 
org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; +import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; @@ -104,25 +99,31 @@ public > IFD getForField(String type, String field final MappedFieldType fieldType; final BuilderContext context = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); if (type.equals("string")) { - fieldType = new StringFieldMapper.Builder(fieldName).tokenized(false).fielddata(docValues == false).docValues(docValues).build(context).fieldType(); + if (docValues) { + fieldType = new KeywordFieldMapper.Builder(fieldName).build(context).fieldType(); + } else { + fieldType = new TextFieldMapper.Builder(fieldName).fielddata(true).build(context).fieldType(); + } } else if (type.equals("float")) { - fieldType = new LegacyFloatFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.FLOAT) + .docValues(docValues).build(context).fieldType(); } else if (type.equals("double")) { - fieldType = new LegacyDoubleFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.DOUBLE) + .docValues(docValues).build(context).fieldType(); } else if (type.equals("long")) { - fieldType = new LegacyLongFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.LONG) + .docValues(docValues).build(context).fieldType(); } else if (type.equals("int")) { - fieldType = new LegacyIntegerFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.INTEGER) + .docValues(docValues).build(context).fieldType(); } else if (type.equals("short")) { - fieldType = new LegacyShortFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.SHORT) + .docValues(docValues).build(context).fieldType(); } else if (type.equals("byte")) { - fieldType = new LegacyByteFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.BYTE) + .docValues(docValues).build(context).fieldType(); } else if (type.equals("geo_point")) { - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - fieldType = new LegacyGeoPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else { - fieldType = new GeoPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } + fieldType = new LatLonPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("_parent")) { fieldType = new 
ParentFieldMapper.Builder("_type").type(fieldName).build(context).fieldType(); } else if (type.equals("binary")) { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java index b1a3c9c088632..df7df5771cbee 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java @@ -20,8 +20,8 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.StringField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -39,29 +39,16 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl protected Field randomGeoPointField(String fieldName, Field.Store store) { GeoPoint point = randomPoint(random()); - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - return new StringField(fieldName, point.lat()+","+point.lon(), store); - } - final GeoPointField.TermEncoding termEncoding; - termEncoding = indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_2_3_0) ? - GeoPointField.TermEncoding.PREFIX : GeoPointField.TermEncoding.NUMERIC; - return new GeoPointField(fieldName, point.lat(), point.lon(), termEncoding, store); + return new LatLonDocValuesField(fieldName, point.lat(), point.lon()); } @Override protected boolean hasDocValues() { - // prior to 22 docValues were not required - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - return false; - } return true; } @Override protected long minRamBytesUsed() { - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - return super.minRamBytesUsed(); - } return 0; } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index af60e2e7b099b..cefa9c74ea38d 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -38,12 +38,6 @@ import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.LegacyByteFieldMapper; -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper; -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper; -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper; -import org.elasticsearch.index.mapper.LegacyShortFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; @@ -198,11 +192,11 @@ private void doTestRequireDocValues(MappedFieldType ft) { } public void testRequireDocValuesOnLongs() { - doTestRequireDocValues(new LegacyLongFieldMapper.LongFieldType()); + doTestRequireDocValues(new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); } public void 
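Taken together, the field-data hunks above collapse the per-type legacy builders into a handful of current ones: every numeric type goes through a single `NumberFieldMapper.Builder` parameterized by `NumberType`, strings split into keyword/text depending on whether doc values or fielddata are wanted, and `geo_point` is unconditionally the Lucene points-based mapper. A condensed sketch of the resulting pattern, reusing the `context` and builder calls shown above (`my_long`, `my_keyword`, and `my_point` are placeholder field names):

    // One builder, parameterized by the numeric type, replaces the Legacy*FieldMapper builders.
    MappedFieldType longType = new NumberFieldMapper.Builder("my_long", NumberFieldMapper.NumberType.LONG)
        .docValues(true).build(context).fieldType();

    // Strings split by need: keyword for doc values, text with fielddata for the inverted-index path.
    MappedFieldType keywordType = new KeywordFieldMapper.Builder("my_keyword").build(context).fieldType();

    // geo_point always maps to Lucene points now, and random test fields use the doc-values variant.
    MappedFieldType pointType = new LatLonPointFieldMapper.Builder("my_point").docValues(true)
        .build(context).fieldType();
    Field geoField = new LatLonDocValuesField("my_point", 48.86, 2.35);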
testRequireDocValuesOnDoubles() { - doTestRequireDocValues(new LegacyDoubleFieldMapper.DoubleFieldType()); + doTestRequireDocValues(new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); } public void testRequireDocValuesOnBools() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index 989f1fa683577..2243c1182bd60 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -111,12 +111,5 @@ public void testEmptyName() throws IOException { () -> createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - - DocumentMapper defaultMapper = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index b07f3b43ff6a5..28e6f50faa6af 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.store.Directory; @@ -179,44 +180,16 @@ public void testDocValues() throws Exception { .endObject() .bytes()); Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool1")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool2")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool3")); - } - - public void testBwCompatDocValues() throws Exception { - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("bool1") - .field("type", "boolean") - .endObject() - .startObject("bool2") - .field("type", "boolean") - .field("index", "no") - .endObject() - .startObject("bool3") - .field("type", "boolean") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("bool1", true) - .field("bool2", true) 
- .field("bool3", true) - .endObject() - .bytes()); - Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool1")); - assertEquals(DocValuesType.NONE, LegacyStringMappingTests.docValuesType(doc, "bool2")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool3")); + IndexableField[] fields = doc.getFields("bool1"); + assertEquals(2, fields.length); + assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); + assertEquals(DocValuesType.SORTED_NUMERIC, fields[1].fieldType().docValuesType()); + fields = doc.getFields("bool2"); + assertEquals(1, fields.length); + assertEquals(DocValuesType.SORTED_NUMERIC, fields[0].fieldType().docValuesType()); + fields = doc.getFields("bool3"); + assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); + assertEquals(DocValuesType.SORTED_NUMERIC, fields[1].fieldType().docValuesType()); } public void testEmptyName() throws IOException { @@ -229,14 +202,5 @@ public void testEmptyName() throws IOException { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java deleted file mode 100644 index a44941a19d88c..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.util.Collection; -import java.util.Map; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -public class CompletionFieldMapper2xTests extends ESSingleNodeTestCase { - private final Version PRE2X_VERSION = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_1); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testDefaultConfiguration() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("completion") - .field("type", "completion") - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - assertThat(completionFieldMapper.isStoringPayloads(), is(false)); - } - - public void testThatSerializationIncludesAllElements() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("completion") - .field("type", "completion") - .field("analyzer", "simple") - .field("search_analyzer", "standard") - .field("payloads", true) - .field("preserve_separators", false) - .field("preserve_position_increments", true) - .field("max_input_length", 14) - - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - XContentBuilder builder = jsonBuilder().startObject(); - 
completionFieldMapper.toXContent(builder, null).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - Map configMap = (Map) serializedMap.get("completion"); - assertThat(configMap.get("analyzer").toString(), is("simple")); - assertThat(configMap.get("search_analyzer").toString(), is("standard")); - assertThat(Boolean.valueOf(configMap.get("payloads").toString()), is(true)); - assertThat(Boolean.valueOf(configMap.get("preserve_separators").toString()), is(false)); - assertThat(Boolean.valueOf(configMap.get("preserve_position_increments").toString()), is(true)); - assertThat(Integer.valueOf(configMap.get("max_input_length").toString()), is(14)); - } - - public void testThatSerializationCombinesToOneAnalyzerFieldIfBothAreEqual() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("completion") - .field("type", "completion") - .field("analyzer", "simple") - .field("search_analyzer", "simple") - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - XContentBuilder builder = jsonBuilder().startObject(); - completionFieldMapper.toXContent(builder, null).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - Map configMap = (Map) serializedMap.get("completion"); - assertThat(configMap.get("analyzer").toString(), is("simple")); - } - - public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("").field("type", "completion").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper(""); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - assertThat(completionFieldMapper.isStoringPayloads(), is(false)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java deleted file mode 100644 index 1c00f8219964e..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; - -public class CompoundTypesTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); - - public void testBackCompatStringType() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("field1"), equalTo("value1")); - assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d)); - assertThat(doc.rootDoc().get("field2"), equalTo("value2")); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("field1").field("value", "value1").field("boost", 2.0f).endObject() - .field("field2", "value2") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("field1"), equalTo("value1")); - assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(2.0d, 0.000001d)); - assertThat(doc.rootDoc().get("field2"), equalTo("value2")); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("field1"), equalTo("value1")); - assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d)); - assertThat(doc.rootDoc().get("field2"), equalTo("value2")); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java deleted file mode 100644 index 
391f987e71448..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentFieldMappers; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class CustomBoostMappingTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testBackCompatCustomBoostValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "string").endObject() - .startObject("l_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("i_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("sh_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("b_field").field("type", "byte").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("d_field").field("type", "double").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("f_field").field("type", "float").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", 
XContentFactory.jsonBuilder().startObject() - .startObject("s_field").field("value", "s_value").field("boost", 2.0f).endObject() - .startObject("l_field").field("value", 1L).field("boost", 3.0f).endObject() - .startObject("i_field").field("value", 1).field("boost", 4.0f).endObject() - .startObject("sh_field").field("value", 1).field("boost", 5.0f).endObject() - .startObject("b_field").field("value", 1).field("boost", 6.0f).endObject() - .startObject("d_field").field("value", 1).field("boost", 7.0f).endObject() - .startObject("f_field").field("value", 1).field("boost", 8.0f).endObject() - .startObject("date_field").field("value", "20100101").field("boost", 9.0f).endObject() - .endObject().bytes()); - - assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f)); - assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f)); - assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f)); - assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f)); - assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f)); - assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f)); - assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); - assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); - } - - public void testBackCompatFieldMappingBoostValues() throws Exception { - { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() - .startObject("l_field").field("type", "long").field("boost", 3.0f).endObject() - .startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject() - .startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject() - .startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject() - .startObject("d_field").field("type", "double").field("boost", 7.0f).endObject() - .startObject("f_field").field("type", "float").field("boost", 8.0f).endObject() - .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject() - .endObject().endObject().endObject().string(); - IndexService indexService = createIndex("test", BW_SETTINGS); - QueryShardContext context = indexService.newQueryShardContext(0, null, () -> 0L); - DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - DocumentFieldMappers fieldMappers = mapper.mappers(); - assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("l_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("i_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("sh_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("b_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("d_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("f_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("date_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() - 
.field("s_field", "s_value") - .field("l_field", 1L) - .field("i_field", 1) - .field("sh_field", 1) - .field("b_field", 1) - .field("d_field", 1) - .field("f_field", 1) - .field("date_field", "20100101") - .endObject().bytes()); - - assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f)); - assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f)); - assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f)); - assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f)); - assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f)); - assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f)); - assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); - assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); - assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(false)); - } - - { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() - .startObject("l_field").field("type", "long").field("boost", 3.0f).endObject() - .startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject() - .startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject() - .startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject() - .startObject("d_field").field("type", "double").field("boost", 7.0f).endObject() - .startObject("f_field").field("type", "float").field("boost", 8.0f).endObject() - .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject() - .endObject().endObject().endObject().string(); - IndexService indexService = createIndex("text"); - QueryShardContext context = indexService.newQueryShardContext(0, null, () -> 0L); - DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - DocumentFieldMappers fieldMappers = mapper.mappers(); - assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("l_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("i_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("sh_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("b_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("d_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("f_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - 
assertThat(fieldMappers.getMapper("date_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() - .field("s_field", "s_value") - .field("l_field", 1L) - .field("i_field", 1) - .field("sh_field", 1) - .field("b_field", 1) - .field("d_field", 1) - .field("f_field", 1) - .field("date_field", "20100101") - .endObject().bytes()); - - assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(true)); - assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(1f)); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index cf6335c808a79..0a094be1480ba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -340,15 +340,6 @@ public void testEmptyName() throws IOException { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().toString()); } /** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index 9830488989ce7..621e9a8cccc78 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -41,12 +41,6 @@ public void testParseUnknownParam() throws Exception { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1)); assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage()); - - // but no issues on 2.x for bw compat - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_2_3_0); - XContentBuilder builder = JsonXContent.contentBuilder(); - template.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string()); } public void testParseUnknownMatchType() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index 72f9d09808f20..af5e2553be71b 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -86,13 +85,9 @@ public void testExternalValues() throws Exception { assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); assertThat(doc.rootDoc().getField("field.point"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); - assertThat(point.lat(), closeTo(42.0, 1e-5)); - assertThat(point.lon(), closeTo(51.0, 1e-5)); - } + GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); + assertThat(point.lat(), closeTo(42.0, 1e-5)); + assertThat(point.lon(), closeTo(51.0, 1e-5)); assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); @@ -149,15 +144,9 @@ public void testExternalValuesWithMultifield() throws Exception { assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); assertThat(doc.rootDoc().getField("field.point"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); - } else { - GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); - assertThat(point.lat(), closeTo(42.0, 1E-5)); - assertThat(point.lon(), closeTo(51.0, 1E-5)); - } + GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); + assertThat(point.lat(), closeTo(42.0, 1E-5)); + assertThat(point.lon(), closeTo(51.0, 1E-5)); IndexableField shape = doc.rootDoc().getField("field.shape"); assertThat(shape, notNullValue()); @@ -169,11 +158,7 @@ public void testExternalValuesWithMultifield() throws Exception { IndexableField raw = doc.rootDoc().getField("field.field.raw"); assertThat(raw, notNullValue()); - if (version.before(Version.V_5_0_0_alpha1)) { - assertThat(raw.stringValue(), is("foo")); - } else { - assertThat(raw.binaryValue(), is(new BytesRef("foo"))); - } + assertThat(raw.binaryValue(), is(new BytesRef("foo"))); } public void testExternalValuesWithMultifieldTwoLevels() throws Exception { @@ -226,11 +211,6 @@ public void testExternalValuesWithMultifieldTwoLevels() throws Exception { assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); assertThat(doc.rootDoc().getField("field.point"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); - } assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index de77e018c5c01..94f129219f216 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -22,7 +22,6 @@ import org.apache.lucene.index.IndexableField; import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.document.Field; -import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilders; @@ -58,8 +57,6 @@ public static class Builder extends FieldMapper.Builder private BinaryFieldMapper.Builder binBuilder = new BinaryFieldMapper.Builder(Names.FIELD_BIN); private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL); - private GeoPointFieldMapper.Builder pointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT); - private LegacyGeoPointFieldMapper.Builder legacyPointBuilder = new LegacyGeoPointFieldMapper.Builder(Names.FIELD_POINT); private LatLonPointFieldMapper.Builder latLonPointBuilder = new LatLonPointFieldMapper.Builder(Names.FIELD_POINT); private GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); private Mapper.Builder stringBuilder; @@ -84,14 +81,7 @@ public ExternalMapper build(BuilderContext context) { context.path().add(name); BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); - BaseGeoPointFieldMapper pointMapper; - if (context.indexCreatedVersion().before(Version.V_2_2_0)) { - pointMapper = legacyPointBuilder.build(context); - } else if (context.indexCreatedVersion().onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - pointMapper = latLonPointBuilder.build(context); - } else { - pointMapper = pointBuilder.build(context); - } + BaseGeoPointFieldMapper pointMapper = latLonPointBuilder.build(context); GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context); FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java index 4c6418ee7ba51..2969b8392b5f2 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java @@ -27,13 +27,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.StringFieldType; import java.io.IOException; @@ -123,11 +121,6 @@ protected FakeStringFieldMapper(String simpleName, FakeStringFieldType fieldType super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); } - @Override - protected StringFieldMapper clone() { - return (StringFieldMapper) super.clone(); - } - 
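With the version branches removed, `ExternalMapper` always emits the Lucene points-based geo field, and the external-values tests above decode it back through `GeoPoint.resetFromIndexableField`. A short round-trip sketch under those assumptions (field name illustrative):

    Field pointField = new LatLonPoint("field.point", 42.0, 51.0);
    // LatLonPoint packs lat/lon into 32-bit ints, so decoding is slightly lossy;
    // the tests above therefore compare with closeTo(..., 1e-5).
    GeoPoint point = new GeoPoint().resetFromIndexableField(pointField);
    assertThat(point.lat(), closeTo(42.0, 1e-5));
    assertThat(point.lon(), closeTo(51.0, 1e-5));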
@Override protected boolean customBoost() { return true; @@ -135,25 +128,24 @@ protected boolean customBoost() { @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - StringFieldMapper.ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().boost()); - if (valueAndBoost.value() == null) { + String value; + if (context.externalValueSet()) { + value = context.externalValue().toString(); + } else { + value = context.parser().textOrNull(); + } + + if (value == null) { return; } + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType()); + Field field = new Field(fieldType().name(), value, fieldType()); fields.add(field); } if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); - } - } - - public static StringFieldMapper.ValueAndBoost parseCreateFieldForString(ParseContext context, float defaultBoost) throws IOException { - if (context.externalValueSet()) { - return new StringFieldMapper.ValueAndBoost(context.externalValue().toString(), defaultBoost); + fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(value))); } - XContentParser parser = context.parser(); - return new StringFieldMapper.ValueAndBoost(parser.textOrNull(), defaultBoost); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java deleted file mode 100644 index 4208975284229..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class FieldLevelBoostTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testBackCompatFieldLevelBoost() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") - .startObject("str_field").field("type", "string").endObject() - .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("byte_field").field("type", "byte").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("double_field").field("type", "double").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("float_field").field("type", "float").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("long_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject() - .string(); - - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = XContentFactory.jsonBuilder().startObject() - .startObject("str_field").field("boost", 2.0).field("value", "some name").endObject() - .startObject("int_field").field("boost", 3.0).field("value", 10).endObject() - .startObject("byte_field").field("boost", 4.0).field("value", 20).endObject() - .startObject("date_field").field("boost", 5.0).field("value", "2012-01-10").endObject() - .startObject("double_field").field("boost", 6.0).field("value", 30.0).endObject() - .startObject("float_field").field("boost", 7.0).field("value", 40.0).endObject() - .startObject("long_field").field("boost", 8.0).field("value", 50).endObject() - .startObject("short_field").field("boost", 9.0).field("value", 60).endObject() - .endObject() - .bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - IndexableField f = doc.getField("str_field"); - assertThat((double) f.boost(), closeTo(2.0, 0.001)); - - f = doc.getField("int_field"); - assertThat((double) f.boost(), closeTo(3.0, 0.001)); 
- - f = doc.getField("byte_field"); - assertThat((double) f.boost(), closeTo(4.0, 0.001)); - - f = doc.getField("date_field"); - assertThat((double) f.boost(), closeTo(5.0, 0.001)); - - f = doc.getField("double_field"); - assertThat((double) f.boost(), closeTo(6.0, 0.001)); - - f = doc.getField("float_field"); - assertThat((double) f.boost(), closeTo(7.0, 0.001)); - - f = doc.getField("long_field"); - assertThat((double) f.boost(), closeTo(8.0, 0.001)); - - f = doc.getField("short_field"); - assertThat((double) f.boost(), closeTo(9.0, 0.001)); - } - - public void testBackCompatFieldLevelMappingBoost() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") - .startObject("str_field").field("type", "keyword").field("boost", "2.0").endObject() - .startObject("int_field").field("type", "integer").field("boost", "3.0").endObject() - .startObject("byte_field").field("type", "byte").field("boost", "4.0").endObject() - .startObject("date_field").field("type", "date").field("boost", "5.0").endObject() - .startObject("double_field").field("type", "double").field("boost", "6.0").endObject() - .startObject("float_field").field("type", "float").field("boost", "7.0").endObject() - .startObject("long_field").field("type", "long").field("boost", "8.0").endObject() - .startObject("short_field").field("type", "short").field("boost", "9.0").endObject() - .endObject().endObject().endObject() - .string(); - - { - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = XContentFactory.jsonBuilder().startObject() - .field("str_field", "some name") - .field("int_field", 10) - .field("byte_field", 20) - .field("date_field", "2012-01-10") - .field("double_field", 30.0) - .field("float_field", 40.0) - .field("long_field", 50) - .field("short_field", 60) - .endObject() - .bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - IndexableField f = doc.getField("str_field"); - assertThat((double) f.boost(), closeTo(2.0, 0.001)); - - f = doc.getField("int_field"); - assertThat((double) f.boost(), closeTo(3.0, 0.001)); - - f = doc.getField("byte_field"); - assertThat((double) f.boost(), closeTo(4.0, 0.001)); - - f = doc.getField("date_field"); - assertThat((double) f.boost(), closeTo(5.0, 0.001)); - - f = doc.getField("double_field"); - assertThat((double) f.boost(), closeTo(6.0, 0.001)); - - f = doc.getField("float_field"); - assertThat((double) f.boost(), closeTo(7.0, 0.001)); - - f = doc.getField("long_field"); - assertThat((double) f.boost(), closeTo(8.0, 0.001)); - - f = doc.getField("short_field"); - assertThat((double) f.boost(), closeTo(9.0, 0.001)); - } - - { - DocumentMapper docMapper = createIndex("test2").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = XContentFactory.jsonBuilder().startObject() - .field("str_field", "some name") - .field("int_field", 10) - .field("byte_field", 20) - .field("date_field", "2012-01-10") - .field("double_field", 30.0) - .field("float_field", 40.0) - .field("long_field", 50) - .field("short_field", 60) - .endObject() - .bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - IndexableField f = doc.getField("str_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("int_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("byte_field"); - 
assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("date_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("double_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("float_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("long_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("short_field"); - assertThat(f.boost(), equalTo(1f)); - } - } - - public void testBackCompatInvalidFieldLevelBoost() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") - .startObject("str_field").field("type", "string").endObject() - .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("byte_field").field("type", "byte").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("double_field").field("type", "double").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("float_field").field("type", "float").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("long_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject() - .string(); - - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("str_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("int_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("byte_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("date_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("double_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("float_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", 
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java
deleted file mode 100644
index 4840dcc71a752..0000000000000
--- a/core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.common.unit.DistanceUnit.Distance;
-import org.elasticsearch.test.ESTestCase;
-
-import java.util.Arrays;
-
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-
-
-public class GeoEncodingTests extends ESTestCase {
-
-    public void test() {
-        for (int i = 0; i < 10000; ++i) {
-            final double lat = randomDouble() * 180 - 90;
-            final double lon = randomDouble() * 360 - 180;
-            final Distance precision = new Distance(1+(randomDouble() * 9), randomFrom(Arrays.asList(DistanceUnit.MILLIMETERS, DistanceUnit.METERS, DistanceUnit.KILOMETERS)));
-            final LegacyGeoPointFieldMapper.Encoding encoding = LegacyGeoPointFieldMapper.Encoding.of(precision);
-            assertThat(encoding.precision().convert(DistanceUnit.METERS).value, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value));
-            final GeoPoint geoPoint = encoding.decode(encoding.encodeCoordinate(lat), encoding.encodeCoordinate(lon), new GeoPoint());
-            final double error = GeoDistance.PLANE.calculate(lat, lon, geoPoint.lat(), geoPoint.lon(), DistanceUnit.METERS);
-            assertThat(error, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value));
-        }
-    }
-
-}
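The deleted GeoEncodingTests exercised the variable-precision coordinate encoding of LegacyGeoPointFieldMapper. LatLonPoint-based mappings instead encode coordinates at a fixed 32-bit precision. A rough standalone round-trip check against Lucene's GeoEncodingUtils, assuming Lucene 6.x on the classpath; the 1e-6 degree tolerance is an assumption for illustration, not a value from this patch:

    import org.apache.lucene.geo.GeoEncodingUtils;

    import java.util.Random;

    public class FixedPrecisionRoundTrip {
        public static void main(String[] args) {
            Random random = new Random(42);
            for (int i = 0; i < 10000; i++) {
                double lat = random.nextDouble() * 180 - 90;
                double lon = random.nextDouble() * 360 - 180;
                // encode to the 32-bit integer form used by LatLonPoint, then decode back
                double lat2 = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat));
                double lon2 = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon));
                if (Math.abs(lat - lat2) > 1e-6 || Math.abs(lon - lon2) > 1e-6) {
                    throw new AssertionError("round-trip error too large at " + lat + "," + lon);
                }
            }
            System.out.println("all round-trips within tolerance");
        }
    }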
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
index a94ff589228ac..d2a7e5a902a2d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.index.mapper;
 
 import org.apache.lucene.spatial.geopoint.document.GeoPointField;
-import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -31,7 +30,6 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
 import org.elasticsearch.test.VersionUtils;
@@ -39,20 +37,15 @@
 import org.hamcrest.CoreMatchers;
 
 import java.util.Collection;
-import java.util.List;
-import java.util.Map;
 
 import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
 import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
 
 public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 
@@ -61,115 +54,10 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
         return pluginList(InternalSettingsPlugin.class);
     }
 
-    public void testLegacyLatLonValues() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-            .endObject().endObject().string();
-
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
-
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        boolean indexCreatedBefore22 = version.before(Version.V_2_2_0);
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        final boolean stored = false;
-        assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(stored));
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(stored));
-        assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
-        if (indexCreatedBefore22) {
-            assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-        } else {
-            assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-        }
-    }
-
-    public void testLegacyLatLonValuesWithGeohash() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
-            .field("geohash", true).endObject().endObject()
-            .endObject().endObject().string();
-
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
-
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        if (version.onOrAfter(Version.V_5_0_0_alpha1)) {
-            assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2))));
-        } else {
-            assertThat(doc.rootDoc().get("point.geohash"), equalTo(stringEncode(1.3, 1.2)));
-        }
-    }
-
-    public void testLegacyLatLonInOneValueWithGeohash() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
-            .field("geohash", true).endObject().endObject().endObject().endObject().string();
-
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
-
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .field("point", "1.2,1.3")
-            .endObject()
-            .bytes());
-
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        if (version.onOrAfter(Version.V_5_0_0_alpha1)) {
-            assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2))));
-        } else {
-            assertThat(doc.rootDoc().get("point.geohash"), equalTo(stringEncode(1.3, 1.2)));
-        }
-    }
-
-    public void testLegacyGeoHashIndexValue() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
-            .field("geohash", true).endObject().endObject().endObject().endObject().string();
-
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
-
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .field("point", stringEncode(1.3, 1.2))
-            .endObject()
-            .bytes());
-
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        if (version.onOrAfter(Version.V_5_0_0_alpha1)) {
-            assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2))));
-        } else {
-            assertThat(doc.rootDoc().get("point.geohash"), equalTo(stringEncode(1.3, 1.2)));
-        }
-    }
-
     public void testGeoHashValue() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -181,204 +69,12 @@ public void testGeoHashValue() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        }
-    }
-
-    public void testNormalizeLegacyLatLonValuesDefault() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        // default to normalize
-        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            mapping.field("coerce", true);
-        }
-        mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject();
-
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
-
-        ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 91).field("lon", 181).endObject()
-            .endObject()
-            .bytes());
-
-        if (version.before(Version.V_2_2_0)) {
-            assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));
-        } else {
-            assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(89.0, 1.0)));
-        }
-
-        doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", -91).field("lon", -181).endObject()
-            .endObject()
-            .bytes());
-
-        if (version.before(Version.V_2_2_0)) {
-            assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));
-        } else {
-            assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-89.0, -1.0)));
-        }
-
-        doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 181).field("lon", 361).endObject()
-            .endObject()
-            .bytes());
-
-        if (version.before(Version.V_2_2_0)) {
-            assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
-        } else {
-            assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-1.0, -179.0)));
-        }
-    }
-
-    public void testLegacyValidateLatLonValues() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
-        if (version.before(Version.V_2_2_0)) {
-            mapping.field("coerce", false);
-        }
-        mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string();
-
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 90).field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", -91).field("lon", 1.3).endObject()
-                .endObject()
-                .bytes()));
-
-        expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", 91).field("lon", 1.3).endObject()
-                .endObject()
-                .bytes()));
-
-        expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", 1.2).field("lon", -181).endObject()
-                .endObject()
-                .bytes()));
-
-        expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", 1.2).field("lon", 181).endObject()
-                .endObject()
-                .bytes()));
-
-        MapperParsingException e = expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", "-").field("lon", 1.3).endObject()
-                .endObject()
-                .bytes()));
-        assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
-        assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
-
-        e = expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", 1.2).field("lon", "-").endObject()
-                .endObject()
-                .bytes()));
-        assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
-        assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
-
-        e = expectThrows(MapperParsingException.class, () ->
-            defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", "-").field("lon", "-").endObject()
-                .endObject()
-                .bytes()));
-        assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
-        assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
-    }
-
-    public void testNoValidateLegacyLatLonValues() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
-        if (version.before(Version.V_2_2_0)) {
-            mapping.field("coerce", false);
-        }
-        mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string();
-
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 90).field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", -91).field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 91).field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 1.2).field("lon", -181).endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 1.2).field("lon", 181).endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", "-").field("lon", 1.3).endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", 1.2).field("lon", "-").endObject()
-            .endObject()
-            .bytes());
-
-        defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("point").field("lat", "-").field("lon", "-").endObject()
-            .endObject()
-            .bytes());
     }
 
     public void testLatLonValuesStored() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -390,27 +86,12 @@ public void testLatLonValuesStored() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
-            assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-        }
     }
 
     public void testArrayLatLonValues() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false);
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -426,38 +107,13 @@ public void testArrayLatLonValues() throws Exception {
             .bytes());
 
         // doc values are enabled by default, but in this test we disable them; we should only have 2 points
         assertThat(doc.rootDoc().getFields("point"), notNullValue());
-        if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getFields("point").length, equalTo(4));
-        } else {
-            assertThat(doc.rootDoc().getFields("point").length, equalTo(2));
-        }
-        if (version.before(Version.V_5_0_0_alpha2)) {
-            assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
-            assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-            assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
-            assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
-            }
-        }
+        assertThat(doc.rootDoc().getFields("point").length, equalTo(4));
     }
 
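The flat assertions that replace the version dispatch above count Lucene fields directly: on a 5.x index each geo_point value expands to an indexed LatLonPoint plus, depending on the mapping, a doc-values field and a stored field. A sketch of that arithmetic with plain Lucene classes, assuming Lucene 6.2+ on the classpath; the field mix mirrors what the surrounding assertions count, it is not the mapper's exact code:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.LatLonDocValuesField;
    import org.apache.lucene.document.LatLonPoint;
    import org.apache.lucene.document.StoredField;

    public class GeoPointFieldCount {
        public static void main(String[] args) {
            // store=true, doc_values=false, two points -> 2 x (indexed point + stored value) = 4 fields
            Document doc = new Document();
            doc.add(new LatLonPoint("point", 1.2, 1.3));
            doc.add(new StoredField("point", "1.2,1.3"));
            doc.add(new LatLonPoint("point", 1.4, 1.5));
            doc.add(new StoredField("point", "1.4,1.5"));
            System.out.println(doc.getFields("point").length);    // 4

            // store=true, doc_values=true, one point -> indexed point + doc values + stored = 3 fields
            Document single = new Document();
            single.add(new LatLonPoint("point", 1.2, 1.3));
            single.add(new LatLonDocValuesField("point", 1.2, 1.3));
            single.add(new StoredField("point", "1.2,1.3"));
            System.out.println(single.getFields("point").length); // 3
        }
    }

The same arithmetic explains the equalTo(3) and equalTo(4) expectations in the stored-array tests below.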
     public void testLatLonInOneValue() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type",
@@ -470,24 +126,12 @@ public void testLatLonInOneValue() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-        }
     }
 
     public void testLatLonInOneValueStored() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type",
@@ -499,27 +143,12 @@ public void testLatLonInOneValueStored() throws Exception {
             .endObject()
             .bytes());
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()),
-                    equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-        }
     }
 
     public void testLatLonInOneValueArray() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false);
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type",
@@ -536,36 +165,13 @@ public void testLatLonInOneValueArray() throws Exception {
 
         // doc values are enabled by default, but in this test we disable them; we should only have 2 points
         assertThat(doc.rootDoc().getFields("point"), notNullValue());
-        if (version.before(Version.V_5_0_0_alpha2)) {
-            assertThat(doc.rootDoc().getFields("point").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
-            assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
-            assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
-            assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
-        } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getFields("point").length, equalTo(4));
-        }
-        if (version.before(Version.V_2_2_0)) {
-            assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
-        } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-        }
-        if (version.before(Version.V_2_2_0)) {
-            assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
-        } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
-        }
+        assertThat(doc.rootDoc().getFields("point").length, equalTo(4));
     }
 
     public void testLonLatArray() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -577,15 +183,6 @@ public void testLonLatArray() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-        }
     }
 
     public void testLonLatArrayDynamic() throws Exception {
@@ -593,9 +190,6 @@
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startArray("dynamic_templates").startObject().startObject("point").field("match", "point*")
             .startObject("mapping").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -607,24 +201,12 @@ public void testLonLatArrayDynamic() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-        }
     }
 
     public void testLonLatArrayStored() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
@@ -636,28 +218,13 @@ public void testLonLatArrayStored() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getField("point"), notNullValue());
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
-            assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-            assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-        } else {
-            assertThat(doc.rootDoc().getFields("point").length, equalTo(3));
-        }
+        assertThat(doc.rootDoc().getFields("point").length, equalTo(3));
     }
 
     public void testLonLatArrayArrayStored() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point");
-        if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            xContentBuilder = xContentBuilder.field("lat_lon", true);
-        }
         String mapping = xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject()
             .endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@@ -673,209 +240,7 @@ public void testLonLatArrayArrayStored() throws Exception {
             .bytes());
 
         assertThat(doc.rootDoc().getFields("point"), notNullValue());
-        if (version.before(Version.V_5_0_0_alpha2)) {
-            assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
-            assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
-            assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
-            }
-            assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
-            assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
-            if (version.before(Version.V_2_2_0)) {
-                assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-            } else {
-                assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
-            }
-        } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(4));
-        }
-    }
-
-
-    /**
-     * Test that expected exceptions are thrown when creating a new index with deprecated options
-     */
-    public void testOptionDeprecation() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
-        // test deprecation exceptions on newly created indexes
-        if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            try {
-                String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-                    .endObject().endObject().string();
-                parser.parse("type", new CompressedXContent(normalizeMapping));
-            } catch (MapperParsingException e) {
-                assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [lat_lon : true]");
-            }
-        }
-
-        if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-                .startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject().endObject().endObject()
-                .string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(normalizeMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]");
-        }
-
-        {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-                .startObject("point").field("type", "geo_point");
-            if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
-            }
-            String validateMapping = xContentBuilder.field("validate", true).endObject().endObject().endObject().endObject().string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(validateMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
-        }
-
-        {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point");
-            if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
-            }
-            String validateMapping = xContentBuilder.field("validate_lat", true).endObject().endObject().endObject().endObject().string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(validateMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
-        }
-
-        {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point");
-            if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
-            }
-            String validateMapping = xContentBuilder.field("validate_lon", true).endObject().endObject().endObject().endObject().string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(validateMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
-        }
-
-        // test deprecated normalize
-        {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point");
-            if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
-            }
-            String normalizeMapping = xContentBuilder.field("normalize", true).endObject().endObject().endObject().endObject().string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(normalizeMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
-        }
-
-        {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point");
-            if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
-            }
-            String normalizeMapping = xContentBuilder.field("normalize_lat", true).endObject().endObject().endObject().endObject().string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(normalizeMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
-        }
-
-        {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point");
-            if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
-            }
-            String normalizeMapping = xContentBuilder.field("normalize_lon", true).endObject().endObject().endObject().endObject().string();
-            Exception e = expectThrows(MapperParsingException.class, () ->
-                parser.parse("type", new CompressedXContent(normalizeMapping)));
-            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
-        }
-    }
-
-    public void testLegacyGeoPointMapperMerge() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
-            .field("geohash", true).endObject().endObject().endObject().endObject().string();
-        MapperService mapperService = createIndex("test", settings).mapperService();
-        DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
-        String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
-            .field("geohash", false).endObject().endObject().endObject().endObject().string();
-        Exception e = expectThrows(IllegalArgumentException.class, () ->
-            mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false));
-        assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
-        assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
-        assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
-
-        // correct mapping and ensure no failures
-        String stage2MappingCorrect = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
-            .field("geohash", true).endObject().endObject().endObject().endObject().string();
-        mapperService.merge("type", new CompressedXContent(stage2MappingCorrect), MapperService.MergeReason.MAPPING_UPDATE, false);
-    }
-
-    public void testLegacyGeoHashSearch() throws Exception {
-        // create a geo_point mapping with geohash enabled and random (between 1 and 12) geohash precision
-        int precision = randomIntBetween(1, 12);
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location")
-            .field("type", "geo_point").field("geohash", true).field("geohash_precision", precision).field("store", true).endObject()
-            .endObject().endObject().endObject().string();
-
-        // create index and add a test point (dr5regy6rc6z)
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha1);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
-            .addMapping("pin", mapping);
-        mappingRequest.execute().actionGet();
-        client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
-        client().prepareIndex("test", "pin", "1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 40.7143528)
-            .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get();
-
-        // match all search with geohash field
-        SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
-        Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields();
-
-        // ensure single geohash was indexed
-        assertEquals("dr5regy6rc6y".substring(0, precision), m.get("location.geohash").value());
-    }
-
-    public void testLegacyGeoHashSearchWithPrefix() throws Exception {
-        // create a geo_point mapping with geohash enabled and random (between 1 and 12) geohash precision
-        int precision = randomIntBetween(1, 12);
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location")
-            .field("type", "geo_point").field("geohash_prefix", true).field("geohash_precision", precision).field("store", true)
-            .endObject().endObject().endObject().endObject().string();
-
-        // create index and add a test point (dr5regy6rc6z)
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
-            .addMapping("pin", mapping);
-        mappingRequest.execute().actionGet();
-        client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
-        client().prepareIndex("test", "pin", "1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 40.7143528)
-            .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get();
-
-        // match all search with geohash field (includes prefixes)
-        SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
-        Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields();
-
-        List<Object> hashes = m.get("location.geohash").values();
-
-        final int numHashes = hashes.size();
-        for(int i=0; i
             () -> parser.parse("type", new CompressedXContent(mapping))
         );
         assertThat(e.getMessage(), containsString("name cannot be empty string"));
-
-        // before 5.x
-        Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
-        Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
-        DocumentMapperParser parser2x = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser();
-
-        DocumentMapper defaultMapper = parser2x.parse("type", new CompressedXContent(mapping));
-        assertEquals(mapping, defaultMapper.mappingSource().string());
     }
 }
4a22d56e8a941..572188d7a5de5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -451,16 +451,6 @@ public void testEmptyName() throws Exception { () -> parser.parse("type1", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - DocumentMapperParser parser2x = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser(); - - e = expectThrows(IllegalArgumentException.class, - () -> parser2x.parse("type1", new CompressedXContent(mapping)) - ); - assertThat(e.getMessage(), containsString("fieldName is required")); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java index eb74297e8e47d..f0a0b818f9d3f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java @@ -70,13 +70,4 @@ public void testIndexNotConfigurable() throws IOException { assertEquals("_index is not configurable", e.getMessage()); } - public void testBwCompatIndexNotConfigurable() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").endObject() - .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()) - .mapperService().documentMapperParser(); - parser.parse("type", new CompressedXContent(mapping)); // no exception - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 3d3a69ea80095..bdf08cce830c3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -313,14 +313,5 @@ public void testEmptyName() throws IOException { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 396cbe49ee9d0..d67244163d6dc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -268,37 +268,6 @@ public void testBoost() throws IOException { assertEquals(mapping, 
mapper.mappingSource().toString()); } - public void testBoostImplicitlyEnablesNormsOnOldIndex() throws IOException { - indexService = createIndex("test2", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()); - parser = indexService.mapperService().documentMapperParser(); - - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "keyword") - .field("boost", 2f) - .endObject() - .endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("boost", 2f) - .field("index", "not_analyzed") - .field("norms", true) - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(expectedMapping, mapper.mappingSource().toString()); - } - public void testEnableNorms() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject() @@ -334,24 +303,5 @@ public void testEmptyName() throws IOException { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - String downgradedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("") - .field("type", "string") - .field("index", "not_analyzed") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(downgradedMapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java deleted file mode 100644 index 2f3a4ca6fec9c..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.junit.Before; - -public class LegacyByteFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyByteFieldMapper.ByteFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue((byte)10); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - // bytes are stored as ints - assertEquals(Byte.valueOf((byte) 3), ft.valueForDisplay(Integer.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java deleted file mode 100644 index 19f67c488b1a5..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java +++ /dev/null @@ -1,495 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.util.Constants; -import org.elasticsearch.Version; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.LocaleUtils; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.Before; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import static 
com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; -import static org.elasticsearch.index.mapper.LegacyStringMappingTests.docValuesType; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testAutomaticDateParser() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").endObject() - .endObject().endObject().string(); - - IndexService index = createIndex("test", BW_SETTINGS); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field1", "2011/01/22") - .field("date_field2", "2011/01/22 00:00:00") - .field("wrong_date1", "-4") - .field("wrong_date2", "2012/2") - .field("wrong_date3", "2012/test") - .endObject() - .bytes()); - assertNotNull(doc.dynamicMappingsUpdate()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); - - defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1"); - assertThat(fieldMapper, instanceOf(LegacyDateFieldMapper.class)); - LegacyDateFieldMapper dateFieldMapper = (LegacyDateFieldMapper)fieldMapper; - assertEquals("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", dateFieldMapper.fieldType().dateTimeFormatter().format()); - assertEquals(1265587200000L, dateFieldMapper.fieldType().dateTimeFormatter().parser().parseMillis("1265587200000")); - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field2"); - assertThat(fieldMapper, instanceOf(LegacyDateFieldMapper.class)); - - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date1"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date2"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date3"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); - } - - public void testParseLocal() { - assertThat(Locale.GERMAN, equalTo(LocaleUtils.parse("de"))); - assertThat(Locale.GERMANY, equalTo(LocaleUtils.parse("de_DE"))); - assertThat(new Locale("de","DE","DE"), equalTo(LocaleUtils.parse("de_DE_DE"))); - - try { - LocaleUtils.parse("de_DE_DE_DE"); - fail(); - } catch(IllegalArgumentException ex) { - // expected - } - assertThat(Locale.ROOT, equalTo(LocaleUtils.parse(""))); - assertThat(Locale.ROOT, equalTo(LocaleUtils.parse("ROOT"))); - } - - public void testLocale() throws IOException { - assumeFalse("Locales are buggy on JDK9EA", Constants.JRE_IS_MINIMUM_JAVA9 && systemPropertyAsBoolean("tests.security.manager", false)); - String mapping = 
XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("date_field_default") - .field("type", "date") - .field("format", "E, d MMM yyyy HH:mm:ss Z") - .endObject() - .startObject("date_field_en") - .field("type", "date") - .field("format", "E, d MMM yyyy HH:mm:ss Z") - .field("locale", "EN") - .endObject() - .startObject("date_field_de") - .field("type", "date") - .field("format", "E, d MMM yyyy HH:mm:ss Z") - .field("locale", "DE_de") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field_en", "Wed, 06 Dec 2000 02:55:00 -0800") - .field("date_field_de", "Mi, 06 Dez 2000 02:55:00 -0800") - .field("date_field_default", "Wed, 06 Dec 2000 02:55:00 -0800") // check default - no exception is a success! - .endObject() - .bytes()); - assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_de"); - assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_default"); - } - - @Before - public void reset() { - i = 0; - } - - int i = 0; - - private DocumentMapper mapper(String indexName, String type, String mapping) throws IOException { - IndexService index = createIndex(indexName, BW_SETTINGS); - client().admin().indices().preparePutMapping(indexName).setType(type).setSource(mapping).get(); - return index.mapperService().documentMapper(type); - } - - private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException { - assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); - assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); - - TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); - tokenStream.reset(); - LegacyNumericTermAttribute nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); - List<Long> values = new ArrayList<>(); - while(tokenStream.incrementToken()) { - values.add(nta.getRawValue()); - } - - tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); - tokenStream.reset(); - nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); - int pos = 0; - while(tokenStream.incrementToken()) { - assertThat(values.get(pos++), equalTo(nta.getRawValue())); - } - assertThat(pos, equalTo(values.size())); - } - - public void testTimestampAsDate() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - long value = System.currentTimeMillis(); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", value) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - - public void testDateDetection() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("date_detection", false) - 
.startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2010-01-01") - .field("date_field_x", "2010-01-01") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("date_field"), equalTo("1262304000000")); - assertThat(doc.rootDoc().get("date_field_x"), equalTo("2010-01-01")); - } - - public void testHourFormat() throws Exception { - long nowInMillis = randomPositiveLong(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build(); - QueryShardContext context = new QueryShardContext(0, - new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null, - null, null, null, null, null, () -> nowInMillis); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("date_detection", false) - .startObject("properties").startObject("date_field").field("type", "date").field("format", "HH:mm:ss").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "10:00:00") - .endObject() - .bytes()); - assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()))); - - LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType() - .rangeQuery("10:00:00", "11:00:00", true, true, context); - assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(11).millis(), DateTimeZone.UTC).getMillis() + 999)); - assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())); - } - - public void testDayWithoutYearFormat() throws Exception { - long nowInMillis = randomPositiveLong(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build(); - QueryShardContext context = new QueryShardContext(0, - new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null, - null, null, null, null, null, () -> nowInMillis); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("date_detection", false) - .startObject("properties").startObject("date_field").field("type", "date").field("format", "MMM dd HH:mm:ss").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "Jan 02 10:00:00") - .endObject() - .bytes()); - assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), 
equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()))); - - LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType() - .rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, context); - assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(35).millis() + 999, DateTimeZone.UTC).getMillis())); - assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())); - } - - public void testIgnoreMalformedOption() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "date").field("ignore_malformed", true).endObject() - .startObject("field2").field("type", "date").field("ignore_malformed", false).endObject() - .startObject("field3").field("type", "date").endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "a") - .field("field2", "2010-01-01") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field1"), nullValue()); - assertThat(doc.rootDoc().getField("field2"), notNullValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), is("failed to parse [field2]")); - } - - // Verify that the default is false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), is("failed to parse [field3]")); - } - - // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("index.mapping.ignore_malformed", true) - .build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field3"), nullValue()); - - // This should still throw an exception, since field2 is specifically set to ignore_malformed=false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), is("failed to parse [field2]")); - } - } - - public void testThatMergingWorks() throws Exception { - String initialMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field").field("type", "date") - .field("format", "EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy") - .endObject() - .endObject() - .endObject().endObject().string(); - - String 
updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "date") - .field("format", "EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy||yyyy-MM-dd'T'HH:mm:ss.SSSZZ") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test1", "type", initialMapping); - DocumentMapper mergeMapper = mapper("test2", "type", updatedMapping); - - assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(LegacyDateFieldMapper.class))); - LegacyDateFieldMapper initialDateFieldMapper = (LegacyDateFieldMapper) defaultMapper.mappers().getMapper("field"); - Map<String, Object> config = getConfigurationViaXContent(initialDateFieldMapper); - assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy")); - - defaultMapper = defaultMapper.merge(mergeMapper.mapping(), false); - - assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(LegacyDateFieldMapper.class))); - - LegacyDateFieldMapper mergedFieldMapper = (LegacyDateFieldMapper) defaultMapper.mappers().getMapper("field"); - Map<String, Object> mergedConfig = getConfigurationViaXContent(mergedFieldMapper); - assertThat(mergedConfig.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy||yyyy-MM-dd'T'HH:mm:ss.SSSZZ")); - } - - public void testDefaultDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2010-01-01") - .endObject() - .bytes()); - ParseContext.Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, docValuesType(doc, "date_field")); - } - - private Map<String, Object> getConfigurationViaXContent(LegacyDateFieldMapper dateFieldMapper) throws IOException { - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - dateFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - Map<String, Object> dateFieldMapperMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - dateFieldMapperMap = parser.map(); - } - assertThat(dateFieldMapperMap, hasKey("field")); - assertThat(dateFieldMapperMap.get("field"), is(instanceOf(Map.class))); - return (Map<String, Object>) dateFieldMapperMap.get("field"); - } - - private static long getDateAsMillis(Document doc, String field) { - for (IndexableField f : doc.getFields(field)) { - if (f.numericValue() != null) { - return f.numericValue().longValue(); - } - } - throw new AssertionError("missing"); - } - - public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test1", "type", mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015060210") - .endObject(); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", document.bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), 
equalTo(1433239200000L)); - IndexResponse indexResponse = client().prepareIndex("test2", "test").setSource(document).get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - - // integers should always be parsed as well... cannot be sure it is a unix timestamp only - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", 2015060210) - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); - indexResponse = client().prepareIndex("test", "test").setSource(document).get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - } - - public void testThatNewIndicesOnlyAllowStrictDates() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - IndexService index = createIndex("test", BW_SETTINGS); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.format()); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - // also test normal date - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015-06-06T00:00:44.000Z") - .endObject() - .bytes()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "1-1-1T00:00:44.000Z") - .endObject() - .bytes()); - fail("non-strict date indexing should have failed"); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - } - - private void assertDateFormat(String expectedFormat) throws IOException { - GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").setTypes("type").get(); - Map<String, Object> mappingMap = response.getMappings().get("test").get("type").getSourceAsMap(); - Map<String, Object> properties = (Map<String, Object>) mappingMap.get("properties"); - Map<String, Object> dateField = (Map<String, Object>) properties.get("date_field"); - assertThat((String) dateField.get("format"), is(expectedFormat)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java deleted file mode 100644 index 10a2a331a792f..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.LegacyLongField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.MultiReader; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.joda.time.DateTimeZone; -import org.junit.Before; - -import java.io.IOException; -import java.util.Locale; -import java.util.concurrent.TimeUnit; - -public class LegacyDateFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyDateFieldMapper.DateFieldType(); - } - - private static long nowInMillis; - - @Before - public void setupProperties() { - setDummyNullValue(10); - addModifier(new Modifier("format", true) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyDateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); - } - }); - addModifier(new Modifier("locale", true) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyDateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); - } - }); - addModifier(new Modifier("numeric_resolution", true) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyDateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS); - } - }); - nowInMillis = randomPositiveLong(); - } - - public void testIsFieldWithinQueryEmptyReader() throws IOException { - IndexReader reader = new MultiReader(); - DateFieldType ft = new DateFieldType(); - ft.setName("my_date"); - assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - randomBoolean(), randomBoolean(), null, null, null)); - } - - private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, - DateTimeZone zone, DateMathParser alternateFormat) throws IOException { - QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, null, () -> nowInMillis); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.WITHIN, 
ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - true, true, null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - false, false, null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - false, true, null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - true, false, null, null, context)); - } - - public void testIsFieldWithinQuery() throws IOException { - Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); - long instant1 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12").getMillis(); - long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03").getMillis(); - Document doc = new Document(); - LegacyLongField field = new LegacyLongField("my_date", instant1, Store.NO); - doc.add(field); - w.addDocument(doc); - field.setLongValue(instant2); - w.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(w); - DateFieldType ft = new DateFieldType(); - ft.setName("my_date"); - DateMathParser alternateFormat = new DateMathParser(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER); - doTestIsFieldWithinQuery(ft, reader, null, null); - doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat); - IOUtils.close(reader, w, dir); - } - - public void testValueFormat() { - MappedFieldType ft = createDefaultFieldType(); - long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12T14:10:55").getMillis(); - assertEquals("2015-10-12T14:10:55.000Z", - ft.docValueFormat(null, DateTimeZone.UTC).format(instant)); - assertEquals("2015-10-12T15:10:55.000+01:00", - ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant)); - assertEquals("2015", - createDefaultFieldType().docValueFormat("YYYY", DateTimeZone.UTC).format(instant)); - assertEquals(instant, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null)); - assertEquals(instant + 999, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null)); - assertEquals(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-13").getMillis() - 1, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - String date = "2015-10-12T12:09:55.000Z"; - long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); - assertEquals(date, ft.valueForDisplay(instant)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java deleted file mode 100644 index 93ea0eb35fc06..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyDoubleFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyDoubleFieldMapper.DoubleFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue(10.0D); - } - - public void testIsFieldWithinQuery() throws IOException { - DoubleFieldType ft = new DoubleFieldType(); - // current impl ignores args and should always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Double.valueOf(1.2), ft.valueForDisplay(1.2)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java deleted file mode 100644 index a476c81fb4736..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper.FloatFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyFloatFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyFloatFieldMapper.FloatFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue(10.0f); - } - - public void testIsFieldWithinQuery() throws IOException { - FloatFieldType ft = new FloatFieldType(); - // current impl ignores args and should always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomFloat(), randomFloat(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Float.valueOf(1.2f), ft.valueForDisplay(1.2f)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java deleted file mode 100644 index a4d61956a6c6b..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.util.Collection; - -import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyGeohashMappingGeoPointTests extends ESSingleNodeTestCase { - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testGeoHashValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - .endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", stringEncode(1.3, 1.2)) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), nullValue()); - assertThat(doc.rootDoc().getField("point.lon"), nullValue()); - assertThat(doc.rootDoc().getField("point.geohash").stringValue(), equalTo(stringEncode(1.3, 1.2))); - assertThat(doc.rootDoc().get("point"), notNullValue()); - } - - public void testGeoHashPrecisionAsInteger() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - .field("geohash_precision", 10).endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); - assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); - BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; - assertThat(((LegacyGeoPointFieldType)geoPointFieldMapper.fieldType()).geoHashPrecision(), is(10)); - } - - public void testGeoHashPrecisionAsLength() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - 
.field("geohash_precision", "5m").endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); - assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); - BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; - assertThat(((LegacyGeoPointFieldType)geoPointFieldMapper.fieldType()).geoHashPrecision(), is(10)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java deleted file mode 100644 index 7fd6cfcfba8bf..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper.IntegerFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyIntegerFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyIntegerFieldMapper.IntegerFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue(10); - } - - public void testIsFieldWithinQuery() throws IOException { - IntegerFieldType ft = new IntegerFieldType(); - // current impl ignores args and should always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomInt(), randomInt(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Integer.valueOf(3), ft.valueForDisplay(Integer.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java deleted file mode 100644 index a78cb7a7177a2..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.LegacyIpFieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyIpFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testSimpleMapping() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("ip", "127.0.0.1") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("ip").numericValue().longValue(), is(2130706433L)); - assertThat(doc.rootDoc().get("ip"), is("2130706433")); - } - - public void testThatValidIpCanBeConvertedToLong() throws Exception { - assertThat(LegacyIpFieldMapper.ipToLong("127.0.0.1"), is(2130706433L)); - } - - public void testThatInvalidIpThrowsException() throws Exception { - try { - LegacyIpFieldMapper.ipToLong("127.0.011.1111111"); - fail("Expected ip address parsing to fail but did not happen"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("not a valid ip address")); - } - } - - public void testThatIpv6AddressThrowsException() throws Exception { - try { - LegacyIpFieldMapper.ipToLong("2001:db8:0:8d3:0:8a2e:70:7344"); - fail("Expected ip address parsing to fail but did not happen"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("not a valid ipv4 address")); - } - } - - public void testIgnoreMalformedOption() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field1") - .field("type", "ip").field("ignore_malformed", true).endObject().startObject("field2").field("type", "ip") - .field("ignore_malformed", false).endObject().startObject("field3").field("type", "ip").endObject().endObject().endObject() - .endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", - XContentFactory.jsonBuilder().startObject().field("field1", "").field("field2", "10.20.30.40").endObject().bytes()); - assertThat(doc.rootDoc().getField("field1"), nullValue()); - assertThat(doc.rootDoc().getField("field2"), notNullValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field2", "").endObject().bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - // Verify that the default is false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("index.mapping.ignore_malformed", true) - .build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes()); - assertThat(doc.rootDoc().getField("field3"), nullValue()); - - // This should still throw an exception, since field2 is specifically set to ignore_malformed=false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field2", "").endObject().bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java deleted file mode 100644 index 2177bcff67556..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyLongFieldMapper.LongFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyLongFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyLongFieldMapper.LongFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue((long)10); - } - - public void testIsFieldWithinQuery() throws IOException { - LongFieldType ft = new LongFieldType(); - // current impl ignores args and should always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomLong(), randomLong(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Long.valueOf(3), ft.valueForDisplay(Long.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java deleted file mode 100644 index 1ce13d5137a57..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java +++ /dev/null @@ -1,620 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.LegacyNumericTokenStream; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyNumberFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testIgnoreMalformedOption() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "integer").field("ignore_malformed", true).endObject() - .startObject("field2").field("type", "integer").field("ignore_malformed", false).endObject() - .startObject("field3").field("type", "integer").endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "a") - .field("field2", "1") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field1"), nullValue()); - assertThat(doc.rootDoc().getField("field2"), notNullValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(NumberFormatException.class)); - } - - // Verify that the default is false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), 
instanceOf(NumberFormatException.class)); - } - - // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("index.mapping.ignore_malformed", true).build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field3"), nullValue()); - - // This should still throw an exception, since field2 is specifically set to ignore_malformed=false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(NumberFormatException.class)); - } - } - - public void testCoerceOption() throws Exception { - String [] nonFractionNumericFieldTypes={"integer","long","short"}; - //Test coercion policies on all non-fraction numerics - DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser(); - for (String nonFractionNumericFieldType : nonFractionNumericFieldTypes) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("noErrorNoCoerceField").field("type", nonFractionNumericFieldType).field("ignore_malformed", true) - .field("coerce", false).endObject() - .startObject("noErrorCoerceField").field("type", nonFractionNumericFieldType).field("ignore_malformed", true) - .field("coerce", true).endObject() - .startObject("errorDefaultCoerce").field("type", nonFractionNumericFieldType).field("ignore_malformed", false).endObject() - .startObject("errorNoCoerce").field("type", nonFractionNumericFieldType).field("ignore_malformed", false) - .field("coerce", false).endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - //Test numbers passed as strings - String invalidJsonNumberAsString="1"; - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", invalidJsonNumberAsString) - .field("noErrorCoerceField", invalidJsonNumberAsString) - .field("errorDefaultCoerce", invalidJsonNumberAsString) - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("noErrorNoCoerceField"), nullValue()); - assertThat(doc.rootDoc().getField("noErrorCoerceField"), notNullValue()); - //Default is ignore_malformed=true and coerce=true - assertThat(doc.rootDoc().getField("errorDefaultCoerce"), notNullValue()); - - //Test valid case of numbers passed as numbers - int validNumber=1; - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", validNumber) - .field("noErrorCoerceField", validNumber) - .field("errorDefaultCoerce", validNumber) - .endObject() - .bytes()); - assertEquals(validNumber,doc.rootDoc().getField("noErrorNoCoerceField").numericValue().intValue()); - assertEquals(validNumber,doc.rootDoc().getField("noErrorCoerceField").numericValue().intValue()); - assertEquals(validNumber,doc.rootDoc().getField("errorDefaultCoerce").numericValue().intValue()); - - //Test valid case of negative numbers passed as numbers - int 
validNegativeNumber=-1; - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", validNegativeNumber) - .field("noErrorCoerceField", validNegativeNumber) - .field("errorDefaultCoerce", validNegativeNumber) - .endObject() - .bytes()); - assertEquals(validNegativeNumber,doc.rootDoc().getField("noErrorNoCoerceField").numericValue().intValue()); - assertEquals(validNegativeNumber,doc.rootDoc().getField("noErrorCoerceField").numericValue().intValue()); - assertEquals(validNegativeNumber,doc.rootDoc().getField("errorDefaultCoerce").numericValue().intValue()); - - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("errorNoCoerce", invalidJsonNumberAsString) - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - - //Test questionable case of floats passed to ints - float invalidJsonForInteger=1.9f; - int coercedFloatValue=1; //This is what the JSON parser will do to a float - truncate not round - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", invalidJsonForInteger) - .field("noErrorCoerceField", invalidJsonForInteger) - .field("errorDefaultCoerce", invalidJsonForInteger) - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("noErrorNoCoerceField"), nullValue()); - assertEquals(coercedFloatValue,doc.rootDoc().getField("noErrorCoerceField").numericValue().intValue()); - //Default is ignore_malformed=true and coerce=true - assertEquals(coercedFloatValue,doc.rootDoc().getField("errorDefaultCoerce").numericValue().intValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("errorNoCoerce", invalidJsonForInteger) - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - } - } - - public void testDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int1") - .field("type", "integer") - .endObject() - .startObject("int2") - .field("type", "integer") - .field("index", "no") - .endObject() - .startObject("double1") - .field("type", "double") - .endObject() - .startObject("double2") - .field("type", "integer") - .field("index", "no") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int1", "1234") - .field("double1", "1234") - .field("int2", "1234") - .field("double2", "1234") - .endObject() - .bytes()); - Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "int1")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "double1")); - assertEquals(DocValuesType.NONE, LegacyStringMappingTests.docValuesType(doc, "int2")); - assertEquals(DocValuesType.NONE, LegacyStringMappingTests.docValuesType(doc, "double2")); - } - - public void testUnIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - 
.startObject("properties") - .startObject("int") - .field("type", "integer") - .field("index", false) - .endObject() - .startObject("double") - .field("type", "double") - .field("index", false) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\",\"index\":false},\"int\":{\"type\":\"integer\",\"index\":false}}}}", - defaultMapper.mapping().toString()); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int", "1234") - .field("double", "1234") - .endObject() - .bytes()); - final Document doc = parsedDoc.rootDoc(); - for (IndexableField field : doc.getFields("int")) { - assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - } - for (IndexableField field : doc.getFields("double")) { - assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - } - } - - public void testBwCompatIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .field("index", "no") - .endObject() - .startObject("double") - .field("type", "double") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject().endObject().string(); - - Settings oldSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - DocumentMapper defaultMapper = createIndex("test", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\"},\"int\":{\"type\":\"integer\",\"index\":false}}}}", - defaultMapper.mapping().toString()); - } - - public void testDocValuesOnNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("nested") - .field("type", "nested") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .field("doc_values", true) - .endObject() - .startObject("double") - .field("type", "double") - .field("doc_values", true) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("nested") - .startObject() - .field("int", "1234") - .field("double", "1234") - .endObject() - .startObject() - .field("int", "-1") - .field("double", "-2") - .endObject() - .endArray() - .endObject() - .bytes()); - for (Document doc : parsedDoc.docs()) { - if (doc == parsedDoc.rootDoc()) { - continue; - } - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "nested.int")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "nested.double")); - } - } - - /** Test default precision step for autodetected numeric types */ - public void testPrecisionStepDefaultsDetected() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("numeric_detection", true) - 
.field("date_detection", true) - .endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("long", "100") - .field("double", "100.0") - .field("date", "2010-01-01") - .endObject() - .bytes()); - - assertEquals(1, doc.docs().size()); - Document luceneDoc = doc.docs().get(0); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); - assertThat(luceneDoc.getField("double").numericValue(), instanceOf(Float.class)); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("double")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); - } - - /** Test default precision step for numeric types */ - public void testPrecisionStepDefaultsMapped() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .endObject() - .startObject("float") - .field("type", "float") - .endObject() - .startObject("long") - .field("type", "long") - .endObject() - .startObject("double") - .field("type", "double") - .endObject() - .startObject("short") - .field("type", "short") - .endObject() - .startObject("byte") - .field("type", "byte") - .endObject() - .startObject("date") - .field("type", "date") - .endObject() - .startObject("ip") - .field("type", "ip") - .endObject() - - .endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int", "100") - .field("float", "100.0") - .field("long", "5000") - .field("double", "34.545") - .field("short", "1645") - .field("byte", "50") - .field("date", "2010-01-01") - .field("ip", "255.255.255.255") - .endObject() - .bytes()); - - assertEquals(1, doc.docs().size()); - Document luceneDoc = doc.docs().get(0); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("double")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("ip")); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("int")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("float")); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_16_BIT, luceneDoc.getField("short")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_8_BIT, luceneDoc.getField("byte")); - } - - /** Test precision step set to silly explicit values */ - public void testPrecisionStepExplicit() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .field("precision_step", 
"1") - .endObject() - .startObject("float") - .field("type", "float") - .field("precision_step", "2") - .endObject() - .startObject("long") - .field("type", "long") - .field("precision_step", "1") - .endObject() - .startObject("double") - .field("type", "double") - .field("precision_step", "2") - .endObject() - .startObject("short") - .field("type", "short") - .field("precision_step", "1") - .endObject() - .startObject("byte") - .field("type", "byte") - .field("precision_step", "2") - .endObject() - .startObject("date") - .field("type", "date") - .field("precision_step", "1") - .endObject() - .startObject("ip") - .field("type", "ip") - .field("precision_step", "2") - .endObject() - - .endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int", "100") - .field("float", "100.0") - .field("long", "5000") - .field("double", "34.545") - .field("short", "1645") - .field("byte", "50") - .field("date", "2010-01-01") - .field("ip", "255.255.255.255") - .endObject() - .bytes()); - - assertEquals(1, doc.docs().size()); - Document luceneDoc = doc.docs().get(0); - - assertPrecisionStepEquals(1, luceneDoc.getField("int")); - assertPrecisionStepEquals(2, luceneDoc.getField("float")); - assertPrecisionStepEquals(1, luceneDoc.getField("long")); - assertPrecisionStepEquals(2, luceneDoc.getField("double")); - assertPrecisionStepEquals(1, luceneDoc.getField("short")); - assertPrecisionStepEquals(2, luceneDoc.getField("byte")); - assertPrecisionStepEquals(1, luceneDoc.getField("date")); - assertPrecisionStepEquals(2, luceneDoc.getField("ip")); - - } - - /** checks precisionstep on both the fieldtype and the tokenstream */ - private static void assertPrecisionStepEquals(int expected, IndexableField field) throws IOException { - assertNotNull(field); - assertThat(field, instanceOf(Field.class)); - - // check fieldtype's precisionstep - assertEquals(expected, ((Field)field).fieldType().numericPrecisionStep()); - - // check the tokenstream actually used by the indexer - TokenStream ts = field.tokenStream(null, null); - assertThat(ts, instanceOf(LegacyNumericTokenStream.class)); - assertEquals(expected, ((LegacyNumericTokenStream)ts).getPrecisionStep()); - } - - public void testTermVectorsBackCompat() throws Exception { - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - doTestTermVectorsBackCompat(type); - } - } - - private void doTestTermVectorsBackCompat(String type) throws Exception { - DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .field("term_vector", "yes") - .endObject() - .endObject().endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mappingWithTV)); - fail(); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]")); - } - - Settings oldIndexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) - .build(); - parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - 
parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception - } - - public void testAnalyzerBackCompat() throws Exception { - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - doTestAnalyzerBackCompat(type); - } - } - - private void doTestAnalyzerBackCompat(String type) throws Exception { - DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .field("analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mappingWithTV)); - fail(); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]")); - } - - Settings oldIndexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) - .build(); - parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception - } - - - public void testIgnoreFielddata() throws IOException { - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - Settings oldIndexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) - .build(); - DocumentMapperParser parser = createIndex("index-" + type, oldIndexSettings).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .startObject("fielddata") - .field("loading", "eager") - .endObject() - .endObject() - .endObject().endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .endObject() - .endObject().endObject().endObject().string(); - assertEquals(expectedMapping, mapper.mappingSource().string()); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java deleted file mode 100644 index 2e22bac6e9582..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.junit.Before; - -public class LegacyShortFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyShortFieldMapper.ShortFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue((short)10); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - // shorts are stored as ints - assertEquals(Short.valueOf((short) 3), ft.valueForDisplay(Integer.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java deleted file mode 100644 index a158309829220..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java +++ /dev/null @@ -1,1196 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.IndexableFieldType; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.Mapper.BuilderContext; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.StringFieldMapper.Builder; -import org.elasticsearch.index.mapper.StringFieldMapper.StringFieldType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.junit.Before; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonList; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import 
static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyStringMappingTests extends ESSingleNodeTestCase { - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - IndexService indexService; - DocumentMapperParser parser; - - @Before - public void before() { - indexService = createIndex("test", - // we need 2.x since string is deprecated in 5.0 - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()); - parser = indexService.mapperService().documentMapperParser(); - } - - public void testLimit() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field"), notNullValue()); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "12345") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field"), notNullValue()); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123456") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field"), nullValue()); - } - - private void assertDefaultAnalyzedFieldType(IndexableFieldType fieldType) { - assertThat(fieldType.omitNorms(), equalTo(false)); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - } - - private void assertEquals(IndexableFieldType ft1, IndexableFieldType ft2) { - assertEquals(ft1.tokenized(), ft2.tokenized()); - assertEquals(ft1.omitNorms(), ft2.omitNorms()); - assertEquals(ft1.indexOptions(), ft2.indexOptions()); - assertEquals(ft1.storeTermVectors(), ft2.storeTermVectors()); - assertEquals(ft1.docValuesType(), ft2.docValuesType()); - } - - private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception { - String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(emptyMap())).endObject().string(); - mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2345") - .endObject() - .bytes()); - assertEquals(expected, doc.rootDoc().getField("field").fieldType()); - } - - public void testDefaultsForAnalyzed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1",
XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType(); - assertDefaultAnalyzedFieldType(fieldType); - assertParseIdemPotent(fieldType, defaultMapper); - } - - public void testDefaultsForNotAnalyzed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType(); - assertThat(fieldType.omitNorms(), equalTo(true)); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - assertParseIdemPotent(fieldType, defaultMapper); - - // now test it explicitly set - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject() - .endObject().endObject().string(); - - defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - fieldType = doc.rootDoc().getField("field").fieldType(); - assertThat(fieldType.omitNorms(), equalTo(false)); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - assertParseIdemPotent(fieldType, defaultMapper); - - // also test the deprecated omit_norms - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject() - .endObject().endObject().string(); - - defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - fieldType = doc.rootDoc().getField("field").fieldType(); - assertThat(fieldType.omitNorms(), equalTo(false)); - assertParseIdemPotent(fieldType, defaultMapper); - } - - public void testSearchQuoteAnalyzerSerialization() throws Exception { - // Cases where search_quote_analyzer should not be added to the mapping. 
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("position_increment_gap", 1000) - .endObject() - .startObject("field2") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .endObject() - .startObject("field3") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .field("search_analyzer", "simple") - .endObject() - .startObject("field4") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .field("search_analyzer", "simple") - .field("search_quote_analyzer", "simple") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) { - Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper); - assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer")); - } - - // Cases where search_quote_analyzer should be present. - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .field("search_analyzer", "standard") - .field("search_quote_analyzer", "simple") - .endObject() - .endObject() - .endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - Map<String, Object> serializedMap = getSerializedMap("field", mapper); - assertEquals(serializedMap.get("search_quote_analyzer"), "simple"); - } -
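[Editor's note, not part of the original patch: the rule the assertions above pin down is that search_quote_analyzer is only serialized when it differs from the search analyzer that would otherwise be inferred. A rough sketch of that check, illustrative only and not the actual StringFieldMapper serialization code:

    // emit search_quote_analyzer only when it adds information
    if (searchQuoteAnalyzer.name().equals(searchAnalyzer.name()) == false) {
        builder.field("search_quote_analyzer", searchQuoteAnalyzer.name());
    }
]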
- public void testSearchAnalyzerSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "standard") - .field("search_analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - // special case: default index analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "default") - .field("search_analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - // special case: default search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - // special case: default search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "keyword") - .field("search_analyzer", "default") - .endObject() - .endObject().endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - mapper = parser.parse("type", new CompressedXContent(mapping)); - - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); - builder.endObject(); - - String mappingString = builder.string(); - assertTrue(mappingString.contains("analyzer")); - assertTrue(mappingString.contains("search_analyzer")); - assertTrue(mappingString.contains("search_quote_analyzer")); - } - - private Map<String, Object> getSerializedMap(String fieldName, DocumentMapper mapper) throws Exception { - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(fieldName); - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - fieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - builder.close(); - - Map<String, Object> fieldMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - fieldMap = parser.map(); - } - @SuppressWarnings("unchecked") - Map<String, Object> result = (Map<String, Object>) fieldMap.get(fieldName); - return result; - } - - public void testTermVectors() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("term_vector", "no") - .endObject() - .startObject("field2") - .field("type", "string") - .field("term_vector", "yes") - .endObject() - .startObject("field3") - .field("type", "string") - .field("term_vector", "with_offsets") - .endObject() - .startObject("field4") - .field("type", "string") - .field("term_vector", "with_positions") - .endObject() - .startObject("field5") - .field("type", "string") - .field("term_vector", "with_positions_offsets") - .endObject() - .startObject("field6") - .field("type", "string") - .field("term_vector", "with_positions_offsets_payloads") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "1234") - .field("field2", "1234") - .field("field3", "1234") - .field("field4", "1234") - .field("field5", "1234") - .field("field6", "1234") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false)); - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false)); - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false)); - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false)); - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false)); - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false)); - -
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true)); - assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false)); - assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false)); - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true)); - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true)); - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true)); - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true)); - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true)); - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true)); - } - - public void testDocValues() throws Exception { - // doc values only work on non-analyzed content - final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - try { - new StringFieldMapper.Builder("anything").docValues(true).build(ctx); - fail(); - } catch (Exception e) { /* OK */ } - - assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("str1") - .field("type", "string") - .field("index", "no") - .endObject() - .startObject("str2") - .field("type", "string") - .field("index", "not_analyzed") - .endObject() - .startObject("str3") - .field("type", "string") - .field("index", "analyzed") - .endObject() - .startObject("str4") - .field("type", "string") - .field("index", "not_analyzed") - .field("doc_values", false) - .endObject() - .startObject("str5") - .field("type", "string") - .field("index", "no") - .field("doc_values", false) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("str1", "1234") - .field("str2", "1234") - .field("str3", "1234") - .field("str4", "1234") - .field("str5", "1234") - .endObject() - .bytes()); - final 
Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str1")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str3")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str4")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str5")); - - } - - public void testBwCompatDocValues() throws Exception { - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - // doc values only work on non-analyzed content - final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - try { - new StringFieldMapper.Builder("anything").docValues(true).build(ctx); - fail(); - } catch (Exception e) { /* OK */ } - - assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("str1") - .field("type", "string") - .field("index", "no") - .endObject() - .startObject("str2") - .field("type", "string") - .field("index", "not_analyzed") - .endObject() - .startObject("str3") - .field("type", "string") - .field("index", "analyzed") - .endObject() - .startObject("str4") - .field("type", "string") - .field("index", "not_analyzed") - .field("doc_values", false) - .endObject() - .startObject("str5") - .field("type", "string") - .field("index", "no") - .field("doc_values", true) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("str1", "1234") - .field("str2", "1234") - .field("str3", "1234") - .field("str4", "1234") - .field("str5", "1234") - .endObject() - .bytes()); - final Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str1")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str3")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str4")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str5")); - - } - - // TODO: this function shouldn't be necessary. 
parsing should just add a single field that is indexed and dv - public static DocValuesType docValuesType(Document document, String fieldName) { - for (IndexableField field : document.getFields(fieldName)) { - if (field.fieldType().docValuesType() != DocValuesType.NONE) { - return field.fieldType().docValuesType(); - } - } - return DocValuesType.NONE; - } - - public void testDisableNorms() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").endObject().endObject() - .endObject().endObject().string(); - - MapperService mapperService = indexService.mapperService(); - DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType(); - assertEquals(false, fieldType.omitNorms()); - - String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() - .endObject().endObject().endObject().endObject().string(); - defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - fieldType = doc.rootDoc().getField("field").fieldType(); - assertEquals(true, fieldType.omitNorms()); - - updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() - .endObject().endObject().endObject().endObject().string(); - try { - mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("different [norms]")); - } - } - - /** - * Test that expected exceptions are thrown when creating a new index with position_offset_gap - */ - public void testPositionOffsetGapDeprecation() throws Exception { - // test deprecation exceptions on newly created indexes - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("position_increment_gap", 10) - .endObject() - .startObject("field2") - .field("type", "string") - .field("position_offset_gap", 50) - .field("analyzer", "standard") - .endObject().endObject().endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mapping)); - fail("Mapping definition should fail with the position_offset_gap setting"); - }catch (MapperParsingException e) { - assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]"); - } - } - - public void testFielddataLoading() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata") - .field("loading", 
"eager_global_ordinals") - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("eager_global_ordinals", true) - .endObject().endObject() - .endObject().endObject().string(); - - assertEquals(expectedMapping, mapper.mappingSource().toString()); - assertTrue(mapper.mappers().getMapper("field").fieldType().eagerGlobalOrdinals()); - } - - public void testFielddataFilter() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata") - .startObject("filter") - .startObject("frequency") - .field("min", 2d) - .field("min_segment_size", 1000) - .endObject() - .startObject("regex") - .field("pattern", "^#.*") - .endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata_frequency_filter") - .field("min", 2d) - .field("min_segment_size", 1000) - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - assertEquals(expectedMapping, mapper.mappingSource().toString()); - StringFieldType fieldType = (StringFieldType) mapper.mappers().getMapper("field").fieldType(); - assertThat(fieldType.fielddataMinFrequency(), equalTo(2d)); - assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE)); - assertThat(fieldType.fielddataMinSegmentSize(), equalTo(1000)); - } - - public void testDisabledFielddata() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata") - .field("format", "disabled") - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject().endObject() - .endObject().endObject().string(); - - assertEquals(expectedMapping, mapper.mappingSource().toString()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> mapper.mappers().getMapper("field").fieldType().fielddataBuilder()); - assertThat(e.getMessage(), containsString("Fielddata is disabled")); - } - - public void testNonAnalyzedFieldPositionIncrement() throws IOException { - for (String index : Arrays.asList("no", "not_analyzed")) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("index", index) - .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - 
assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); - } - } - - public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { - for (String indexOptions : Arrays.asList("docs", "freqs")) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("index_options", indexOptions) - .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); - } - } - - public void testKeywordFieldAsStringWithUnsupportedField() throws IOException { - String mapping = mappingForTestField(b -> b.field("type", "keyword").field("fielddata", true)).string(); - Exception e = expectThrows(IllegalArgumentException.class, () -> parser.parse("test_type", new CompressedXContent(mapping))); - assertEquals("Automatic downgrade from [keyword] to [string] failed because parameters [fielddata] are not supported for " - + "automatic downgrades.", e.getMessage()); - } -
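[Editor's note, not part of the original patch: the testMergeKeywordInto* tests below exercise the "automatic downgrade" path this commit removes, in which a 5.x-style keyword definition submitted against a 2.x index was rewritten to its legacy string equivalent before merging. As an illustrative sketch only, in the same XContentBuilder style the tests use, the equivalence being asserted is roughly:

    // a 5.x-style definition submitted against a 2.x index...
    b.field("type", "keyword");
    // ...was accepted as if it had been written in 2.x terms:
    b.field("type", "string");
    b.field("index", "not_analyzed");
    b.field("fielddata", false);

Parameters with no string equivalent (for example fielddata on keyword, as asserted just above) made the downgrade fail with an exception rather than being silently dropped.]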
- public void testMergeKeywordIntoString() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword")); - } - - public void testMergeKeywordIntoStringWithIndexFalse() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "no"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "no")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("index", false)); - } - - public void testMergeKeywordIntoStringWithStore() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("store", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("store", true)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("store", true)); - } - - public void testMergeKeywordIntoStringWithDocValues() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("doc_values", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("doc_values", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("doc_values", false)); - } - - public void testMergeKeywordIntoStringWithNorms() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("norms", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("norms", true)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("norms", true)); - // norms can be an array but it'll just get squashed into true/false - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed") - .startObject("norms") - .field("enabled", true) - .field("loading", randomAsciiOfLength(5)) // Totally ignored even though it used to be eager/lazy - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword") - .startObject("norms") - .field("enabled", true) - .field("loading", randomAsciiOfLength(5)) - .endObject()); - } - - public void testMergeKeywordIntoStringWithBoost() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("boost", 1.5); - expectedMapping.put("norms", true); // Implied by having a boost - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("boost", 1.5)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("boost", 1.5)); - expectedMapping.put("boost", 1.4); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("boost", 1.4)); - } - - public void testMergeKeywordIntoStringWithFields() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - Map<String, Object> expectedFields = new HashMap<>(); - expectedMapping.put("fields", expectedFields); - Map<String, Object> expectedFoo = new HashMap<>(); - expectedFields.put("foo", expectedFoo); - expectedFoo.put("type", "string"); - expectedFoo.put("analyzer", "standard"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - - Map<String, Object> expectedBar = new HashMap<>(); - expectedFields.put("bar", expectedBar); - expectedBar.put("type", "string"); - expectedBar.put("analyzer", "whitespace"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - } -
"another_field")); - } - - public void testMergeKeywordIntoStringWithIncludeInAll() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("include_in_all", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("include_in_all", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("include_in_all", false)); - } - - public void testMergeKeywordIntoStringWithIgnoreAbove() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("ignore_above", 128); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("ignore_above", 128)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("ignore_above", 128)); - } - - public void testMergeKeywordIntoStringWithIndexOptions() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("index_options", "freqs"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("index_options", "freqs")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("index_options", "freqs")); - } - - public void testMergeKeywordIntoStringWithSimilarity() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("similarity", "BM25"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("similarity", "BM25")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("similarity", "BM25")); - } - - public void testTextFieldAsStringWithUnsupportedField() throws IOException { - String mapping = mappingForTestField(b -> b.field("type", "text").field("null_value", "kitten")).string(); - Exception e = expectThrows(IllegalArgumentException.class, () -> parser.parse("test_type", new CompressedXContent(mapping))); - assertEquals("Automatic downgrade from [text] to [string] failed because parameters [null_value] are not supported for " - + "automatic downgrades.", e.getMessage()); - } - - public void testMergeTextIntoString() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true)); - } - - public void testMergeTextIntoStringWithStore() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("store", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("store", true)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("store", true).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithDocValues() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - mergeMappingStep(expectedMapping, b 
- public void testMergeTextIntoString() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true)); - } - - public void testMergeTextIntoStringWithStore() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("store", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("store", true)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("store", true).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithDocValues() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("doc_values", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("doc_values", false).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithNorms() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("norms", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("norms", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("norms", false).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithBoost() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("boost", 1.5); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("boost", 1.5)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("boost", 1.5).field("fielddata", true)); - expectedMapping.put("boost", 1.4); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("boost", 1.4).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithFields() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - Map<String, Object> expectedFields = new HashMap<>(); - expectedMapping.put("fields", expectedFields); - Map<String, Object> expectedFoo = new HashMap<>(); - expectedFields.put("foo", expectedFoo); - expectedFoo.put("type", "string"); - expectedFoo.put("analyzer", "standard"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true) - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - - Map<String, Object> expectedBar = new HashMap<>(); - expectedFields.put("bar", expectedBar); - expectedBar.put("type", "string"); - expectedBar.put("analyzer", "whitespace"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true) - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - } - - public void testMergeTextIntoStringWithCopyTo() throws IOException { - Map<String, Object> expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("copy_to", singletonList("another_field")); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("copy_to", "another_field")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("copy_to", "another_field").field("fielddata", true)); - } -
mergeMappingStep(expectedMapping, b -> b.field("type", "text")); - } - - public void testMergeTextIntoStringWithEagerGlobalOrdinals() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("eager_global_ordinals", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").startObject("fielddata") - .field("format", "pagedbytes") - .field("loading", "eager_global_ordinals") - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true).field("eager_global_ordinals", true)); - } - - public void testMergeTextIntoStringWithFielddataFrequencyFilter() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - Map fielddataFrequencyFilter = new HashMap<>(); - expectedMapping.put("fielddata_frequency_filter", fielddataFrequencyFilter); - fielddataFrequencyFilter.put("min", 0.001); - fielddataFrequencyFilter.put("max", 0.1); - fielddataFrequencyFilter.put("min_segment_size", 100); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").startObject("fielddata") - .field("format", "pagedbytes") - .startObject("filter") - .startObject("frequency") - .field("min", 0.001) - .field("max", 0.1) - .field("min_segment_size", 100) - .endObject() - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true) - .startObject("fielddata_frequency_filter") - .field("min", 0.001) - .field("max", 0.1) - .field("min_segment_size", 100) - .endObject()); - } - - public void testMergeTextIntoStringWithIncludeInAll() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("include_in_all", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("include_in_all", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("include_in_all", false).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithSearchQuoteAnayzer() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("analyzer", "standard"); - expectedMapping.put("search_analyzer", "whitespace"); - expectedMapping.put("search_quote_analyzer", "keyword"); - mergeMappingStep(expectedMapping, b -> b - .field("type", "string") - .field("analyzer", "standard") - .field("search_analyzer", "whitespace") - .field("search_quote_analyzer", "keyword")); - mergeMappingStep(expectedMapping, b -> b - .field("type", "text") - .field("analyzer", "standard") - .field("search_analyzer", "whitespace") - .field("search_quote_analyzer", "keyword") - .field("fielddata", true)); - } - - public void testMergeTextIntoStringWithIndexOptions() throws IOException { - String indexOptions = randomIndexOptions(); - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - if (false == "positions".equals(indexOptions)) { - expectedMapping.put("index_options", indexOptions); - } - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index_options", indexOptions)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("index_options", indexOptions).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithPositionIncrementGap() throws IOException { - int positionIncrementGap = between(0, 10000); - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - 
expectedMapping.put("position_increment_gap", positionIncrementGap); - mergeMappingStep(expectedMapping, b -> b - .field("type", "string") - .field("position_increment_gap", positionIncrementGap)); - mergeMappingStep(expectedMapping, b -> b - .field("type", "text") - .field("position_increment_gap", positionIncrementGap) - .field("fielddata", true)); - } - - public void testMergeStringIntoStringWithSimilarity() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("similarity", "BM25"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("similarity", "BM25")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("similarity", "BM25").field("fielddata", true)); - } - - private interface FieldBuilder { - void populateMappingForField(XContentBuilder b) throws IOException; - } - private void mergeMappingStep(Map expectedMapping, FieldBuilder fieldBuilder) throws IOException { - XContentBuilder b = mappingForTestField(fieldBuilder); - if (logger.isInfoEnabled()) { - logger.info("--> Updating mapping to {}", b.string()); - } - assertAcked(client().admin().indices().preparePutMapping("test").setType("test_type").setSource(b)); - GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap index = response.getMappings().get("test"); - assertNotNull("mapping for index not found", index); - MappingMetaData type = index.get("test_type"); - assertNotNull("mapping for type not found", type); - Map properties = (Map) type.sourceAsMap().get("properties"); - assertEquals(expectedMapping, properties.get("test_field")); - } - - private XContentBuilder mappingForTestField(FieldBuilder fieldBuilder) throws IOException { - XContentBuilder b = JsonXContent.contentBuilder(); - b.startObject(); { - b.startObject("test_type"); { - b.startObject("properties"); { - b.startObject("test_field"); { - fieldBuilder.populateMappingForField(b); - } - b.endObject(); - } - b.endObject(); - } - b.endObject(); - } - return b.endObject(); - } - - private String randomIndexOptions() { - IndexOptions options = randomValueOtherThan(IndexOptions.NONE, () -> randomFrom(IndexOptions.values())); - switch (options) { - case DOCS: - return "docs"; - case DOCS_AND_FREQS: - return "freqs"; - case DOCS_AND_FREQS_AND_POSITIONS: - return "positions"; - case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS: - return "offsets"; - default: - throw new IllegalArgumentException("Unknown options [" + options + "]"); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java deleted file mode 100644 index 125c6fbc8302a..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.CannedTokenStream;
-import org.apache.lucene.analysis.MockTokenizer;
-import org.apache.lucene.analysis.Token;
-import org.apache.lucene.analysis.TokenStream;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.LegacyTokenCountFieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESSingleNodeTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-
-import static org.hamcrest.Matchers.equalTo;
-
-/**
- * Test for {@link LegacyTokenCountFieldMapper}.
- */
-public class LegacyTokenCountFieldMapperTests extends ESSingleNodeTestCase {
-
-    private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
-
-    @Override
-    protected Collection<Class<? extends Plugin>> getPlugins() {
-        return pluginList(InternalSettingsPlugin.class);
-    }
-
-    public void testMerge() throws IOException {
-        String stage1Mapping = XContentFactory.jsonBuilder().startObject()
-                .startObject("person")
-                    .startObject("properties")
-                        .startObject("tc")
-                            .field("type", "token_count")
-                            .field("analyzer", "keyword")
-                        .endObject()
-                    .endObject()
-                .endObject().endObject().string();
-        MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
-        DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
-
-        String stage2Mapping = XContentFactory.jsonBuilder().startObject()
-                .startObject("person")
-                    .startObject("properties")
-                        .startObject("tc")
-                            .field("type", "token_count")
-                            .field("analyzer", "standard")
-                        .endObject()
-                    .endObject()
-                .endObject().endObject().string();
-        DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
-
-        // previous mapper has not been modified
-        assertThat(((LegacyTokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
-        // but the new one has the change
-        assertThat(((LegacyTokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
-    }
-
-    public void testCountPositions() throws IOException {
-        // We're looking to make sure that we:
-        Token t1 = new Token(); // Don't count tokens without an increment
-        t1.setPositionIncrement(0);
-        Token t2 = new Token();
-        t2.setPositionIncrement(1); // Count normal tokens with one increment
-        Token t3 = new Token();
-        t3.setPositionIncrement(2); // Count funny tokens with more than one increment
-        int finalTokenIncrement = 4; // Count the final token increment on the rare token streams that have them
-        Token[] tokens = new Token[] {t1, t2, t3};
-        Collections.shuffle(Arrays.asList(tokens), random());
-        final TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens);
-        // TODO: we have no CannedAnalyzer?
-        Analyzer analyzer = new Analyzer() {
-            @Override
-            public TokenStreamComponents createComponents(String fieldName) {
-                return new TokenStreamComponents(new MockTokenizer(), tokenStream);
-            }
-        };
-        assertThat(LegacyTokenCountFieldMapper.countPositions(analyzer, "", ""), equalTo(7));
-    }
-}
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index 4eaf58a602f2c..a6270dfc953f9 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -119,7 +119,7 @@ public void testTotalFieldsExceedsLimit() throws Throwable {
         Function<String, String> mapping = type -> {
             try {
                 return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties")
-                        .startObject("field1").field("type", "string")
+                        .startObject("field1").field("type", "keyword")
                         .endObject().endObject().endObject().endObject().string();
             } catch (IOException e) {
                 throw new UncheckedIOException(e);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java
index 8ef5bffba501a..5a5e2ddb509a3 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java
@@ -20,19 +20,13 @@
 package org.elasticsearch.index.mapper;
 
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.MapperTestUtils;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.core.IsEqual.equalTo;
@@ -41,23 +35,15 @@ public class MultiFieldCopyToMapperTests extends ESTestCase {
 
     public void testExceptionForCopyToInMultiFields() throws IOException {
         XContentBuilder mapping = createMappinmgWithCopyToInMultiField();
-        Tuple<List<Version>, List<Version>> versionsWithAndWithoutExpectedExceptions = versionsWithAndWithoutExpectedExceptions();
 
         // first check that for newer versions we throw exception if copy_to is found within multi field
-        Version indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v1());
-        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build());
+        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY);
         try {
             mapperService.parse("type", new CompressedXContent(mapping.string()), true);
             fail("Parsing should throw an exception because the mapping contains a copy_to in a multi field");
         } catch (MapperParsingException e) {
             assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c] which is within a multi field."));
         }
-
-        // now test that with an older version the parsing just works
-        indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v2());
-        mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build());
-        DocumentMapper documentMapper = mapperService.parse("type", new CompressedXContent(mapping.string()), true);
-        assertFalse(documentMapper.mapping().toString().contains("copy_to"));
     }
 
     private static XContentBuilder createMappinmgWithCopyToInMultiField() throws IOException {
@@ -83,20 +69,4 @@ private static XContentBuilder createMappinmgWithCopyToInMultiField() throws IOE
         return mapping;
     }
 
-    // returns a tuple where
-    // v1 is a list of versions for which we expect an exception when a copy_to in multi fields is found and
-    // v2 is older versions where we throw no exception and we just log a warning
-    private static Tuple<List<Version>, List<Version>> versionsWithAndWithoutExpectedExceptions() {
-        List<Version> versionsWithException = new ArrayList<>();
-        List<Version> versionsWithoutException = new ArrayList<>();
-        for (Version version : VersionUtils.allReleasedVersions()) {
-            if (version.after(Version.V_2_1_0) ||
-                    (version.after(Version.V_2_0_1) && version.before(Version.V_2_1_0))) {
-                versionsWithException.add(version);
-            } else {
-                versionsWithoutException.add(version);
-            }
-        }
-        return new Tuple<>(versionsWithException, versionsWithoutException);
-    }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
index 070b5b2852ce0..f639b9c2041d6 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
@@ -328,4 +328,18 @@ protected void doTestNullValue(String type) throws IOException {
         assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
         assertFalse(dvField.fieldType().stored());
     }
+
+    public void testEmptyName() throws IOException {
+        // after version 5
+        for (String type : TYPES) {
+            String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("").field("type", type).endObject().endObject()
+                    .endObject().endObject().string();
+
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(mapping))
+            );
+            assertThat(e.getMessage(), containsString("name cannot be empty string"));
+        }
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
index 68959ccc6842a..e974a02943b37 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
@@ -207,23 +207,6 @@ public void testEmptyName() throws Exception {
             createIndex("test").mapperService().documentMapperParser().parse("", new CompressedXContent(mapping));
         });
         assertThat(e.getMessage(), containsString("name cannot be empty string"));
-
-        // empty name allowed in index created before 5.0
-        Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
-        Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
-        DocumentMapperParser parser = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser();
-
-        DocumentMapper defaultMapper = parser.parse("", new CompressedXContent(mapping));
-        String downgradedMapping = XContentFactory.jsonBuilder().startObject()
-                .startObject("")
-                    .startObject("properties")
-                        .startObject("name")
-                            .field("type", "string")
-                            .field("fielddata", false)
-                        .endObject()
-                    .endObject()
-                .endObject().endObject().string();
-        assertEquals(downgradedMapping, defaultMapper.mappingSource().string());
     }
 
     @Override
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java
index 9f026c5992242..9ee9ed16bd691 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java
@@ -19,10 +19,7 @@
 package org.elasticsearch.index.mapper;
 
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
@@ -46,8 +43,6 @@
 import static java.util.Collections.emptyList;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
 
 public class ParentFieldMapperTests extends ESSingleNodeTestCase {
@@ -130,41 +125,4 @@ private static int getNumberOfFieldWithParentPrefix(ParseContext.Document doc) {
         return numFieldWithParentPrefix;
     }
 
-    public void testPost2Dot0LazyLoading() {
-        ParentFieldMapper.Builder builder = new ParentFieldMapper.Builder("child");
-        builder.type("parent");
-        builder.eagerGlobalOrdinals(false);
-
-        ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));
-
-        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
-        assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
-        assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
-
-        assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
-        assertThat(parentFieldMapper.fieldType().eagerGlobalOrdinals(), equalTo(false));
-        assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
-        assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
-    }
-
-    public void testPost2Dot0EagerLoading() {
-        ParentFieldMapper.Builder builder = new ParentFieldMapper.Builder("child");
-        builder.type("parent");
-        builder.eagerGlobalOrdinals(true);
-
-        ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0)));
-
-        assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child"));
-        assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
-        assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
-
-        assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
-        assertThat(parentFieldMapper.fieldType().eagerGlobalOrdinals(), equalTo(true));
-        assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
-        assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
-    }
-
-    private static Settings post2Dot0IndexSettings() {
-        return Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0).build();
-    }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java
index 3556cea23adf8..0772ca5cca3d7 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java
@@ -21,29 +21,21 @@
 
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.VersionUtils;
 import org.junit.Before;
 
 import java.io.IOException;
 import java.util.Collection;
 
-import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
 import static org.hamcrest.Matchers.containsString;
 
 public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
@@ -365,14 +357,5 @@ public void testEmptyName() throws IOException {
             () -> parser.parse("type", new CompressedXContent(mapping))
         );
         assertThat(e.getMessage(), containsString("name cannot be empty string"));
-
-        // before 5.x
-        Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
-        Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
-        indexService = createIndex("test_old", oldIndexSettings);
-        parser = indexService.mapperService().documentMapperParser();
-
-        DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
-        assertEquals(mapping, defaultMapper.mappingSource().toString());
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
index c6f9615623c0e..8aa3d25aebe33 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
@@ -68,18 +68,6 @@ public void testNoFormat() throws Exception {
         assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE));
     }
 
-    public void testFormatBackCompat() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("_source").field("format", "json").endObject()
"json").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_2_0)) - .build(); - - DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - parser.parse("type", new CompressedXContent(mapping)); // no exception - } - public void testIncludes() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", new String[]{"path1*"}).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java deleted file mode 100644 index 6a68c537da116..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.containsString; - -/** - * Tests that position_increment_gap is read from the mapper and applies as - * expected in queries. - */ -public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - /** - * The default position_increment_gap should be large enough that most - * "sensible" queries phrase slops won't match across values. - */ - public void testDefault() throws IOException { - assertGapIsOneHundred(client(), "test", "test"); - } - - /** - * Asserts that the post-2.0 default is being applied. 
- */ - public static void assertGapIsOneHundred(Client client, String indexName, String type) throws IOException { - testGap(client, indexName, type, 100); - - // No match across gap using default slop with default positionIncrementGap - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0); - - // Nor with small-ish values - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0); - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0); - - // But huge-ish values still match - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1); - } - - public void testZero() throws IOException { - setupGapInMapping(0); - assertGapIsZero(client(), "test", "test"); - } - - /** - * Asserts that the pre-2.0 default has been applied or explicitly - * configured. - */ - public static void assertGapIsZero(Client client, String indexName, String type) throws IOException { - testGap(client, indexName, type, 0); - /* - * Phrases match across different values using default slop with pre-2.0 default - * position_increment_gap. - */ - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 1); - } - - public void testLargerThanDefault() throws IOException { - setupGapInMapping(10000); - testGap(client(), "test", "test", 10000); - } - - public void testSmallerThanDefault() throws IOException { - setupGapInMapping(2); - testGap(client(), "test", "test", 2); - } - - public void testNegativeIsError() throws IOException { - try { - setupGapInMapping(-1); - fail("Expected an error"); - } catch (MapperParsingException e) { - assertThat(ExceptionsHelper.detailedMessage(e), containsString("positions_increment_gap less than 0 aren't allowed")); - } - } - - /** - * Tests that the default actually defaults to the position_increment_gap - * configured in the analyzer. This behavior is very old and a little - * strange but not worth breaking some thought. - */ - public void testDefaultDefaultsToAnalyzer() throws IOException { - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("analysis.analyzer.gappy.type", "custom") - .put("analysis.analyzer.gappy.tokenizer", "standard") - .put("analysis.analyzer.gappy.position_increment_gap", "2") - .build(); - setupAnalyzer(settings, "gappy"); - testGap(client(), "test", "test", 2); - } - - /** - * Build an index named "test" with a field named "string" with the provided - * positionIncrementGap that uses the standard analyzer. - */ - private void setupGapInMapping(int positionIncrementGap) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); - mapping.field("type", "string"); - mapping.field("position_increment_gap", positionIncrementGap); - mapping.endObject().endObject().endObject(); - client().admin().indices().prepareCreate("test") - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()) - .addMapping("test", mapping) - .get(); - } - - /** - * Build an index named "test" with the provided settings and and a field - * named "string" that uses the specified analyzer and default - * position_increment_gap. 
- */ - private void setupAnalyzer(Settings settings, String analyzer) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); - mapping.field("type", "string"); - mapping.field("analyzer", analyzer); - mapping.endObject().endObject().endObject(); - client().admin().indices().prepareCreate("test") - .addMapping("test", mapping) - .setSettings(settings) - .get(); - } - - private static void testGap(Client client, String indexName, String type, int positionIncrementGap) throws IOException { - client.prepareIndex(indexName, type, "position_gap_test").setSource("string", Arrays.asList("one", "two three")) - .setRefreshPolicy(IMMEDIATE).get(); - - // Baseline - phrase query finds matches in the same field value - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1); - - if (positionIncrementGap > 0) { - // No match across gaps when slop < position gap - assertHitCount( - client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap - 1)).get(), - 0); - } - - // Match across gaps when slop >= position gap - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap)).get(), 1); - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap + 1)).get(), - 1); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java deleted file mode 100644 index 558253c463d94..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.junit.Before; - -public class StringFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new StringFieldMapper.StringFieldType(); - } - @Before - public void setupProperties() { - addModifier(new Modifier("fielddata", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddata(tft.fielddata() == false); - } - }); - addModifier(new Modifier("fielddata_frequency_filter.min", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddataMinFrequency(3); - } - }); - addModifier(new Modifier("fielddata_frequency_filter.max", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddataMaxFrequency(0.2); - } - }); - addModifier(new Modifier("fielddata_frequency_filter.min_segment_size", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddataMinSegmentSize(1000); - } - }); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java deleted file mode 100644 index 311bf0205ed40..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import com.carrotsearch.randomizedtesting.generators.RandomPicks; - -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; - -public class StringMappingUpgradeTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testUpgradeDefaults() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - } - - public void testUpgradeAnalyzedString() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "analyzed").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - } - - public void testUpgradeNotAnalyzedString() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = 
mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - } - - public void testUpgradeNotIndexedString() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "no").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - } - - public void testUpgradeIndexOptions() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index_options", "offsets").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, field.fieldType().indexOptions()); - } - - public void testUpgradePositionGap() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("position_increment_gap", 42).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertEquals(42, field.fieldType().indexAnalyzer().getPositionIncrementGap("field")); - } - - public void testIllegalIndexValue() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("index", false) - .endObject() - .endObject() .endObject().endObject().string(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - assertThat(e.getMessage(), - containsString("Can't parse [index] value [false] for field [field], expected [no], [not_analyzed] or [analyzed]")); - } - - public void testNotSupportedUpgrade() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").field("analyzer", "keyword").endObject().endObject() - .endObject().endObject().string(); - 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0")); - } - - public void testUpgradeFielddataSettings() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String format = randomFrom("paged_bytes", "disabled"); - String loading = randomFrom("lazy", "eager", "eager_global_ordinals"); - boolean keyword = random().nextBoolean(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("index", keyword ? "not_analyzed" : "analyzed") - .startObject("fielddata") - .field("format", format) - .field("loading", loading) - .startObject("filter") - .startObject("frequency") - .field("min", 3) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - if (keyword) { - assertThat(field, instanceOf(KeywordFieldMapper.class)); - } else { - assertThat(field, instanceOf(TextFieldMapper.class)); - TextFieldType fieldType = (TextFieldType) field.fieldType(); - assertEquals("disabled".equals(format) == false, fieldType.fielddata()); - assertEquals(3, fieldType.fielddataMinFrequency(), 0d); - assertEquals(Integer.MAX_VALUE, fieldType.fielddataMaxFrequency(), 0d); - } - assertEquals("eager_global_ordinals".equals(loading), field.fieldType().eagerGlobalOrdinals()); - } - - public void testUpgradeIgnoreAbove() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").field("ignore_above", 200).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - assertEquals(200, ((KeywordFieldMapper) field).ignoreAbove()); - } - - public void testUpgradeAnalyzer() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("analyzer", "standard") - .field("search_analyzer", "whitespace") - .field("search_quote_analyzer", "keyword").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertEquals("standard", field.fieldType().indexAnalyzer().name()); - assertEquals("whitespace", field.fieldType().searchAnalyzer().name()); - assertEquals("keyword", field.fieldType().searchQuoteAnalyzer().name()); - } - - public void testUpgradeTextIncludeInAll() throws IOException { - IndexService indexService = 
createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("include_in_all", false).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertFalse(((TextFieldMapper) field).includeInAll()); - } - - public void testUpgradeKeywordIncludeInAll() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").field("include_in_all", true).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - assertTrue(((KeywordFieldMapper) field).includeInAll()); - } - - public void testUpgradeRandomMapping() throws IOException { - final int iters = 20; - for (int i = 0; i < iters; ++i) { - doTestUpgradeRandomMapping(i); - } - } - - private void doTestUpgradeRandomMapping(int iter) throws IOException { - IndexService indexService; - boolean oldIndex = randomBoolean(); - String indexName = "test" + iter; - if (oldIndex) { - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .build(); - indexService = createIndex(indexName, settings); - } else { - indexService = createIndex(indexName); - } - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string"); - boolean keyword = randomBoolean(); - boolean hasNorms = keyword == false; - boolean shouldUpgrade = true; - if (keyword) { - mapping.field("index", randomBoolean() ? 
"not_analyzed" : "no"); - } else if (randomBoolean()) { - mapping.field("index", "analyzed"); - } - if (randomBoolean()) { - mapping.field("store", RandomPicks.randomFrom(random(), Arrays.asList("yes", "no", true, false))); - } - if (keyword && randomBoolean()) { - mapping.field("doc_values", randomBoolean()); - } - if (keyword == false && randomBoolean()) { - mapping.field("analyzer", "keyword"); - } - if (randomBoolean()) { - hasNorms = randomBoolean(); - if (randomBoolean()) { - mapping.field("omit_norms", hasNorms == false); - } else { - mapping.field("norms", Collections.singletonMap("enabled", hasNorms)); - } - } - if (randomBoolean()) { - Map fielddata = new HashMap<>(); - if (randomBoolean()) { - fielddata.put("format", randomFrom("paged_bytes", "disabled")); - } - if (randomBoolean()) { - fielddata.put("loading", randomFrom("lazy", "eager", "eager_global_ordinals")); - } - if (randomBoolean()) { - Map frequencyFilter = new HashMap<>(); - frequencyFilter.put("min", 10); - frequencyFilter.put("max", 1000); - frequencyFilter.put("min_segment_size", 10000); - } - } - if (randomBoolean()) { - mapping.startObject("fields").startObject("raw").field("type", "keyword").endObject().endObject(); - } - if (randomBoolean()) { - mapping.field("copy_to", "bar"); - } - if (randomBoolean()) { - // this option is not upgraded automatically - if (keyword) { - mapping.field("index_options", "docs"); - } else { - mapping.field("ignore_above", 30); - } - shouldUpgrade = false; - } - mapping.endObject().endObject().endObject().endObject(); - - if (oldIndex == false && shouldUpgrade == false) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0")); - } else { - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - FieldMapper field = mapper.mappers().getMapper("field"); - if (oldIndex) { - assertThat(field, instanceOf(StringFieldMapper.class)); - } else if (keyword) { - assertThat(field, instanceOf(KeywordFieldMapper.class)); - } else { - assertThat(field, instanceOf(TextFieldMapper.class)); - } - if (field.fieldType().indexOptions() != IndexOptions.NONE) { - assertEquals(hasNorms, field.fieldType().omitNorms() == false); - } - } - } - - public void testUpgradeTemplateWithDynamicType() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(TextFieldMapper.class)); - assertTrue(((TextFieldMapper) fooMapper).fieldType().stored()); - } - - public void testUpgradeTemplateWithDynamicType2() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = 
indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("type", "{dynamic_type}") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(TextFieldMapper.class)); - assertTrue(((TextFieldMapper) fooMapper).fieldType().stored()); - } - - public void testUpgradeTemplateWithDynamicTypeKeyword() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(KeywordFieldMapper.class)); - } - - public void testUpgradeTemplateWithDynamicTypeKeyword2() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("type", "{dynamic_type}") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(KeywordFieldMapper.class)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java deleted file mode 100644 index bf51be3c2d438..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Collection; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; - -public class TTLFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testRejectedOn5x() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", true) - .endObject() - .endObject().endObject().string(); - IndexService index = createIndex("test"); - IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, - () -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false)); - assertThat(expected.getMessage(), startsWith("[_ttl] is removed")); - } - - public void testSimpleDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", 
"1", source).ttl(Long.MAX_VALUE)); - - assertThat(doc.rootDoc().getField("_ttl"), equalTo(null)); - } - - public void testEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", "yes").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).ttl(Long.MAX_VALUE)); - - assertThat(doc.rootDoc().getField("_ttl").fieldType().stored(), equalTo(true)); - assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_ttl").fieldType().indexOptions()); - assertThat(doc.rootDoc().getField("_ttl").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - - public void testDefaultValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled)); - assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored())); - assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions())); - } - - public void testThatEnablingTTLFieldOnMergeWorks() throws Exception { - String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", "yes") - .endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - - assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false)); - assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true)); - } - - public void testThatChangingTTLKeepsMapperEnabled() throws Exception { - String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", "yes") - .endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("default", "7d") - .endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - MapperService mapperService 
= createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); - assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true)); - } - - public void testThatDisablingTTLReportsConflict() throws Exception { - String mappingWithTtl = getMappingWithTtlEnabled().string(); - String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - - try { - mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - // expected - } - - assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); - } - - public void testThatDisablingTTLReportsConflictOnCluster() throws Exception { - String mappingWithTtl = getMappingWithTtlEnabled().string(); - String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); - assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", mappingWithTtl)); - GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - try { - client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtlDisabled).setType("type").get(); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("_ttl cannot be disabled once it was enabled.")); - } - GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - assertThat(mappingsBeforeUpdateResponse.getMappings().get("testindex").get("type").source(), equalTo(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").source())); - } - - public void testThatEnablingTTLAfterFirstDisablingWorks() throws Exception { - String mappingWithTtl = getMappingWithTtlEnabled().string(); - String withTtlDisabled = getMappingWithTtlDisabled().string(); - assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", withTtlDisabled)); - GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - assertThat(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").sourceAsMap().get("_ttl").toString(), equalTo("{enabled=false}")); - client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtl).setType("type").get(); - mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - assertThat(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").sourceAsMap().get("_ttl").toString(), equalTo("{enabled=true}")); - } - - public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type"); - XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - 
indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - } - - public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type"); - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - } - - public void testMergeWithOnlyDefaultSet() throws Exception { - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled); - XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), - MapperService.MergeReason.MAPPING_UPDATE, false); - CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertEquals(JsonXContent.contentBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", true) - .field("default", 360000) - .endObject() - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(), - mappingAfterMerge.string()); - } - - public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception { - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d"); - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled); - CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); - assertEquals(JsonXContent.contentBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(), - mappingAfterCreation.string()); - XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), - MapperService.MergeReason.MAPPING_UPDATE, false); - CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertEquals(JsonXContent.contentBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(), - mappingAfterMerge.string()); - } - - public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - try { - docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_ttl", "2d").endObject().bytes()); - fail("Expected failure to parse metadata field"); - } catch 
(MapperParsingException e) { - assertTrue(e.getMessage(), e.getMessage().contains("Field [_ttl] is a metadata field and cannot be added inside a document")); - } - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlEnabled() throws IOException { - return getMappingWithTtlEnabled(null); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlDisabled() throws IOException { - return getMappingWithTtlDisabled(null); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlEnabled(String defaultValue) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", true); - if (defaultValue != null) { - mapping.field("default", defaultValue); - } - return mapping.endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject(); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlDisabled(String defaultValue) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", false); - if (defaultValue != null) { - mapping.field("default", defaultValue); - } - return mapping.endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject(); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithOnlyTtlDefaultSet(String defaultValue) throws IOException { - return XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("default", defaultValue).endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject(); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 846d2c5666984..d62bef165db6e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -573,23 +573,5 @@ public void testEmptyName() throws IOException { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - String downgradedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(downgradedMapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java deleted file mode 100644 index 
6b156fa36e15c..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java +++ /dev/null @@ -1,459 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.TimestampParsingException; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedHashMap; - -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; - -public class TimestampFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testRejectedOn5x() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .endObject() - .endObject().endObject().string(); - IndexService index = createIndex("test"); - IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, - () -> index.mapperService().merge("type", new
CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false)); - assertThat(expected.getMessage(), startsWith("[_timestamp] is removed")); - } - - public void testSimpleDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).timestamp(1)); - - assertThat(doc.rootDoc().getField("_timestamp"), equalTo(null)); - } - - public void testEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", "yes").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).timestamp(1)); - - assertThat(doc.rootDoc().getField("_timestamp").fieldType().stored(), equalTo(true)); - assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_timestamp").fieldType().indexOptions()); - assertThat(doc.rootDoc().getField("_timestamp").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - - public void testDefaultValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.V_5_0_0_alpha3); - for (String mapping : Arrays.asList( - XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(), - XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().endObject().string())) { - DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); - assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); - assertAcked(client().admin().indices().prepareDelete("test").execute().get()); - } - } - - public void testThatDisablingDuringMergeIsWorking() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).endObject() - .endObject().endObject().string(); - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), 
MapperService.MergeReason.MAPPING_UPDATE, false); - - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true)); - assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingDefaultToEpochValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", "1970-01-01") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd")))); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingNowDefaultValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", "now") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - - // We should have less than one minute (probably some ms) - long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp()); - assertThat(delay, lessThanOrEqualTo(60000L)); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testPathMissingWithForcedNullDefaultShouldFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("default", (String) null) - .endObject() - .endObject().endObject(); - TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService() - .documentMapperParser().parse("type", new 
CompressedXContent(mapping.string()))); - assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", (String) null) - .endObject() - .endObject().endObject(); - - TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService() - .documentMapperParser().parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampDefaultAndIgnore() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", "1971-12-26") - .field("ignore_missing", false) - .endObject() - .endObject().endObject(); - - TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService() - .documentMapperParser().parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingShouldNotFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(mappingMetaData, true, "test"); - - assertThat(request.timestamp(), notNullValue()); - - // We should have less than one minute (probably some ms) - long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp()); - assertThat(delay, lessThanOrEqualTo(60000L)); - } - - public void testDefaultTimestampStream() throws IOException { - // Testing null value for default timestamp - { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, - TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); - MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Routing(false), timestamp, false); - - BytesStreamOutput out = new BytesStreamOutput(); - expected.writeTo(out); - out.close(); - BytesReference bytes = out.bytes(); - - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); - - assertThat(metaData, is(expected)); - } - - // Testing "now" value for default timestamp - { - MappingMetaData.Timestamp timestamp = new 
MappingMetaData.Timestamp(true, - TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); - MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Routing(false), timestamp, false); - - BytesStreamOutput out = new BytesStreamOutput(); - expected.writeTo(out); - out.close(); - BytesReference bytes = out.bytes(); - - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); - - assertThat(metaData, is(expected)); - } - - // Testing "ignore_missing" value for default timestamp - { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, - TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); - MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Routing(false), timestamp, false); - - BytesStreamOutput out = new BytesStreamOutput(); - expected.writeTo(out); - out.close(); - BytesReference bytes = out.bytes(); - - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); - - assertThat(metaData, is(expected)); - } - } - - public void testParsingNotDefaultTwiceDoesNotChangeMapping() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .field("default", "1970-01-01") - .endObject().endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - docMapper = parser.parse("type", docMapper.mappingSource()); - assertThat(docMapper.mappingSource().string(), equalTo(mapping)); - } - - /** - * Test for issue #9223 - */ - public void testInitMappers() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .field("default", (String) null) - .endObject() - .endObject().endObject().string(); - // This was causing a NPE - new MappingMetaData(new CompressedXContent(mapping)); - } - - void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { - mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); - try { - mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); - assertNull(conflict); - } catch (IllegalArgumentException e) { - assertNotNull(conflict); - assertThat(e.getMessage(), containsString(conflict)); - } - } - - public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - try { - docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_timestamp", 2000000).endObject().bytes()); - fail("Expected failure to parse metadata field"); - } catch (MapperParsingException e) { - assertTrue(e.getMessage(), e.getMessage().contains("Field [_timestamp] is a metadata field and cannot be added inside a 
document")); - } - } - - public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject(); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc).timestamp("2015060210"); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - request.process(mappingMetaData, true, "test"); - - assertThat(request.timestamp(), is("1433239200000")); - } - - public void testThatIndicesAfter2_0DontSupportUnixTimestampsInAnyDateFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject() - .endObject().endObject().string(); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes(); - // test with 2.x - DocumentMapper currentMapper = createIndex("new-index", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - // this works with 2.x - IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01"); - request.process(new MappingMetaData(currentMapper), true, "new-index"); - - // this fails with 2.x - request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1234567890"); - try { - request.process(new MappingMetaData(currentMapper), true, "new-index"); - } catch (Exception e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), containsString("failed to parse timestamp [1234567890]")); - } - } - - public void testSizeTimestampIndexParsing() throws IOException { - IndexService indexService = createIndex("test", BW_SETTINGS); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json"); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true); - assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); - documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); - assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); - } - - public void testDefaultApplied() throws IOException { - createIndex("test1", BW_SETTINGS); - createIndex("test2", BW_SETTINGS); - XContentBuilder defaultMapping = XContentFactory.jsonBuilder().startObject() - .startObject(MapperService.DEFAULT_MAPPING).startObject("_timestamp").field("enabled", true).endObject().endObject() - .endObject(); - client().admin().indices().preparePutMapping().setType(MapperService.DEFAULT_MAPPING).setSource(defaultMapping).get(); - XContentBuilder typeMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type").startObject("_all").field("enabled", false).endObject().endObject() - .endObject(); - 
client().admin().indices().preparePutMapping("test1").setType("type").setSource(typeMapping).get(); - client().admin().indices().preparePutMapping("test1", "test2").setType("type").setSource(typeMapping).get(); - - GetMappingsResponse response = client().admin().indices().prepareGetMappings("test2").get(); - assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")); - assertFalse((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")).get("enabled")); - assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")); - assertTrue((Boolean)((LinkedHashMap)response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")).get("enabled")); - } - - public void testTimestampParsing() throws IOException { - IndexService indexService = createIndex("test", BW_SETTINGS); - XContentBuilder indexMapping = XContentFactory.jsonBuilder(); - boolean enabled = randomBoolean(); - indexMapping.startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", enabled) - .endObject() - .endObject() - .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); - assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); - assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); - assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); - documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); - assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); - assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); - assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java deleted file mode 100644 index 53c0c89f8c392..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests { - @Override - protected MappedFieldType createDefaultFieldType() { - return new TimestampFieldMapper.TimestampFieldType(); - } - - @Override - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - String date = "2015-10-12T12:09:55.000Z"; - long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); - assertEquals(instant, ft.valueForDisplay(instant)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java index 835295def4fe0..02128a4254ac1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java @@ -125,14 +125,5 @@ public void testEmptyName() throws IOException { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - DocumentMapperParser parser2x = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser2x.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java index 3573cfd8b60fe..6fade26ca02f4 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java @@ -19,15 +19,9 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -52,14 +46,4 @@ public void testDocValues() throws Exception { assertThat(typeMapper.fieldType().fielddataBuilder(), instanceOf(DocValuesIndexFieldData.Builder.class)); } - public void testDocValuesPre21() throws Exception { - // between 2.0 and 2.1, doc values was disabled for _type - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id).build(); - - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - TypeFieldMapper typeMapper = 
docMapper.metadataMapper(TypeFieldMapper.class); - assertFalse(typeMapper.fieldType().hasDocValues()); - assertThat(typeMapper.fieldType().fielddataBuilder(), instanceOf(PagedBytesIndexFieldData.Builder.class)); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java index 73cf070cba7bf..7aec1ecd0bb91 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java @@ -235,30 +235,6 @@ public void testReuseMetaField() throws IOException { } } - public void testReuseMetaFieldBackCompat() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("_id").field("type", "text").endObject() - .endObject().endObject().endObject(); - // the logic is different for 2.x indices since they record some meta mappers (including _id) - // in the root object - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0).build(); - MapperService mapperService = createIndex("test", settings).mapperService(); - - try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); - } - - try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); - } - } - public void testRejectFieldDefinedTwice() throws IOException { String mapping1 = XContentFactory.jsonBuilder().startObject() .startObject("type1") diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index d2c210db6fa4d..49266ebe9fd04 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -22,7 +22,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; @@ -30,7 +29,6 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -42,7 +40,6 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBoundingBoxQueryBuilder> { @@ -223,31 +220,6 @@ public void testLeftRightCanBeFlipped() {
builder.setValidationMethod(GeoValidationMethod.STRICT).setCorners(top, right, bottom, left); } - public void testNormalization() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - GeoBoundingBoxQueryBuilder qb = createTestQueryBuilder(); - if (getCurrentTypes().length != 0 && "mapped_geo".equals(qb.fieldName())) { - // only execute this test if we are running on a valid geo field - qb.setCorners(200, 200, qb.bottomRight().getLat(), qb.bottomRight().getLon()); - qb.setValidationMethod(GeoValidationMethod.COERCE); - Query query = qb.toQuery(createShardContext()); - if (query instanceof ConstantScoreQuery) { - ConstantScoreQuery result = (ConstantScoreQuery) query; - BooleanQuery bboxFilter = (BooleanQuery) result.getQuery(); - for (BooleanClause clause : bboxFilter.clauses()) { - LegacyNumericRangeQuery boundary = (LegacyNumericRangeQuery) clause.getQuery(); - if (boundary.getMax() != null) { - assertTrue("If defined, non of the maximum range values should be larger than 180", - boundary.getMax().intValue() <= 180); - } - } - } else { - assertTrue("memory queries should result in LegacyInMemoryGeoBoundingBoxQuery", - query instanceof LegacyInMemoryGeoBoundingBoxQuery); - } - } - } - public void testStrictnessDefault() { assertFalse("Someone changed the default for coordinate validation - were the docs changed as well?", GeoValidationMethod.DEFAULT_LENIENT_PARSING); @@ -260,18 +232,6 @@ protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Quer MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName()); if (fieldType == null) { assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery); - } else { - if (context.indexVersionCreated().before(Version.V_2_2_0)) { - if (queryBuilder.type() == GeoExecType.INDEXED) { - assertTrue("Found no indexed geo query.", query instanceof ConstantScoreQuery); - } else { - assertTrue("Found no indexed geo query.", query instanceof LegacyInMemoryGeoBoundingBoxQuery); - } - } else if (context.indexVersionCreated().before(Version.V_5_0_0_beta1)) { - assertTrue("Found no indexed geo query.", query instanceof GeoPointInBBoxQuery); - } else { - assertTrue("Found no indexed geo query.", query instanceof Query); - } } } @@ -425,21 +385,6 @@ public void testParsingAndToQuery6() throws IOException { private void assertGeoBoundingBoxQuery(String query) throws IOException { QueryShardContext shardContext = createShardContext(); Query parsedQuery = parseQuery(query).toQuery(shardContext); - if (shardContext.indexVersionCreated().before(Version.V_2_2_0)) { - LegacyInMemoryGeoBoundingBoxQuery filter = (LegacyInMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(filter.topLeft().lat(), closeTo(40, 1E-5)); - assertThat(filter.topLeft().lon(), closeTo(-70, 1E-5)); - assertThat(filter.bottomRight().lat(), closeTo(30, 1E-5)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 1E-5)); - } else if (shardContext.indexVersionCreated().before(Version.V_5_0_0_beta1)) { - GeoPointInBBoxQuery q = (GeoPointInBBoxQuery) parsedQuery; - assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(q.getMaxLat(), closeTo(40, 1E-5)); - assertThat(q.getMinLon(), closeTo(-70, 1E-5)); - assertThat(q.getMinLat(), closeTo(30, 1E-5)); - assertThat(q.getMaxLon(), closeTo(-80, 1E-5)); - } } public void testFromJson() throws IOException { diff --git 
a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 3373623b6e968..6c92fde684315 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -21,15 +21,10 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -40,8 +35,6 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> { @@ -131,48 +124,7 @@ public void testToQuery() throws IOException { @Override protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { - Version version = context.getQueryShardContext().indexVersionCreated(); - if (version.before(Version.V_2_2_0)) { - assertLegacyQuery(queryBuilder, query); - } else { - assertGeoPointQuery(queryBuilder, query); - } - } - - private void assertLegacyQuery(GeoDistanceQueryBuilder queryBuilder, Query query) throws IOException { - assertThat(query, instanceOf(GeoDistanceRangeQuery.class)); - GeoDistanceRangeQuery geoQuery = (GeoDistanceRangeQuery) query; - assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); - if (queryBuilder.point() != null) { - assertThat(geoQuery.lat(), equalTo(queryBuilder.point().lat())); - assertThat(geoQuery.lon(), equalTo(queryBuilder.point().lon())); - } - assertThat(geoQuery.geoDistance(), equalTo(queryBuilder.geoDistance())); - assertThat(geoQuery.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - double distance = queryBuilder.distance(); - if (queryBuilder.geoDistance() != null) { - distance = queryBuilder.geoDistance().normalize(distance, DistanceUnit.DEFAULT); - } - assertThat(geoQuery.maxInclusiveDistance(), closeTo(distance, Math.abs(distance) / 1000)); - } - - private void assertGeoPointQuery(GeoDistanceQueryBuilder queryBuilder, Query query) throws IOException { - Version version = createShardContext().indexVersionCreated(); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(query, instanceOf(GeoPointDistanceQuery.class)); - GeoPointDistanceQuery geoQuery = (GeoPointDistanceQuery) query; - assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName())); - if (queryBuilder.point() != null) { - assertThat(geoQuery.getCenterLat(), equalTo(queryBuilder.point().lat())); - assertThat(geoQuery.getCenterLon(), equalTo(queryBuilder.point().lon())); - } - double distance = queryBuilder.distance(); - if (queryBuilder.geoDistance() != null) {
- distance = queryBuilder.geoDistance().normalize(distance, DistanceUnit.DEFAULT); - distance = org.elasticsearch.common.geo.GeoUtils.maxRadialDistance(queryBuilder.point(), distance); - assertThat(geoQuery.getRadiusMeters(), closeTo(distance, GeoUtils.TOLERANCE)); - } - } + // TODO: what can we check } public void testParsingAndToQuery1() throws IOException { @@ -342,21 +294,7 @@ public void testParsingAndToQuery12() throws IOException { private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit) throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query parsedQuery = parseQuery(query).toQuery(createShardContext()); - Version version = createShardContext().indexVersionCreated(); - if (version.before(Version.V_2_2_0)) { - GeoDistanceRangeQuery q = (GeoDistanceRangeQuery) parsedQuery; - assertThat(q.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(q.lat(), closeTo(lat, 1E-5D)); - assertThat(q.lon(), closeTo(lon, 1E-5D)); - assertThat(q.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(q.maxInclusiveDistance(), closeTo(distanceUnit.convert(distance, DistanceUnit.MILES), 1E-5D)); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - GeoPointDistanceQuery q = (GeoPointDistanceQuery) parsedQuery; - assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(q.getCenterLat(), closeTo(lat, 1E-5D)); - assertThat(q.getCenterLon(), closeTo(lon, 1E-5D)); - assertThat(q.getRadiusMeters(), closeTo(distanceUnit.convert(distance, DistanceUnit.MILES), 1E-5D)); - } + // TODO: what can we check? } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java deleted file mode 100644 index e2c04834e365b..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java +++ /dev/null @@ -1,397 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.geo.RandomGeoGenerator; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; - -public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanceRangeQueryBuilder> { - - @Override - protected GeoDistanceRangeQueryBuilder doCreateTestQueryBuilder() { - GeoDistanceRangeQueryBuilder builder; - GeoPoint randomPoint = RandomGeoGenerator.randomPointIn(random(), -180.0, -89.9, 180.0, 89.9); - if (randomBoolean()) { - builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomPoint.geohash()); - } else { - if (randomBoolean()) { - builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomPoint); - } else { - builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomPoint.lat(), randomPoint.lon()); - } - } - GeoPoint point = builder.point(); - final double maxRadius = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon()); - final int fromValueMeters = randomInt((int)(maxRadius*0.5)); - final int toValueMeters = randomIntBetween(fromValueMeters + 1, (int)maxRadius); - DistanceUnit fromToUnits = randomFrom(DistanceUnit.values()); - final String fromToUnitsStr = fromToUnits.toString(); - final double fromValue = DistanceUnit.convert(fromValueMeters, DistanceUnit.DEFAULT, fromToUnits); - final double toValue = DistanceUnit.convert(toValueMeters, DistanceUnit.DEFAULT, fromToUnits); - - if (randomBoolean()) { - int branch = randomInt(2); - fromToUnits = DistanceUnit.DEFAULT; - switch (branch) { - case 0: - builder.from(fromValueMeters); - break; - case 1: - builder.to(toValueMeters); - break; - case 2: - builder.from(fromValueMeters); - builder.to(toValueMeters); - break; - } - } else { - int branch = randomInt(2); - switch (branch) { - case 0: - builder.from(fromValue + fromToUnitsStr); - break; - case 1: - builder.to(toValue + fromToUnitsStr); - break; - case 2: - builder.from(fromValue + fromToUnitsStr); - builder.to(toValue + fromToUnitsStr); - break; - } - } - if (randomBoolean()) { - builder.includeLower(randomBoolean()); - } - if (randomBoolean()) { - builder.includeUpper(randomBoolean()); - } - if (randomBoolean()) { - builder.geoDistance(randomFrom(GeoDistance.values())); - } - builder.unit(fromToUnits); - if (randomBoolean()) { - builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); - } - - if (randomBoolean()) { - builder.ignoreUnmapped(randomBoolean()); - } - return builder; - } - - @Override - protected void
doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, SearchContext context) - throws IOException { - Version version = context.getQueryShardContext().indexVersionCreated(); - if (version.before(Version.V_2_2_0)) { - assertLegacyQuery(queryBuilder, query); - } else { - assertGeoPointQuery(queryBuilder, query); - } - } - - private void assertLegacyQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query) throws IOException { - assertThat(query, instanceOf(GeoDistanceRangeQuery.class)); - GeoDistanceRangeQuery geoQuery = (GeoDistanceRangeQuery) query; - assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); - if (queryBuilder.point() != null) { - GeoPoint expectedPoint = new GeoPoint(queryBuilder.point()); - if (GeoValidationMethod.isCoerce(queryBuilder.getValidationMethod())) { - GeoUtils.normalizePoint(expectedPoint, true, true); - } - assertThat(geoQuery.lat(), equalTo(expectedPoint.lat())); - assertThat(geoQuery.lon(), equalTo(expectedPoint.lon())); - } - assertThat(geoQuery.geoDistance(), equalTo(queryBuilder.geoDistance())); - if (queryBuilder.from() != null && queryBuilder.from() instanceof Number) { - double fromValue = ((Number) queryBuilder.from()).doubleValue(); - if (queryBuilder.unit() != null) { - fromValue = queryBuilder.unit().toMeters(fromValue); - } - if (queryBuilder.geoDistance() != null) { - fromValue = queryBuilder.geoDistance().normalize(fromValue, DistanceUnit.DEFAULT); - } - double fromSlop = Math.abs(fromValue) / 1000; - if (queryBuilder.includeLower() == false) { - fromSlop = NumericUtils.sortableLongToDouble((NumericUtils.doubleToSortableLong(Math.abs(fromValue)) + 1L)) / 1000.0; - } - assertThat(geoQuery.minInclusiveDistance(), closeTo(fromValue, fromSlop)); - } - if (queryBuilder.to() != null && queryBuilder.to() instanceof Number) { - double toValue = ((Number) queryBuilder.to()).doubleValue(); - if (queryBuilder.unit() != null) { - toValue = queryBuilder.unit().toMeters(toValue); - } - if (queryBuilder.geoDistance() != null) { - toValue = queryBuilder.geoDistance().normalize(toValue, DistanceUnit.DEFAULT); - } - double toSlop = Math.abs(toValue) / 1000; - if (queryBuilder.includeUpper() == false) { - toSlop = NumericUtils.sortableLongToDouble((NumericUtils.doubleToSortableLong(Math.abs(toValue)) - 1L)) / 1000.0; - } - assertThat(geoQuery.maxInclusiveDistance(), closeTo(toValue, toSlop)); - } - } - - private void assertGeoPointQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query) throws IOException { - assertThat(query, instanceOf(XGeoPointDistanceRangeQuery.class)); - XGeoPointDistanceRangeQuery geoQuery = (XGeoPointDistanceRangeQuery) query; - assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName())); - if (queryBuilder.point() != null) { - GeoPoint expectedPoint = new GeoPoint(queryBuilder.point()); - GeoUtils.normalizePoint(expectedPoint); - assertThat(geoQuery.getCenterLat(), equalTo(expectedPoint.lat())); - assertThat(geoQuery.getCenterLon(), equalTo(expectedPoint.lon())); - } - if (queryBuilder.from() != null && queryBuilder.from() instanceof Number) { - double fromValue = ((Number) queryBuilder.from()).doubleValue(); - if (queryBuilder.unit() != null) { - fromValue = queryBuilder.unit().toMeters(fromValue); - } - assertThat(geoQuery.getMinRadiusMeters(), closeTo(fromValue, 1E-5)); - } - if (queryBuilder.to() != null && queryBuilder.to() instanceof Number) { - double toValue = ((Number) queryBuilder.to()).doubleValue(); - if (queryBuilder.unit() != null) { - toValue = 
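[editor's note: illustrative sketch, not part of the patch] The slop computation in the deleted assertLegacyQuery above relies on Lucene's order-preserving sortable-long encoding of doubles: adding 1 to the encoded form of a positive double yields the next representable double, exactly what Math.nextUp returns. A minimal check:

    import org.apache.lucene.util.NumericUtils;

    public class SortableDoubleDemo {
        public static void main(String[] args) {
            double v = 123.45;
            long sortable = NumericUtils.doubleToSortableLong(v);
            double next = NumericUtils.sortableLongToDouble(sortable + 1);
            System.out.println(next > v);               // true
            System.out.println(next == Math.nextUp(v)); // true
        }
    }

[end note]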
queryBuilder.unit().toMeters(toValue); - } - assertThat(geoQuery.getMaxRadiusMeters(), closeTo(toValue, 1E-5)); - } - } - - /** - * Overridden here to ensure the test is only run if at least one type is - * present in the mappings. Geo queries do not execute if the field is not - * explicitly mapped - */ - @Override - public void testToQuery() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testToQuery(); - } - } - - public void testNullFieldName() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder(null, new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder("", new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - } - - public void testNoPoint() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (GeoPoint) null)); - assertEquals("point must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (String) null)); - assertEquals("point must not be null", e.getMessage()); - } - - public void testInvalidFrom() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.from((String) null)); - assertEquals("[from] must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> builder.from((Number) null)); - assertEquals("[from] must not be null", e.getMessage()); - } - - public void testInvalidTo() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.to((String) null)); - assertEquals("[to] must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> builder.to((Number) null)); - assertEquals("[to] must not be null", e.getMessage()); - } - - public void testInvalidGeoDistance() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.geoDistance(null)); - assertEquals("geoDistance calculation mode must not be null", e.getMessage()); - } - - public void testInvalidDistanceUnit() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.unit(null)); - assertEquals("distance unit must not be null", e.getMessage()); - } - - public void testNestedRangeQuery() throws IOException { - // geo distance range queries are no longer supported in 5.0 they are replaced by using aggregations or sort - if (createShardContext().indexVersionCreated().onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return; - } - - // create a nested geo_point type with a subfield named "geohash" (explicit testing for ISSUE #15179) - MapperService mapperService = createShardContext().getMapperService(); - String 
nestedMapping = - "{\"nested_doc\" : {\"properties\" : {" + - "\"locations\": {\"properties\": {" + - "\"geohash\": {\"type\": \"geo_point\"}}," + - "\"type\": \"nested\"}" + - "}}}"; - mapperService.merge("nested_doc", new CompressedXContent(nestedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - // create a range query on the nested locations.geohash sub-field - String queryJson = - "{\n" + - " \"nested\": {\n" + - " \"path\": \"locations\",\n" + - " \"query\": {\n" + - " \"geo_distance_range\": {\n" + - " \"from\": \"0.0km\",\n" + - " \"to\" : \"200.0km\",\n" + - " \"locations.geohash\": \"s7ws01wyd7ws\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; - NestedQueryBuilder builder = (NestedQueryBuilder) parseQuery(queryJson); - QueryShardContext context = createShardContext(); - builder.toQuery(context); - } - - public void testFromJson() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"validation_method\" : \"STRICT\",\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - GeoDistanceRangeQueryBuilder parsed = (GeoDistanceRangeQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, -70.0, parsed.point().lon(), 0.0001); - } - - public void testFromJsonOptimizeBboxFails() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"optimize_bbox\" : \"memory\",\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - } - - public void testFromJsonCoerceFails() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"coerce\" : true,\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - } - - public void testFromJsonIgnoreMalformedFails() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"ignore_malformed\" : true,\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - } - - @Override - public void testMustRewrite() throws 
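[editor's note: illustrative sketch, not part of the patch] With geo_distance_range removed, the usual 5.0-era replacement for a ring like the 200km-400km example above is a bool query that keeps documents within the outer radius and excludes those within the inner one (sort or aggregations cover the remaining use cases). The field and values below mirror the JSON above:

    import org.elasticsearch.index.query.QueryBuilder;
    import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
    import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;

    QueryBuilder ring = boolQuery()
            .filter(geoDistanceQuery("pin.location").point(40.0, -70.0).distance("400km"))
            .mustNot(geoDistanceQuery("pin.location").point(40.0, -70.0).distance("200km"));

[end note]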
IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testMustRewrite(); - } - } - - public void testIgnoreUnmapped() throws IOException { - final GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder("unmapped", new GeoPoint(0.0, 0.0)).from("20m"); - queryBuilder.ignoreUnmapped(true); - Query query = queryBuilder.toQuery(createShardContext()); - assertThat(query, notNullValue()); - assertThat(query, instanceOf(MatchNoDocsQuery.class)); - - final GeoDistanceRangeQueryBuilder failingQueryBuilder = new GeoDistanceRangeQueryBuilder("unmapped", new GeoPoint(0.0, 0.0)) - .from("20m"); - failingQueryBuilder.ignoreUnmapped(false); - QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("failed to find geo_point field [unmapped]")); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index 97505a6ee8ef0..b77ff3bbdef88 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -22,16 +22,11 @@ import com.vividsolutions.jts.geom.Coordinate; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoPolygonQuery; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -47,8 +42,6 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase { @Override @@ -67,55 +60,9 @@ protected GeoPolygonQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { - Version version = context.getQueryShardContext().indexVersionCreated(); - if (version.before(Version.V_2_2_0)) { - assertLegacyQuery(queryBuilder, query); - } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertGeoPointQuery(queryBuilder, query); - } // todo LatLonPointInPolygon is package private } - private void assertLegacyQuery(GeoPolygonQueryBuilder queryBuilder, Query query) { - assertThat(query, instanceOf(GeoPolygonQuery.class)); - GeoPolygonQuery geoQuery = (GeoPolygonQuery) query; - assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); - List queryBuilderPoints = queryBuilder.points(); - GeoPoint[] 
queryPoints = geoQuery.points(); - assertThat(queryPoints.length, equalTo(queryBuilderPoints.size())); - if (GeoValidationMethod.isCoerce(queryBuilder.getValidationMethod())) { - for (int i = 0; i < queryBuilderPoints.size(); i++) { - GeoPoint queryBuilderPoint = queryBuilderPoints.get(i); - GeoPoint pointCopy = new GeoPoint(queryBuilderPoint); - GeoUtils.normalizePoint(pointCopy, true, true); - assertThat(queryPoints[i], equalTo(pointCopy)); - } - } else { - for (int i = 0; i < queryBuilderPoints.size(); i++) { - assertThat(queryPoints[i], equalTo(queryBuilderPoints.get(i))); - } - } - } - - private void assertGeoPointQuery(GeoPolygonQueryBuilder queryBuilder, Query query) { - assertThat(query, instanceOf(GeoPointInPolygonQuery.class)); - GeoPointInPolygonQuery geoQuery = (GeoPointInPolygonQuery) query; - assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName())); - List queryBuilderPoints = queryBuilder.points(); - assertEquals(1, geoQuery.getPolygons().length); - double[] lats = geoQuery.getPolygons()[0].getPolyLats(); - double[] lons = geoQuery.getPolygons()[0].getPolyLons(); - assertThat(lats.length, equalTo(queryBuilderPoints.size())); - assertThat(lons.length, equalTo(queryBuilderPoints.size())); - for (int i=0; i < queryBuilderPoints.size(); ++i) { - final GeoPoint queryBuilderPoint = queryBuilderPoints.get(i); - final GeoPoint pointCopy = new GeoPoint(queryBuilderPoint); - GeoUtils.normalizePoint(pointCopy); - assertThat(lats[i], closeTo(pointCopy.getLat(), 1E-5D)); - assertThat(lons[i], closeTo(pointCopy.getLon(), 1E-5D)); - } - } - /** * Overridden here to ensure the test is only run if at least one type is * present in the mappings. Geo queries do not execute if the field is not @@ -124,9 +71,7 @@ private void assertGeoPointQuery(GeoPolygonQueryBuilder queryBuilder, Query quer @Override public void testToQuery() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testToQuery(); - } + super.testToQuery(); } private static List randomPolygon() { @@ -287,38 +232,9 @@ public void testParsingAndToQuery4() throws IOException { private void assertGeoPolygonQuery(String query) throws IOException { QueryShardContext context = createShardContext(); - Version version = context.indexVersionCreated(); Query parsedQuery = parseQuery(query).toQuery(context); - if (version.before(Version.V_2_2_0)) { - GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(filter.points().length, equalTo(4)); - assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); - assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); - assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); - assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); - assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); - assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - GeoPointInPolygonQuery q = (GeoPointInPolygonQuery) parsedQuery; - assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME)); - assertEquals(1, q.getPolygons().length); - final double[] lats = q.getPolygons()[0].getPolyLats(); - final double[] lons = q.getPolygons()[0].getPolyLons(); - assertThat(lats.length, equalTo(4)); - assertThat(lons.length, equalTo(4)); - assertThat(lats[0], closeTo(40, 1E-5)); - 
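[editor's note: illustrative sketch, not part of the patch] GeoUtils.normalizePoint, used by the deleted assertions above, wraps out-of-range coordinates back into lat [-90, 90] and lon [-180, 180]:

    import org.elasticsearch.common.geo.GeoPoint;
    import org.elasticsearch.common.geo.GeoUtils;

    GeoPoint p = new GeoPoint(40.0, 190.0); // longitude out of range
    GeoUtils.normalizePoint(p, true, true); // normalize both lat and lon
    // p is now (40.0, -170.0): 190 degrees wraps across the antimeridian

[end note]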
assertThat(lons[0], closeTo(-70, 1E-5)); - assertThat(lats[1], closeTo(30, 1E-5)); - assertThat(lons[1], closeTo(-80, 1E-5)); - assertThat(lats[2], closeTo(20, 1E-5)); - assertThat(lons[2], closeTo(-90, 1E-5)); - assertThat(lats[3], equalTo(lats[0])); - assertThat(lons[3], equalTo(lons[0])); - } else { - // todo LatLonPointInPolygon is package private, need a closeTo check on the query - // since some points can be computed from the geohash - } + // TODO: LatLonPointInPolygon is package private, so we need a closeTo check on the query + // since some points can be computed from the geohash } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java deleted file mode 100644 index 73eb4e91bcd4c..0000000000000 --- a/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.index.Term; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.query.GeohashCellQuery.Builder; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.geo.RandomShapeGenerator; -import org.locationtech.spatial4j.shape.Point; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.equalTo; - -public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase { - - @Override - protected Builder doCreateTestQueryBuilder() { - GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, randomGeohash(1, 12)); - if (randomBoolean()) { - builder.neighbors(randomBoolean()); - } - if (randomBoolean()) { - if (randomBoolean()) { - builder.precision(randomIntBetween(1, 12)); - } else { - builder.precision(randomIntBetween(1, 1000000) + randomFrom(DistanceUnit.values()).toString()); - } - } - if (randomBoolean()) { - builder.ignoreUnmapped(randomBoolean()); - } - return builder; - } - - @Override - protected void doAssertLuceneQuery(Builder queryBuilder, Query query, SearchContext context) throws IOException { - if (queryBuilder.neighbors()) { - assertThat(query, instanceOf(TermsQuery.class)); - } else { - assertThat(query, instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) query; - Term term = termQuery.getTerm(); - assertThat(term.field(), equalTo(queryBuilder.fieldName() + "." + GeoPointFieldMapper.Names.GEOHASH)); - String geohash = queryBuilder.geohash(); - if (queryBuilder.precision() != null) { - int len = Math.min(queryBuilder.precision(), geohash.length()); - geohash = geohash.substring(0, len); - } - assertThat(term.text(), equalTo(geohash)); - } - } - - /** - * Overridden here to ensure the test is only run if at least one type is - * present in the mappings. 
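[editor's note: illustrative sketch, not part of the patch] The precision handling in the deleted doAssertLuceneQuery above works because geohashes are prefix codes: truncating a hash to n characters names the coarser cell containing it. Using the 12-character hash from the testFromJson sample below:

    String geohash = "t4mk70fgk067";
    int precision = 3;
    String cell = geohash.substring(0, Math.min(precision, geohash.length()));
    // cell == "t4m"; every point whose geohash starts with "t4m" lies in that cell

[end note]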
Geo queries do not execute if the field is not - * explicitly mapped - */ - @Override - public void testToQuery() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - Version version = createShardContext().indexVersionCreated(); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testToQuery(); - } - } - - public void testNullField() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Builder(null, new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> new Builder("", new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - } - - public void testNullGeoPoint() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Builder(GEO_POINT_FIELD_NAME, (GeoPoint) null)); - assertEquals("geohash or point must be defined", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> new Builder(GEO_POINT_FIELD_NAME, "")); - assertEquals("geohash or point must be defined", e.getMessage()); - } - - public void testInvalidPrecision() { - GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.precision(-1)); - assertThat(e.getMessage(), containsString("precision must be greater than 0")); - } - - public void testLocationParsing() throws IOException { - Point point = RandomShapeGenerator.xRandomPoint(random()); - Builder pointTestBuilder = new GeohashCellQuery.Builder("pin", new GeoPoint(point.getY(), point.getX())); - String pointTest1 = "{\"geohash_cell\": {\"pin\": {\"lat\": " + point.getY() + ",\"lon\": " + point.getX() + "}}}"; - assertParsedQuery(pointTest1, pointTestBuilder); - String pointTest2 = "{\"geohash_cell\": {\"pin\": \"" + point.getY() + "," + point.getX() + "\"}}"; - assertParsedQuery(pointTest2, pointTestBuilder); - String pointTest3 = "{\"geohash_cell\": {\"pin\": [" + point.getX() + "," + point.getY() + "]}}"; - assertParsedQuery(pointTest3, pointTestBuilder); - } - - public void testFromJson() throws IOException { - String json = - "{\n" + - " \"geohash_cell\" : {\n" + - " \"neighbors\" : true,\n" + - " \"precision\" : 3,\n" + - " \"pin\" : \"t4mk70fgk067\",\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - GeohashCellQuery.Builder parsed = (GeohashCellQuery.Builder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, 3, parsed.precision().intValue()); - } - - @Override - public void testMustRewrite() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - Version version = createShardContext().indexVersionCreated(); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testMustRewrite(); - } - } - - public void testIgnoreUnmapped() throws IOException { - final GeohashCellQuery.Builder queryBuilder = new GeohashCellQuery.Builder("unmapped", "c"); - queryBuilder.ignoreUnmapped(true); - Query query = queryBuilder.toQuery(createShardContext()); - assertThat(query, notNullValue()); - assertThat(query, instanceOf(MatchNoDocsQuery.class)); - - final GeohashCellQuery.Builder failingQueryBuilder = new GeohashCellQuery.Builder("unmapped", "c"); - failingQueryBuilder.ignoreUnmapped(false); - QueryShardException e = 
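[editor's note: illustrative sketch, not part of the patch] testLocationParsing above exercises the three equivalent point encodings the parser accepts; note the flipped coordinate order in the array form, which follows the GeoJSON convention. With illustrative values lat=52.5, lon=13.4:

    String asObject = "{\"pin\": {\"lat\": 52.5, \"lon\": 13.4}}";
    String asString = "{\"pin\": \"52.5,13.4\"}"; // "lat,lon"
    String asArray  = "{\"pin\": [13.4, 52.5]}";  // [lon, lat]

[end note]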
expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("failed to parse [" + GeohashCellQuery.NAME + "] query. missing [" - + BaseGeoPointFieldMapper.CONTENT_TYPE + "] field [unmapped]")); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 05d461e12788a..988f6f5c4ba28 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -25,10 +25,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; @@ -51,9 +49,6 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase points = new ArrayList(); points.add(new GeoPoint(40, -70)); @@ -193,13 +181,6 @@ public void testGeoShape() throws IOException { .indexedShapePath("location"); } - public void testGeoHashCell() { - geoHashCellQuery("pin.location", - new GeoPoint(13.4080, 52.5186)) - .neighbors(true) - .precision(3); - } - public void testHasChild() { hasChildQuery( "blog_tag", diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index a378fc62fd3c4..09627d00d76d9 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -22,7 +22,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; @@ -32,7 +31,6 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.MapperService; @@ -49,11 +47,8 @@ import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.sameInstance; public class RangeQueryBuilderTests extends AbstractQueryTestCase { @@ -140,24 +135,13 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower())); 
assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper())); } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { - assertThat(query, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + assertThat(query, instanceOf(PointRangeQuery.class)); MapperService mapperService = context.getQueryShardContext().getMapperService(); MappedFieldType mappedFieldType = mapperService.fullName(DATE_FIELD_NAME); final Long fromInMillis; final Long toInMillis; // we have to normalize the incoming value into milliseconds since it could be literally anything - if (mappedFieldType instanceof LegacyDateFieldMapper.DateFieldType) { - fromInMillis = queryBuilder.from() == null ? null : - ((LegacyDateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.from(), - queryBuilder.includeLower(), - queryBuilder.getDateTimeZone(), - queryBuilder.getForceDateParser(), context.getQueryShardContext()); - toInMillis = queryBuilder.to() == null ? null : - ((LegacyDateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.to(), - queryBuilder.includeUpper(), - queryBuilder.getDateTimeZone(), - queryBuilder.getForceDateParser(), context.getQueryShardContext()); - } else if (mappedFieldType instanceof DateFieldMapper.DateFieldType) { + if (mappedFieldType instanceof DateFieldMapper.DateFieldType) { fromInMillis = queryBuilder.from() == null ? null : ((DateFieldMapper.DateFieldType) mappedFieldType).parseToMilliseconds(queryBuilder.from(), queryBuilder.includeLower(), @@ -173,65 +157,46 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, fail("unexpected mapped field type: [" + mappedFieldType.getClass() + "] " + mappedFieldType.toString()); } - if (query instanceof LegacyNumericRangeQuery) { - LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query; - assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName())); - assertThat(numericRangeQuery.getMin(), equalTo(fromInMillis)); - assertThat(numericRangeQuery.getMax(), equalTo(toInMillis)); - assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower())); - assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper())); + Long min = fromInMillis; + Long max = toInMillis; + long minLong, maxLong; + if (min == null) { + minLong = Long.MIN_VALUE; } else { - Long min = fromInMillis; - Long max = toInMillis; - long minLong, maxLong; - if (min == null) { - minLong = Long.MIN_VALUE; - } else { - minLong = min.longValue(); - if (queryBuilder.includeLower() == false && minLong != Long.MAX_VALUE) { - minLong++; - } + minLong = min.longValue(); + if (queryBuilder.includeLower() == false && minLong != Long.MAX_VALUE) { + minLong++; } - if (max == null) { - maxLong = Long.MAX_VALUE; - } else { - maxLong = max.longValue(); - if (queryBuilder.includeUpper() == false && maxLong != Long.MIN_VALUE) { - maxLong--; - } + } + if (max == null) { + maxLong = Long.MAX_VALUE; + } else { + maxLong = max.longValue(); + if (queryBuilder.includeUpper() == false && maxLong != Long.MIN_VALUE) { + maxLong--; } - assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, minLong, maxLong), query); } + assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, minLong, maxLong), query); } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) { - assertThat(query, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); - if (query instanceof LegacyNumericRangeQuery) { - 
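[editor's note: illustrative sketch, not part of the patch] The minLong++/maxLong-- adjustments above exist because point range queries take inclusive bounds only; an exclusive bound is expressed by moving the value one unit inward, and the Long.MIN_VALUE/MAX_VALUE guards avoid overflow. For example, 23 <= x < 54 becomes:

    import org.apache.lucene.document.LongPoint;
    import org.apache.lucene.search.Query;

    Query q = LongPoint.newRangeQuery("field", 23L, 54L - 1); // inclusive [23, 53]

[end note]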
LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query; - assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName())); - assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from())); - assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to())); - assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower())); - assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper())); + assertThat(query, instanceOf(PointRangeQuery.class)); + Integer min = (Integer) queryBuilder.from(); + Integer max = (Integer) queryBuilder.to(); + int minInt, maxInt; + if (min == null) { + minInt = Integer.MIN_VALUE; } else { - Integer min = (Integer) queryBuilder.from(); - Integer max = (Integer) queryBuilder.to(); - int minInt, maxInt; - if (min == null) { - minInt = Integer.MIN_VALUE; - } else { - minInt = min.intValue(); - if (queryBuilder.includeLower() == false && minInt != Integer.MAX_VALUE) { - minInt++; - } + minInt = min.intValue(); + if (queryBuilder.includeLower() == false && minInt != Integer.MAX_VALUE) { + minInt++; } - if (max == null) { - maxInt = Integer.MAX_VALUE; - } else { - maxInt = max.intValue(); - if (queryBuilder.includeUpper() == false && maxInt != Integer.MIN_VALUE) { - maxInt--; - } + } + if (max == null) { + maxInt = Integer.MAX_VALUE; + } else { + maxInt = max.intValue(); + if (queryBuilder.includeUpper() == false && maxInt != Integer.MIN_VALUE) { + maxInt--; } - assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, minInt, maxInt), query); } } else if (queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) || queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME)) { @@ -276,17 +241,8 @@ public void testToQueryNumericField() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); - if (parsedQuery instanceof LegacyNumericRangeQuery) { - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); - assertThat(rangeQuery.getMin().intValue(), equalTo(23)); - assertThat(rangeQuery.getMax().intValue(), equalTo(54)); - assertThat(rangeQuery.includesMin(), equalTo(true)); - assertThat(rangeQuery.includesMax(), equalTo(false)); - } else { - assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery); - } + assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); + assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery); } public void testDateRangeQueryFormat() throws IOException { @@ -302,22 +258,12 @@ public void testDateRangeQueryFormat() throws IOException { " }\n" + "}"; Query parsedQuery = parseQuery(query).toQuery(createShardContext()); - assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); - if (parsedQuery instanceof LegacyNumericRangeQuery) { - // Min value was 01/01/2012 (dd/MM/yyyy) - DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00"); - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); - - // Max value was 2030 (yyyy) - DateTime max = 
DateTime.parse("2030-01-01T00:00:00.000+00"); - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis())); - } else { - assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, - DateTime.parse("2012-01-01T00:00:00.000+00").getMillis(), - DateTime.parse("2030-01-01T00:00:00.000+00").getMillis() - 1), - parsedQuery); - } + assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, + DateTime.parse("2012-01-01T00:00:00.000+00").getMillis(), + DateTime.parse("2030-01-01T00:00:00.000+00").getMillis() - 1), + parsedQuery); // Test Invalid format final String invalidQuery = "{\n" + @@ -343,23 +289,11 @@ public void testDateRangeBoundaries() throws IOException { " }\n" + "}\n"; Query parsedQuery = parseQuery(query).toQuery(createShardContext()); - assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); - if (parsedQuery instanceof LegacyNumericRangeQuery) { - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - - DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00"); - assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); - assertTrue(rangeQuery.includesMin()); - - DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00"); - assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); - assertTrue(rangeQuery.includesMax()); - } else { - assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, - DateTime.parse("2014-11-01T00:00:00.000+00").getMillis(), - DateTime.parse("2014-12-08T23:59:59.999+00").getMillis()), - parsedQuery); - } + assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); + assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, + DateTime.parse("2014-11-01T00:00:00.000+00").getMillis(), + DateTime.parse("2014-12-08T23:59:59.999+00").getMillis()), + parsedQuery); query = "{\n" + " \"range\" : {\n" + @@ -370,28 +304,15 @@ public void testDateRangeBoundaries() throws IOException { " }\n" + "}"; parsedQuery = parseQuery(query).toQuery(createShardContext()); - assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); - if (parsedQuery instanceof LegacyNumericRangeQuery) { - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - - DateTime min = DateTime.parse("2014-11-30T23:59:59.999+00"); - assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); - assertFalse(rangeQuery.includesMin()); - - DateTime max = DateTime.parse("2014-12-08T00:00:00.000+00"); - assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); - assertFalse(rangeQuery.includesMax()); - } else { - assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, - DateTime.parse("2014-11-30T23:59:59.999+00").getMillis() + 1, - DateTime.parse("2014-12-08T00:00:00.000+00").getMillis() - 1), - parsedQuery); - } + assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); + assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, + DateTime.parse("2014-11-30T23:59:59.999+00").getMillis() + 1, + DateTime.parse("2014-12-08T00:00:00.000+00").getMillis() - 1), + parsedQuery); } public void testDateRangeQueryTimezone() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - long startDate = System.currentTimeMillis(); String query = "{\n" + " \"range\" : {\n" + " \"" + DATE_FIELD_NAME + "\" : {\n" + @@ -403,21 +324,8 @@ public void testDateRangeQueryTimezone() throws IOException { "}"; QueryShardContext context = createShardContext(); Query parsedQuery = 
parseQuery(query).toQuery(context); - if (parsedQuery instanceof PointRangeQuery) { - // TODO what can we assert - } else { - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - - // Min value was 2012-01-01 (UTC) so we need to remove one hour - DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00"); - // Max value is the nowInMillis set by the query shard context - long max = context.nowInMillis(); - - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); - - // We should not have a big difference here (should be some ms) - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max, lessThanOrEqualTo(60000L)); - } + assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); + // TODO: what else can we assert? query = "{\n" + " \"range\" : {\n" + diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index e75261326bb1c..d32ae8e03eda4 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -56,7 +56,6 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.UidFieldMapper; -import org.elasticsearch.index.mapper.internal.SeqNoFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; @@ -100,14 +99,14 @@ protected Collection> getPlugins() { return pluginList(InternalSettingsPlugin.class); } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long seqNo, long timestamp, long ttl, + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long seqNo, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field seqNoField = new NumericDocValuesField("_seq_no", seqNo); Field versionField = new NumericDocValuesField("_version", 0); document.add(uidField); document.add(versionField); - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Collections.singletonList(document), source, + return new ParsedDocument(versionField, seqNoField, id, type, routing, Collections.singletonList(document), source, mappingUpdate); } @@ -318,7 +317,7 @@ public void testMaybeFlush() throws Exception { assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder() .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), - new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + new ByteSizeValue(117 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefreshPolicy(randomBoolean() ?
IMMEDIATE : NONE).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument( @@ -327,8 +326,6 @@ public void testMaybeFlush() throws Exception { "test", null, SequenceNumbersService.UNASSIGNED_SEQ_NO, - -1, - -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc); @@ -367,7 +364,7 @@ public void testStressMaybeFlush() throws Exception { assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put( IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), - new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + new ByteSizeValue(117/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 52861f85def68..4ab702c33de5b 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -62,7 +62,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.fielddata.FieldDataStats; @@ -539,14 +538,14 @@ public void testShardStats() throws IOException { closeShards(shard); } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field versionField = new NumericDocValuesField("_version", 0); Field seqNoField = new NumericDocValuesField("_seq_no", 0); document.add(uidField); document.add(versionField); - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate); + return new ParsedDocument(versionField, seqNoField, id, type, routing, Arrays.asList(document), source, mappingUpdate); } public void testIndexingOperationsListeners() throws IOException { @@ -608,7 +607,7 @@ public void postDelete(Engine.Delete delete, Exception ex) { }); recoveryShardFromStore(shard); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), + ParsedDocument doc = testParsedDocument("1", "1", "test", null, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc); shard.index(index); diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 6f537574d2e6c..0598646c1639c 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -277,7 +277,7 @@ private Engine.IndexResult index(String id, String testFieldValue) { document.add(uidField); document.add(versionField); BytesReference source = new BytesArray(new byte[] { 1 }); - ParsedDocument doc = new ParsedDocument(versionField, seqNoField, id, type, null, -1, -1, Arrays.asList(document), source, null); + ParsedDocument doc = new ParsedDocument(versionField, seqNoField, id, type, null, Arrays.asList(document), source, null); Engine.Index index = new Engine.Index(new Term("_uid", uid), doc); return engine.index(index); } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index b4a5c2970ff42..0bd1c9c614015 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -86,7 +86,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.LongSupplier; import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; @@ -302,7 +301,7 @@ public void testStats() throws IOException { assertThat(stats.estimatedNumberOfOperations(), equalTo(0L)); assertThat(stats.getTranslogSizeInBytes(), equalTo(firstOperationPosition)); assertEquals(6, total.estimatedNumberOfOperations()); - assertEquals(461, total.getTranslogSizeInBytes()); + assertEquals(413, total.getTranslogSizeInBytes()); BytesStreamOutput out = new BytesStreamOutput(); total.writeTo(out); @@ -310,13 +309,13 @@ public void testStats() throws IOException { copy.readFrom(out.bytes().streamInput()); assertEquals(6, copy.estimatedNumberOfOperations()); - assertEquals(461, copy.getTranslogSizeInBytes()); + assertEquals(413, copy.getTranslogSizeInBytes()); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); copy.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"translog\":{\"operations\":6,\"size_in_bytes\":461}}", builder.string()); + assertEquals("{\"translog\":{\"operations\":6,\"size_in_bytes\":413}}", builder.string()); } try { @@ -1138,7 +1137,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { try (Translog ignored = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { fail("corrupted"); } catch (IllegalStateException ex) { - assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=3178, numOps=55, translogFileGeneration=2, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration=0, globalCheckpoint=-2}"); + assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=2298, numOps=55, translogFileGeneration=2, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration=0, globalCheckpoint=-2}"); } Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { diff --git 
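[editor's note: not part of the patch] The expected-size updates above are internally consistent with dropping _timestamp and _ttl, which each translog index operation used to serialize as an 8-byte long, i.e. 16 bytes per operation:

    // per-op flush threshold:  133 - 117   == 16
    // 55-op checkpoint offset: 3178 - 2298 == 880 == 55 * 16
    // testStats delta:         461 - 413   == 48  == 3 * 16, suggesting three of
    // the six operations in that test are index ops that carried the two fields

[end note]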
a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 9750cd35d01b5..b94e864fdd6fc 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -38,9 +38,9 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.similarity.BM25SimilarityProvider; @@ -90,7 +90,7 @@ public TestPlugin() {} @Override public Map getMappers() { - return Collections.singletonMap("fake-mapper", new StringFieldMapper.TypeParser()); + return Collections.singletonMap("fake-mapper", new KeywordFieldMapper.TypeParser()); } @Override diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 612e1d1e16bf0..5777745397603 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -695,7 +695,7 @@ public void testCombineTemplates() throws Exception{ .setCreate(true) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "string").field("analyzer", "custom_1").endObject() + .startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() .endObject().endObject().endObject()) .get()); assertThat(e.getMessage(), containsString("analyzer [custom_1] not found for field [field2]")); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 2c3730dc4222c..14335bde076de 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -120,7 +120,7 @@ public void testSimulate() throws Exception { source.put("foo", "bar"); source.put("fail", false); source.put("processed", true); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, source); assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 3906f82dc03f4..e16be95d2e663 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -75,7 +75,7 @@ public void setIngestDocument() { list.add(null); document.put("list", list); - ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + ingestDocument = new IngestDocument("index", "type", "id", null, null, 
document); } public void testSimpleGetFieldValue() { diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 947cb3f18d1c4..c00e9254ab289 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -20,14 +20,12 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -160,12 +158,7 @@ public void testExecutePropagateAllMetaDataUpdates() throws Exception { doAnswer((InvocationOnMock invocationOnMock) -> { IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) { - if (metaData == IngestDocument.MetaData.TTL) { - ingestDocument.setFieldValue(IngestDocument.MetaData.TTL.getFieldName(), "35d"); - } else { - ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName()); - } - + ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName()); } return null; }).when(processor).execute(any()); @@ -186,8 +179,6 @@ public void testExecutePropagateAllMetaDataUpdates() throws Exception { assertThat(indexRequest.id(), equalTo("update_id")); assertThat(indexRequest.routing(), equalTo("update_routing")); assertThat(indexRequest.parent(), equalTo("update_parent")); - assertThat(indexRequest.timestamp(), equalTo("update_timestamp")); - assertThat(indexRequest.ttl(), equalTo(new TimeValue(3024000000L))); } public void testExecuteFailure() throws Exception { @@ -266,53 +257,6 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { verify(completionHandler, never()).accept(anyBoolean()); } - public void testExecuteSetTTL() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d")); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, new CompoundProcessor(processor))); - - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); - @SuppressWarnings("unchecked") - Consumer failureHandler = mock(Consumer.class); - @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - - assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); - verify(failureHandler, never()).accept(any()); - verify(completionHandler, times(1)).accept(true); - } - - public void testExecuteSetInvalidTTL() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "abc")); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, new CompoundProcessor(processor))); - - 
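[editor's note: illustrative sketch, not part of the patch] The IngestDocument constructor calls rewritten above and below drop the two TTL-era metadata arguments; read against the old 8-argument form, the surviving parameters appear to be:

    // old: IngestDocument(index, type, id, routing, parent, timestamp, ttl, source)
    // new:
    IngestDocument doc = new IngestDocument(
            "index", "type", "id",
            null,    // routing
            null,    // parent
            source); // Map<String, Object> source

[end note]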
IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); - @SuppressWarnings("unchecked") - Consumer failureHandler = mock(Consumer.class); - @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class)); - verify(completionHandler, never()).accept(anyBoolean()); - } - - public void testExecuteProvidedTTL() throws Exception { - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, mock(CompoundProcessor.class))); - - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id") - .source(Collections.emptyMap()) - .ttl(1000L); - @SuppressWarnings("unchecked") - Consumer failureHandler = mock(Consumer.class); - @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - - assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); - verify(failureHandler, never()).accept(any()); - verify(completionHandler, times(1)).accept(true); - } - public void testBulkRequestExecutionWithFailures() throws Exception { BulkRequest bulkRequest = new BulkRequest(); String pipelineId = "_id"; @@ -439,7 +383,7 @@ private class IngestDocumentMatcher extends ArgumentMatcher { private final IngestDocument ingestDocument; public IngestDocumentMatcher(String index, String type, String id, Map source) { - this.ingestDocument = new IngestDocument(index, type, id, null, null, null, null, source); + this.ingestDocument = new IngestDocument(index, type, id, null, null, source); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index bdc2555209d98..84ded0a257755 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -259,10 +259,8 @@ public List getPipelineAggregations() { "fuzzy", "geo_bounding_box", "geo_distance", - "geo_distance_range", "geo_polygon", "geo_shape", - "geohash_cell", "has_child", "has_parent", "ids", diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index 9493ec048e70b..8e81a3a852ec5 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -52,7 +52,7 @@ protected int maximumNumberOfReplicas() { public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); - assertAcked(prepareCreate("test", 1)); + assertAcked(prepareCreate("test", 1).addMapping("type", "foo", "type=geo_point")); NumShards test = getNumShards("test"); @@ -66,7 +66,7 @@ public void 
testFailedSearchWithWrongQuery() throws Exception { for (int i = 0; i < 5; i++) { try { SearchResponse searchResponse = client().search( - searchRequest("test").source(new SearchSourceBuilder().query(new GeohashCellQuery.Builder("foo", "biz")))) + searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))) .actionGet(); assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries)); assertThat(searchResponse.getSuccessfulShards(), equalTo(0)); @@ -101,7 +101,7 @@ public void testFailedSearchWithWrongQuery() throws Exception { for (int i = 0; i < 5; i++) { try { SearchResponse searchResponse = client().search( - searchRequest("test").source(new SearchSourceBuilder().query(new GeohashCellQuery.Builder("foo", "biz")))) + searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))) .actionGet(); assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries)); assertThat(searchResponse.getSuccessfulShards(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 9ed6c60b008df..c201f03a7d80c 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder; import org.elasticsearch.script.Script; @@ -89,7 +89,8 @@ private Set prepareData(int numShards) throws Exception { } client().admin().indices().create(createIndexRequest("test") - .settings(settingsBuilder)) + .settings(settingsBuilder) + .mapping("type", "foo", "type=geo_point")) .actionGet(); ensureGreen(); @@ -399,7 +400,7 @@ public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); try { SearchResponse searchResponse = client().search( - searchRequest("test").source(new SearchSourceBuilder().query(new GeohashCellQuery.Builder("foo", "biz")))).actionGet(); + searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))).actionGet(); assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries)); assertThat(searchResponse.getSuccessfulShards(), equalTo(0)); assertThat(searchResponse.getFailedShards(), equalTo(test.numPrimaries)); @@ -447,8 +448,7 @@ public void testFailedMultiSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed multi search with a wrong query"); MultiSearchResponse response = client().prepareMultiSearch() - // Add geo distance range query against a field that doesn't exist (should be a geo point for the query to work) - .add(client().prepareSearch("test").setQuery(QueryBuilders.geoDistanceRangeQuery("non_existing_field", 1, 1).from(10).to(15))) + .add(client().prepareSearch("test").setQuery(new MatchQueryBuilder("foo", "biz"))) .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); diff --git 
a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 0a73c79ff9fc0..9b54363936f0f 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -2798,7 +2798,7 @@ public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException .field("type", "geo_point") .endObject() .startObject("jd") - .field("type", "string") + .field("type", "text") .endObject() .endObject() .endObject(); @@ -2851,35 +2851,6 @@ public void testKeywordFieldHighlighting() throws IOException { equalTo("some text")); } - public void testStringFieldHighlighting() throws IOException { - // check that string field highlighting on old indexes works - XContentBuilder mappings = jsonBuilder(); - mappings.startObject(); - mappings.startObject("type") - .startObject("properties") - .startObject("string_field") - .field("type", "string") - .endObject() - .endObject() - .endObject(); - mappings.endObject(); - assertAcked(prepareCreate("test") - .addMapping("type", mappings) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_2))); - - client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("string_field", "some text").endObject()) - .get(); - refresh(); - SearchResponse search = client().prepareSearch().setSource(new SearchSourceBuilder() - .query(QueryBuilders.matchQuery("string_field", "some text")) - .highlighter(new HighlightBuilder().field("*"))).get(); - assertNoFailures(search); - assertThat(search.getHits().totalHits(), equalTo(1L)); - assertThat(search.getHits().getAt(0).getHighlightFields().get("string_field").getFragments()[0].string(), - equalTo("some text")); - } - public void testACopyFieldWithNestedQuery() throws Exception { String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo") diff --git a/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index da844b1969e27..f20c87ad3874c 100644 --- a/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; @@ -299,21 +298,18 @@ public void testScriptDocAndFields() throws Exception { assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).id(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0)); 
assertThat(response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0L)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).fields().get("date1").values().get(0), equalTo(25000L)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0)); assertThat(response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0)); @@ -330,17 +326,14 @@ public void testScriptDocAndFields() throws Exception { assertThat(response.getHits().totalHits(), equalTo(3L)); assertThat(response.getHits().getAt(0).id(), equalTo("1")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0)); } @@ -369,7 +362,6 @@ public void testUidBasedScriptFields() throws Exception { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("uid"))); assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); } @@ -387,7 +379,6 @@ public void testUidBasedScriptFields() throws Exception { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("id"))); assertThat(response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i))); } @@ -406,7 +397,6 @@ public void testUidBasedScriptFields() throws Exception { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new 
HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("type"))); assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); } @@ -427,7 +417,6 @@ public void testUidBasedScriptFields() throws Exception { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("uid", "type", "id"))); assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); @@ -600,7 +589,6 @@ public void testStoredFieldsWithoutSource() throws Exception { assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); Set fields = new HashSet<>(searchResponse.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", "float_field", "double_field", "date_field", "boolean_field", "binary_field"))); @@ -816,7 +804,6 @@ public void testFieldsPulledFromFieldData() throws Exception { assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); Set fields = new HashSet<>(searchResponse.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", "float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field"))); @@ -880,8 +867,6 @@ public void testLoadMetadata() throws Exception { indexRandom(true, client().prepareIndex("test", "my-type1", "1") .setRouting("1") - .setTimestamp("205097") - .setTTL(10000000000000L) .setParent("parent_1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject())); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index 97615f63c9dca..646cc310fc866 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -55,9 +55,6 @@ public void testSimpleBoundingBoxTest() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); @@ -129,9 +126,6 @@ public void testLimit2BoundingBox() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); @@ -184,9 +178,6 @@ public void testCompleteLonRange() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 6c9acd7e8a771..3594f51c722bc 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -105,9 +105,6 @@ public void setupTestIndex() throws IOException { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 7f880211c3bda..2707e3c0b275c 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -21,7 +21,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.query.SpatialArgs; @@ -34,7 +33,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; @@ -49,8 +47,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.query.GeohashCellQuery; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; @@ -70,22 +66,16 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; 
-import java.util.List;
-import java.util.Map;
 import java.util.Random;
 import java.util.zip.GZIPInputStream;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery;
 import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;
-import static org.elasticsearch.index.query.QueryBuilders.geoHashCellQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.closeTo;
@@ -383,9 +373,6 @@ public void testBulk() throws Exception {
                 .startObject("properties")
                 .startObject("pin")
                 .field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            xContentBuilder.field("lat_lon", true);
-        }
         xContentBuilder.field("store", true)
                 .endObject()
                 .startObject("location")
@@ -430,13 +417,7 @@
             GeoPoint point = new GeoPoint();
             for (SearchHit hit : distance.getHits()) {
                 String name = hit.getId();
-                if (version.before(Version.V_2_2_0)) {
-                    point.resetFromString(hit.fields().get("pin").getValue().toString());
-                } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-                    point.resetFromIndexHash(hit.fields().get("pin").getValue());
-                } else {
-                    point.resetFromString(hit.getFields().get("pin").getValue());
-                }
+                point.resetFromString(hit.getFields().get("pin").getValue());
                 double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851);

                 assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d));
@@ -447,82 +428,6 @@
             }
         }

-    public void testLegacyGeohashCellFilter() throws IOException {
-        String geohash = randomhash(10);
-        logger.info("Testing geohash_cell filter for [{}]", geohash);
-
-        Collection<? extends CharSequence> neighbors = GeoHashUtils.neighbors(geohash);
-        Collection<? extends CharSequence> parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));
-
-        logger.info("Neighbors {}", neighbors);
-        logger.info("Parent Neighbors {}", parentNeighbors);
-
-        ensureYellow();
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-
-        client().admin().indices().prepareCreate("locations").setSettings(settings).addMapping("location", "pin",
-                "type=geo_point,geohash_prefix=true,lat_lon=false").execute().actionGet();
-
-        // Index a pin
-        client().prepareIndex("locations", "location", "1").setCreate(true).setSource("pin", geohash).execute().actionGet();
-
-        // index neighbors
-        Iterator<? extends CharSequence> iterator = neighbors.iterator();
-        for (int i = 0; iterator.hasNext(); i++) {
-            client().prepareIndex("locations", "location", "N" + i).setCreate(true).setSource("pin", iterator.next()).execute().actionGet();
-        }
-
-        // Index parent cell
-        client().prepareIndex("locations", "location", "p").setCreate(true).setSource("pin", geohash.substring(0, geohash.length() - 1)).execute().actionGet();
-
-        // index neighbors
-        iterator = parentNeighbors.iterator();
-        for (int i = 0; iterator.hasNext(); i++) {
-            client().prepareIndex("locations", "location", "p" + i).setCreate(true).setSource("pin", iterator.next()).execute().actionGet();
-        }
-
-        client().admin().indices().prepareRefresh("locations").execute().actionGet();
-
-        Map<GeohashCellQuery.Builder, Long> expectedCounts = new HashMap<>();
-        Map<GeohashCellQuery.Builder, String[]> expectedResults = new HashMap<>();
-
-        expectedCounts.put(geoHashCellQuery("pin", geohash, false), 1L);
-
-        expectedCounts.put(geoHashCellQuery("pin", geohash.substring(0, geohash.length() - 1), true), 2L + neighbors.size() + parentNeighbors.size());
-
-        // Testing point formats and precision
-        GeoPoint point = GeoPoint.fromGeohash(geohash);
-        int precision = geohash.length();
-
-        expectedCounts.put(geoHashCellQuery("pin", point).neighbors(true).precision(precision), 1L + neighbors.size());
-
-
-        List<GeohashCellQuery.Builder> filterBuilders = new ArrayList<>(expectedCounts.keySet());
-        for (GeohashCellQuery.Builder builder : filterBuilders) {
-            try {
-                long expectedCount = expectedCounts.get(builder);
-                SearchResponse response = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery())
-                        .setPostFilter(builder).setSize((int) expectedCount).get();
-                assertHitCount(response, expectedCount);
-                String[] expectedIds = expectedResults.get(builder);
-                if (expectedIds == null) {
-                    ArrayList<String> ids = new ArrayList<>();
-                    for (SearchHit hit : response.getHits()) {
-                        ids.add(hit.id());
-                    }
-                    expectedResults.put(builder, ids.toArray(Strings.EMPTY_ARRAY));
-                    continue;
-                }
-
-                assertSearchHits(response, expectedIds);
-
-            } catch (AssertionError error) {
-                throw new AssertionError(error.getMessage() + "\n geohash_cell filter:" + builder, error);
-            }
-        }
-    }
-
     public void testNeighbors() {
         // Simple root case
         assertThat(GeoHashUtils.addNeighbors("7", new ArrayList<CharSequence>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java
index 92ed3875e3e9e..1d864c14dec80 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java
@@ -59,9 +59,6 @@ protected void setupSuiteScopeCluster() throws Exception {
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("location").field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            xContentBuilder.field("lat_lon", true);
-        }
         xContentBuilder.endObject().endObject().endObject().endObject();
         assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
         ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
index 1a10a700948ee..b5067dbf74a2d 100644
--- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
@@ -668,7 +668,7 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, ExecutionException {
                 .put(SETTING_NUMBER_OF_SHARDS, 3)
                 .put(SETTING_NUMBER_OF_REPLICAS, 0)
                 );
-        assertAcked(builder.addMapping("type", "title", "type=string", "body", "type=string"));
+        assertAcked(builder.addMapping("type", "title", "type=text", "body", "type=text"));
         ensureGreen();
         List<IndexRequestBuilder> builders = new ArrayList<>();
         builders.add(client().prepareIndex(idx, "type", "1").setSource(
diff --git a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
index 1af3bb99b2877..41085a9901ec7 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
@@ -345,7 +345,7 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception {
                 .startObject("type1")
                 .startObject("properties")
                 .startObject("body")
-                .field("type", "string")
+                .field("type", "text")
                 .field("analyzer", "stop")
                 .endObject()
                 .endObject()
diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
index 006f69e477495..df50d3126c7af 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
@@ -39,9 +39,9 @@
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.ObjectMapper.Nested;
 import org.elasticsearch.index.query.IdsQueryBuilder;
@@ -227,11 +227,11 @@ public ObjectMapper getObjectMapper(String name) {
     }

     /**
-     * Return a field type. We use {@link DoubleFieldType} by default since it is compatible with all sort modes
+     * Return a field type. We use {@link NumberFieldMapper.NumberFieldType} by default since it is compatible with all sort modes.
      * Tests that require a field type other than double can override this.
      */
     protected MappedFieldType provideMappedFieldType(String name) {
-        DoubleFieldType doubleFieldType = new DoubleFieldType();
+        NumberFieldMapper.NumberFieldType doubleFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
         doubleFieldType.setName(name);
         doubleFieldType.setHasDocValues(true);
         return doubleFieldType;
diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java
index 38f6178caee3c..d6863d0393122 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java
@@ -20,33 +20,25 @@
 package org.elasticsearch.search.sort;

 import org.elasticsearch.Version;
-import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
 import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.List;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;
-import static org.elasticsearch.index.query.QueryBuilders.geoDistanceRangeQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -54,9 +46,7 @@
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
-import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.equalTo;

@@ -67,157 +57,11 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class);
     }

-    public void testLegacyGeoDistanceRangeQuery() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-                .startObject("location").field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            xContentBuilder.field("lat_lon", true);
-        }
-        xContentBuilder.endObject().endObject().endObject().endObject();
-        assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
-        ensureGreen();
-
-        indexRandom(true,
-                client().prepareIndex("test", "type1", "1")
-                        .setSource(jsonBuilder().startObject().field("name", "New York").startObject("location").field("lat", 40.7143528)
-                                .field("lon", -74.0059731).endObject().endObject()),
-                // to NY: 5.286 km
-                client().prepareIndex("test", "type1", "2")
-                        .setSource(jsonBuilder().startObject().field("name", "Times Square").startObject("location").field("lat", 40.759011)
-                                .field("lon", -73.9844722).endObject().endObject()),
-                // to NY: 0.4621 km
-                client().prepareIndex("test", "type1", "3")
-                        .setSource(jsonBuilder().startObject().field("name", "Tribeca").startObject("location").field("lat", 40.718266)
-                                .field("lon", -74.007819).endObject().endObject()),
-                // to NY: 1.055 km
-                client().prepareIndex("test", "type1", "4")
-                        .setSource(jsonBuilder().startObject().field("name", "Wall Street").startObject("location").field("lat", 40.7051157)
-                                .field("lon", -74.0088305).endObject().endObject()),
-                // to NY: 1.258 km
-                client().prepareIndex("test", "type1", "5")
-                        .setSource(jsonBuilder().startObject().field("name", "Soho").startObject("location").field("lat", 40.7247222)
-                                .field("lon", -74).endObject().endObject()),
-                // to NY: 2.029 km
-                client().prepareIndex("test", "type1", "6")
-                        .setSource(jsonBuilder().startObject().field("name", "Greenwich Village").startObject("location")
-                                .field("lat", 40.731033).field("lon", -73.9962255).endObject().endObject()),
-                // to NY: 8.572 km
-                client().prepareIndex("test", "type1", "7").setSource(jsonBuilder().startObject().field("name", "Brooklyn")
-                        .startObject("location").field("lat", 40.65).field("lon", -73.95).endObject().endObject()));
-
-        SearchResponse searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("3km").point(40.7143528, -74.0059731)).execute().actionGet();
-        assertHitCount(searchResponse, 5);
-        assertThat(searchResponse.getHits().hits().length, equalTo(5));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6")));
-        }
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("3km").point(40.7143528, -74.0059731).optimizeBbox("indexed")).execute()
-                .actionGet();
-        assertHitCount(searchResponse, 5);
-        assertThat(searchResponse.getHits().hits().length, equalTo(5));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6")));
-        }
-
-        // now with a PLANE type
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("3km").geoDistance(GeoDistance.PLANE).point(40.7143528, -74.0059731))
-                .execute().actionGet();
-        assertHitCount(searchResponse, 5);
-        assertThat(searchResponse.getHits().hits().length, equalTo(5));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6")));
-        }
-
-        // factor type is really too small for this resolution
-
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("2km").point(40.7143528, -74.0059731)).execute().actionGet();
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().hits().length, equalTo(4));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
-        }
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("2km").point(40.7143528, -74.0059731).optimizeBbox("indexed")).execute()
-                .actionGet();
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().hits().length, equalTo(4));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
-        }
-
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("1.242mi").point(40.7143528, -74.0059731)).execute().actionGet();
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().hits().length, equalTo(4));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
-        }
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceQuery("location").distance("1.242mi").point(40.7143528, -74.0059731).optimizeBbox("indexed")).execute()
-                .actionGet();
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().hits().length, equalTo(4));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
-        }
-
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("1.0km").to("2.0km")).execute().actionGet();
-        assertHitCount(searchResponse, 2);
-        assertThat(searchResponse.getHits().hits().length, equalTo(2));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5")));
-        }
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("1.0km").to("2.0km").optimizeBbox("indexed"))
-                .execute().actionGet();
-        assertHitCount(searchResponse, 2);
-        assertThat(searchResponse.getHits().hits().length, equalTo(2));
-        for (SearchHit hit : searchResponse.getHits()) {
-            assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5")));
-        }
-
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).to("2.0km")).execute().actionGet();
-        assertHitCount(searchResponse, 4);
-        assertThat(searchResponse.getHits().hits().length, equalTo(4));
-
-        searchResponse = client().prepareSearch() // from NY
-                .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("2.0km")).execute().actionGet();
-        assertHitCount(searchResponse, 3);
-        assertThat(searchResponse.getHits().hits().length, equalTo(3));
-
-        // SORTING
-
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery())
-                .addSort(SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
-                .actionGet();
-
-        assertHitCount(searchResponse, 7);
-        assertOrderedSearchHits(searchResponse, "1", "3", "4", "5", "6", "2", "7");
-
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery())
-                .addSort(SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
-                .actionGet();
-
-        assertHitCount(searchResponse, 7);
-        assertOrderedSearchHits(searchResponse, "7", "2", "6", "5", "4", "3", "1");
-    }
-
     public void testDistanceSortingMVFields() throws Exception {
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                 .startObject("locations").field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            xContentBuilder.field("lat_lon", true).field("coerce", true);
-        }
         xContentBuilder.field("ignore_malformed", true).endObject().endObject().endObject().endObject();
         assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
         ensureGreen();
@@ -347,9 +191,6 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception {
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                 .startObject("locations").field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            xContentBuilder.field("lat_lon", true);
-        }
         xContentBuilder.endObject().endObject().endObject().endObject();
         assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
         ensureGreen();
@@ -396,9 +237,6 @@ public void testDistanceSortingNestedFields() throws Exception {
                 .startObject("name").field("type", "text").endObject().startObject("branches").field("type", "nested")
                 .startObject("properties").startObject("name").field("type", "text").endObject().startObject("location")
                 .field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            xContentBuilder.field("lat_lon", true);
-        }
         xContentBuilder.endObject().endObject().endObject().endObject().endObject().endObject();

         assertAcked(prepareCreate("companies").setSettings(settings).addMapping("company", xContentBuilder));
@@ -548,9 +386,6 @@ public void testGeoDistanceFilter() throws IOException {
         XContentBuilder mapping = JsonXContent.contentBuilder().startObject().startObject("location").startObject("properties")
                 .startObject("pin").field("type", "geo_point");
-        if (version.before(Version.V_2_2_0)) {
-            mapping.field("lat_lon", true);
-        }
         mapping.endObject().endObject().endObject().endObject();

         XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", GeoHashUtils.stringEncode(lon, lat)).endObject();
@@ -567,55 +402,4 @@
         assertHitCount(result, 1);
     }

-    private static double randomLon() {
-        return randomDouble() * 360 - 180;
-    }
-
-    private static double randomLat() {
-        return randomDouble() * 180 - 90;
-    }
-
-    public void testDuelOptimizations() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_1_2);
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point,lat_lon=true"));
-        final int numDocs = scaledRandomIntBetween(3000, 10000);
-        List<IndexRequestBuilder> docs = new ArrayList<>();
-        for (int i = 0; i < numDocs; ++i) {
-            docs.add(client().prepareIndex("index", "type").setSource(jsonBuilder().startObject().startObject("location")
-                    .field("lat", randomLat()).field("lon", randomLon()).endObject().endObject()));
-        }
-        indexRandom(true, docs);
-        ensureSearchable();
-
-        for (int i = 0; i < 10; ++i) {
-            final double originLat = randomLat();
-            final double originLon = randomLon();
-            final String distance = DistanceUnit.KILOMETERS.toString(randomIntBetween(1, 10000));
-            for (GeoDistance geoDistance : Arrays.asList(GeoDistance.ARC, GeoDistance.SLOPPY_ARC)) {
-                logger.info("Now testing GeoDistance={}, distance={}, origin=({}, {})", geoDistance, distance, originLat, originLon);
-                GeoDistanceQueryBuilder qb = QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance)
-                        .geoDistance(geoDistance);
-                long matches;
-                for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) {
-                    qb.optimizeBbox(optimizeBbox);
-                    SearchResponse resp = client().prepareSearch("index").setSize(0).setQuery(QueryBuilders.constantScoreQuery(qb))
-                            .execute().actionGet();
-                    matches = assertDuelOptimization(resp);
-                    logger.info("{} -> {} hits", optimizeBbox, matches);
-                }
-            }
-        }
-    }
-
-    private static long assertDuelOptimization(SearchResponse resp) {
-        long matches = -1;
-        assertSearchResponse(resp);
-        if (matches < 0) {
-            matches = resp.getHits().totalHits();
-        } else {
-            assertEquals(matches, matches = resp.getHits().totalHits());
-        }
-        return matches;
-    }
 }
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java
deleted file mode 100644
index 172183c57c875..0000000000000
--- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java
+++ /dev/null
@@ -1,1199 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ -package org.elasticsearch.search.suggest; - -import com.carrotsearch.hppc.ObjectLongHashMap; -import com.carrotsearch.randomizedtesting.generators.RandomStrings; - -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; -import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; -import org.elasticsearch.action.admin.indices.segments.ShardSegments; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.MapperException; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion.FuzzyOptions; -import org.elasticsearch.search.suggest.completion2x.CompletionSuggestion; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.concurrent.ExecutionException; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsCollectionContaining.hasItem; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.hamcrest.core.IsNull.notNullValue; - -@SuppressCodecs("*") // requires custom completion format -public class CompletionSuggestSearch2xIT extends ESIntegTestCase { - - private final String INDEX = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - private final String TYPE = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - private final String FIELD = 
RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - private final Version PRE2X_VERSION = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_1); - private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class); - } - - public void testSimple() throws Exception { - createIndexAndMapping(completionMappingBuilder); - String[][] input = {{"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, - {"Generator", "Foo Fighters Generator"}, {"Learn to Fly", "Foo Fighters Learn to Fly"}, - {"The Prodigy"}, {"The Prodigy"}, {"The Prodigy"}, {"Firestarter", "The Prodigy Firestarter"}, - {"Turbonegro"}, {"Turbonegro"}, {"Get it on", "Turbonegro Get it on"}}; // work with frequencies - for (int i = 0; i < input.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject().startObject(FIELD) - .array("input", input[i]) - .endObject() - .endObject() - ) - .execute().actionGet(); - } - - refresh(); - - assertSuggestionsNotInOrder("f", "Foo Fighters", "Firestarter", "Foo Fighters Generator", "Foo Fighters Learn to Fly"); - assertSuggestionsNotInOrder("t", "The Prodigy", "Turbonegro", "Turbonegro Get it on", "The Prodigy Firestarter"); - } - - public void testBasicPrefixSuggestion() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - for (int i = 0; i < 2; i++) { - createData(i == 0); - assertSuggestions("f", "Firestarter - The Prodigy", "Foo Fighters", - "Generator - Foo Fighters", "Learn to Fly - Foo Fighters"); - assertSuggestions("ge", "Generator - Foo Fighters", "Get it on - Turbonegro"); - assertSuggestions("ge", "Generator - Foo Fighters", "Get it on - Turbonegro"); - assertSuggestions("t", "The Prodigy", "Firestarter - The Prodigy", "Get it on - Turbonegro", "Turbonegro"); - } - } - - public void testThatWeightsAreWorking() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - List similarNames = Arrays.asList("the", "The Prodigy", "The Verve", "The the"); - // the weight is 1000 divided by string length, so the results are easy to to check - for (String similarName : similarNames) { - client().prepareIndex(INDEX, TYPE, similarName).setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(similarName).endArray() - .field("weight", 1000 / similarName.length()) - .endObject().endObject() - ).get(); - } - - refresh(); - - assertSuggestions("the", "the", "The the", "The Verve", "The Prodigy"); - } - - public void testThatWeightMustBeAnInteger() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("sth").endArray() - .field("weight", 2.5) - .endObject().endObject() - ).get(); - fail("Indexing with a float weight was successful, but should not be"); - } catch (MapperParsingException e) { - assertThat(e.toString(), containsString("2.5")); - } - } - - public void testThatWeightCanBeAString() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("testing").endArray() - .field("weight", "10") - .endObject().endObject() - ).get(); - - refresh(); - - 
SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("test").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "testing"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest().getSuggestion("testSuggestions") - .getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - - assertThat(prefixOption.getText().string(), equalTo("testing")); - assertThat(prefixOption.getScore(), equalTo(10F)); - } - - public void testThatWeightMustNotBeANonNumberString() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("sth").endArray() - .field("weight", "thisIsNotValid") - .endObject().endObject() - ).get(); - fail("Indexing with a non-number representing string as weight was successful, but should not be"); - } catch (MapperParsingException e) { - assertThat(e.toString(), containsString("thisIsNotValid")); - } - } - - public void testThatWeightAsStringMustBeInt() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - String weight = String.valueOf(Long.MAX_VALUE - 4); - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("testing").endArray() - .field("weight", weight) - .endObject().endObject() - ).get(); - fail("Indexing with weight string representing value > Int.MAX_VALUE was successful, but should not be"); - } catch (MapperParsingException e) { - assertThat(e.toString(), containsString(weight)); - } - } - - public void testThatInputCanBeAStringInsteadOfAnArray() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .field("input", "Foo Fighters") - .field("output", "Boo Fighters") - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("f", "Boo Fighters"); - } - - public void testThatPayloadsAreArbitraryJsonObjects() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .startObject("payload").field("foo", "bar").startArray("test").value("spam").value("eggs") - .endArray().endObject() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("foo").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "Boo Fighters"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest() - .getSuggestion("testSuggestions").getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - assertThat(prefixOption.getPayload(), is(notNullValue())); - - // 
parse JSON - Map jsonMap = prefixOption.getPayloadAsMap(); - assertThat(jsonMap.size(), is(2)); - assertThat(jsonMap.get("foo").toString(), is("bar")); - assertThat(jsonMap.get("test"), is(instanceOf(List.class))); - List listValues = (List) jsonMap.get("test"); - assertThat(listValues, hasItems("spam", "eggs")); - } - - public void testPayloadAsNumeric() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .field("payload", 1) - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("foo").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "Boo Fighters"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest() - .getSuggestion("testSuggestions").getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - assertThat(prefixOption.getPayload(), is(notNullValue())); - - assertThat(prefixOption.getPayloadAsLong(), equalTo(1L)); - } - - public void testPayloadAsString() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .field("payload", "test") - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("foo").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "Boo Fighters"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest() - .getSuggestion("testSuggestions").getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - assertThat(prefixOption.getPayload(), is(notNullValue())); - - assertThat(prefixOption.getPayloadAsString(), equalTo("test")); - } - - public void testThatExceptionIsThrownWhenPayloadsAreDisabledButInIndexRequest() throws Exception { - completionMappingBuilder.payloads(false); - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .startArray("payload").value("spam").value("eggs").endArray() - .endObject().endObject() - ).get(); - fail("expected MapperException"); - } catch (MapperException expected) { - } - } - - public void testDisabledPreserveSeparators() throws Exception { - completionMappingBuilder.preserveSeparators(false); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - 
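// This test and the next pin down preserve_separators: with it disabled, the suggester
// drops separators when building the lookup structure, so the single token "foof" can
// still complete to "Foo Fighters". A rough sketch of that normalization in plain Java
// (an approximation for illustration, not the real analysis chain):
//
//   String key = "Foo Fighters".toLowerCase(Locale.ROOT).replaceAll("\\s+", "");
//   key.startsWith("foof");   // true, so "foof" matches once separators are gone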
.startArray("input").value("Foo Fighters").endArray() - .field("weight", 10) - .endObject().endObject() - ).get(); - - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foof").endArray() - .field("weight", 20) - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("foof", "Foof", "Foo Fighters"); - } - - public void testEnabledPreserveSeparators() throws Exception { - completionMappingBuilder.preserveSeparators(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .endObject().endObject() - ).get(); - - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foof").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("foof", "Foof"); - } - - public void testThatMultipleInputsAreSupported() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").value("Fu Fighters").endArray() - .field("output", "The incredible Foo Fighters") - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("foo", "The incredible Foo Fighters"); - assertSuggestions("fu", "The incredible Foo Fighters"); - } - - public void testThatShortSyntaxIsWorking() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startArray(FIELD) - .value("The Prodigy Firestarter").value("Firestarter") - .endArray().endObject() - ).get(); - - refresh(); - - assertSuggestions("t", "The Prodigy Firestarter"); - assertSuggestions("f", "Firestarter"); - } - - public void testThatDisablingPositionIncrementsWorkForStopwords() throws Exception { - // analyzer which removes stopwords... 
so may not be the simple one - completionMappingBuilder.searchAnalyzer("classic").indexAnalyzer("classic").preservePositionIncrements(false); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("The Beatles").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("b", "The Beatles"); - } - - public void testThatSynonymsWork() throws Exception { - Settings.Builder settingsBuilder = Settings.builder() - .put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom") - .put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard") - .putArray("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms") - .put("analysis.filter.my_synonyms.type", "synonym") - .putArray("analysis.filter.my_synonyms.synonyms", "foo,renamed"); - completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms"); - createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // get suggestions for renamed - assertSuggestions("r", "Foo Fighters"); - } - - public void testThatUpgradeToMultiFieldTypeWorks() throws Exception { - final XContentBuilder mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "string") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping)); - client().prepareIndex(INDEX, TYPE, "1") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(INDEX).setType(TYPE) - .setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "string") - .startObject("fields") - .startObject("suggest").field("type", "completion").field("analyzer", "simple").endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("suggs", - new CompletionSuggestionBuilder(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, "suggs"); - - client().prepareIndex(INDEX, TYPE, "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - SearchResponse afterReindexingResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("suggs", - SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); - } - - public void testThatUpgradeToMultiFieldsWorks() throws Exception { - final XContentBuilder mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - 
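// Note on testThatDisablingPositionIncrementsWorkForStopwords above: an analyzer that
// removes "the" leaves a position gap before "beatles", and the 2.x prefix suggester
// cannot bridge that gap unless preserve_position_increments is false. The gap is visible
// with a plain Lucene token stream (a sketch, assuming lucene-core 6.x on the classpath):
//
//   Analyzer a = new StandardAnalyzer(StandardAnalyzer.STOP_WORDS_SET);
//   try (TokenStream ts = a.tokenStream(FIELD, "The Beatles")) {
//       CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
//       PositionIncrementAttribute posInc = ts.addAttribute(PositionIncrementAttribute.class);
//       ts.reset();
//       while (ts.incrementToken()) {
//           System.out.println(term + " +" + posInc.getPositionIncrement()); // beatles +2
//       }
//       ts.end();
//   }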
.startObject("properties") - .startObject(FIELD) - .field("type", "string") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate(INDEX) - .addMapping(TYPE, mapping) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id))); - client().prepareIndex(INDEX, TYPE, "1") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - PutMappingResponse putMappingResponse = client().admin().indices() - .preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "string") - .startObject("fields") - .startObject("suggest").field("type", "completion").field("analyzer", "simple").endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("suggs", - SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, "suggs"); - - client().prepareIndex(INDEX, TYPE, "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - SearchResponse afterReindexingResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("suggs", - SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); - } - - public void testThatFuzzySuggesterWorks() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv", Fuzziness.ONE).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - - suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // edit distance 1 - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - // edit distance 2 - suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10)) - 
).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", - FuzzyOptions.builder().setTranspositions(false).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", - FuzzyOptions.builder().setTranspositions(true).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriva", - FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nrivan", - FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", - FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirvo", - FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterIsUnicodeAware() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("ööööö").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // suggestion with a character, which needs unicode awareness - CompletionSuggestionBuilder completionSuggestionBuilder = - 
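// The fuzzy tests above reduce to edit distance against the indexed prefix: "Norw" vs
// "Nirv" needs two substitutions (o->i, w->v), so Fuzziness.ONE misses and Fuzziness.TWO
// matches, while "Nriv" vs "Nirv" is a single adjacent transposition, which costs one
// edit only when transpositions are enabled; setFuzzyMinLength and setFuzzyPrefixLength
// merely gate when that budget applies. A plain-Java sketch of the metric (optimal string
// alignment; Lucene's Levenshtein automata are the real mechanism):
//
//   static int editDistance(String a, String b, boolean transpositions) {
//       int[][] d = new int[a.length() + 1][b.length() + 1];
//       for (int i = 0; i <= a.length(); i++) d[i][0] = i;
//       for (int j = 0; j <= b.length(); j++) d[0][j] = j;
//       for (int i = 1; i <= a.length(); i++) {
//           for (int j = 1; j <= b.length(); j++) {
//               int cost = a.charAt(i - 1) == b.charAt(j - 1) ? 0 : 1;
//               d[i][j] = Math.min(d[i - 1][j - 1] + cost, Math.min(d[i - 1][j] + 1, d[i][j - 1] + 1));
//               if (transpositions && i > 1 && j > 1
//                       && a.charAt(i - 1) == b.charAt(j - 2) && a.charAt(i - 2) == b.charAt(j - 1)) {
//                   d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + 1);   // count the swap as one edit
//               }
//           }
//       }
//       return d[a.length()][b.length()];
//   }
//   // editDistance("norw", "nirv", false) == 2
//   // editDistance("nriv", "nirv", false) == 2, but == 1 with transpositions enabled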
SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", - FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "ööööö"); - - // removing unicode awareness leads to no result - completionSuggestionBuilder.prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()); - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo" ,completionSuggestionBuilder)).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - // increasing edit distance instead of unicode awareness works again, as this is only a single character - completionSuggestionBuilder.prefix("öööи", FuzzyOptions.builder().setFuzziness(2).build()); - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "ööööö"); - } - - public void testThatStatsAreWorking() throws Exception { - String otherField = "testOtherField"; - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id))); - - PutMappingResponse putMappingResponse = client().admin().indices() - .preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD.toString()) - .field("type", "completion").field("analyzer", "simple") - .endObject() - .startObject(otherField) - .field("type", "completion").field("analyzer", "simple") - .endObject() - .endObject().endObject().endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - // Index two entities - client().prepareIndex(INDEX, TYPE, "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").field(otherField, "WHATEVER").endObject()) - .get(); - client().prepareIndex(INDEX, TYPE, "2").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Bar Fighters").field(otherField, "WHATEVER2").endObject()) - .get(); - - // Get all stats - IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(INDEX).setIndices(INDEX) - .setCompletion(true).get(); - CompletionStats completionStats = indicesStatsResponse.getIndex(INDEX).getPrimaries().completion; - assertThat(completionStats, notNullValue()); - long totalSizeInBytes = completionStats.getSizeInBytes(); - assertThat(totalSizeInBytes, is(greaterThan(0L))); - - IndicesStatsResponse singleFieldStats = client().admin().indices().prepareStats(INDEX) - .setIndices(INDEX).setCompletion(true).setCompletionFields(FIELD).get(); - long singleFieldSizeInBytes = singleFieldStats.getIndex(INDEX).getPrimaries().completion.getFields().get(FIELD); - IndicesStatsResponse otherFieldStats = client().admin().indices().prepareStats(INDEX) - .setIndices(INDEX).setCompletion(true).setCompletionFields(otherField).get(); - long otherFieldSizeInBytes = otherFieldStats.getIndex(INDEX).getPrimaries().completion.getFields().get(otherField); - assertThat(singleFieldSizeInBytes + otherFieldSizeInBytes, is(totalSizeInBytes)); - - // regexes - IndicesStatsResponse regexFieldStats = client().admin().indices().prepareStats(INDEX) - 
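// Why testThatFuzzySuggesterIsUnicodeAware above behaves this way: without
// setUnicodeAware(true) the fuzzy automaton measures edits over the UTF-8 byte encoding,
// and the mismatched characters here are all two bytes long, so substituting one character
// already costs two byte-level edits. Quick check in plain Java (java.nio.charset):
//
//   "и".getBytes(StandardCharsets.UTF_8).length   // 2
//   "ö".getBytes(StandardCharsets.UTF_8).length   // 2
//
// Hence "öööи" -> "ööööö" needs either code-point awareness or an edit budget of 2.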
.setIndices(INDEX).setCompletion(true).setCompletionFields("*").get(); - ObjectLongHashMap<String> fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields(); - long regexSizeInBytes = fields.get(FIELD) + fields.get(otherField); - assertThat(regexSizeInBytes, is(totalSizeInBytes)); - } - - public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - try { - client().prepareSearch(INDEX).setTypes(TYPE).addSort(new FieldSortBuilder(FIELD)).execute().actionGet(); - fail("Expected an exception due to trying to sort on completion field, but did not happen"); - } catch (SearchPhaseExecutionException e) { - assertThat(e.status().getStatus(), is(400)); - assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]]")); - } - } - - public void testThatSuggestStopFilterWorks() throws Exception { - Settings.Builder settingsBuilder = Settings.builder() - .put("index.analysis.analyzer.stoptest.tokenizer", "standard") - .putArray("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter") - .put("index.analysis.filter.suggest_stop_filter.type", "stop") - .put("index.analysis.filter.suggest_stop_filter.remove_trailing", false); - - CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); - completionMappingBuilder.preserveSeparators(true).preservePositionIncrements(true); - completionMappingBuilder.searchAnalyzer("stoptest"); - completionMappingBuilder.indexAnalyzer("simple"); - createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Feed trolls").endArray() - .field("weight", 5).endObject().endObject() - ).get(); - - // Higher weight so it's ranked first: - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Feed the trolls").endArray() - .field("weight", 10).endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("f", "Feed the trolls", "Feed trolls"); - assertSuggestions("fe", "Feed the trolls", "Feed trolls"); - assertSuggestions("fee", "Feed the trolls", "Feed trolls"); - assertSuggestions("feed", "Feed the trolls", "Feed trolls"); - assertSuggestions("feed t", "Feed the trolls", "Feed trolls"); - assertSuggestions("feed the", "Feed the trolls"); - // the stop word is complete and gets ignored at query time, so the query is effectively just "feed" - assertSuggestions("feed the ", "Feed the trolls", "Feed trolls"); - // the stopword gets removed, but the position increment kicks in, which doesn't work for the prefix suggester - assertSuggestions("feed the t"); - } - - public void testThatIndexingInvalidFieldsInCompletionFieldResultsInException() throws Exception { - CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("FRIGGININVALID").value("Nirvana").endArray() - .endObject().endObject()).get(); - fail("expected MapperParsingException"); - } catch (MapperParsingException expected) {} -
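// The try/fail()/catch blocks repeated throughout this file predate the expectThrows()
// helper; where the branch's test framework already ships LuceneTestCase#expectThrows
// (an assumption worth verifying against this branch), the pattern above collapses to:
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
        client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder()
                .startObject().startObject(FIELD)
                .startArray("FRIGGININVALID").value("Nirvana").endArray()
                .endObject().endObject()).get());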
- - public void assertSuggestions(String suggestion, String... suggestions) { - String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, - SuggestBuilders.completionSuggestion(FIELD).text(suggestion).size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, suggestionName, suggestions); - } - - public void assertSuggestionsNotInOrder(String suggestString, String... suggestions) { - String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, - SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, false, suggestionName, suggestions); - } - - private void assertSuggestions(SearchResponse suggestResponse, String name, String... suggestions) { - assertSuggestions(suggestResponse, true, name, suggestions); - } - - private void assertSuggestions(SearchResponse suggestResponse, boolean suggestionOrderStrict, String name, - String... suggestions) { - assertAllSuccessful(suggestResponse); - - List suggestionNames = new ArrayList<>(); - for (Suggest.Suggestion> suggestion : - suggestResponse.getSuggest()) { - suggestionNames.add(suggestion.getName()); - } - String expectFieldInResponseMsg = - String.format(Locale.ROOT, "Expected suggestion named %s in response, got %s", name, suggestionNames); - assertThat(expectFieldInResponseMsg, suggestResponse.getSuggest().getSuggestion(name), is(notNullValue())); - - Suggest.Suggestion> suggestion = - suggestResponse.getSuggest().getSuggestion(name); - - List suggestionList = getNames(suggestion.getEntries().get(0)); - List options = suggestion.getEntries().get(0).getOptions(); - - String assertMsg = String.format(Locale.ROOT, "Expected options %s length to be %s, but was %s", - suggestionList, suggestions.length, options.size()); - assertThat(assertMsg, options.size(), is(suggestions.length)); - if (suggestionOrderStrict) { - for (int i = 0; i < suggestions.length; i++) { - String errMsg = String.format(Locale.ROOT, "Expected elem %s in list %s to be [%s] score: %s", - i, suggestionList, suggestions[i], options.get(i).getScore()); - assertThat(errMsg, options.get(i).getText().toString(), is(suggestions[i])); - } - } else { - for (String expectedSuggestion : suggestions) { - String errMsg = String.format(Locale.ROOT, "Expected elem %s to be in list %s", - expectedSuggestion, suggestionList); - assertThat(errMsg, suggestionList, hasItem(expectedSuggestion)); - } - } - } - - private List getNames(Suggest.Suggestion.Entry suggestEntry) { - List names = new ArrayList<>(); - for (Suggest.Suggestion.Entry.Option entry : suggestEntry.getOptions()) { - names.add(entry.getText().string()); - } - return names; - } - - private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) - throws IOException { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(indexSettings()).put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .field("analyzer", completionMappingBuilder.indexAnalyzer) - .field("search_analyzer", 
completionMappingBuilder.searchAnalyzer) - .field("payloads", completionMappingBuilder.payloads) - .field("preserve_separators", completionMappingBuilder.preserveSeparators) - .field("preserve_position_increments", completionMappingBuilder.preservePositionIncrements) - .endObject() - .endObject().endObject() - .endObject()) - .get()); - } - - private void createIndexAndMapping(CompletionMappingBuilder completionMappingBuilder) throws IOException { - createIndexAndMappingAndSettings(Settings.EMPTY, completionMappingBuilder); - } - - private void createData(boolean optimize) throws IOException, InterruptedException, ExecutionException { - String[][] input = {{"Foo Fighters"}, {"Generator", "Foo Fighters Generator"}, - {"Learn to Fly", "Foo Fighters Learn to Fly"}, {"The Prodigy"}, {"Firestarter", "The Prodigy Firestarter"}, - {"Turbonegro"}, {"Get it on", "Turbonegro Get it on"}}; - String[] surface = {"Foo Fighters", "Generator - Foo Fighters", "Learn to Fly - Foo Fighters", "The Prodigy", - "Firestarter - The Prodigy", "Turbonegro", "Get it on - Turbonegro"}; - int[] weight = {10, 9, 8, 12, 11, 6, 7}; - IndexRequestBuilder[] builders = new IndexRequestBuilder[input.length]; - for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject().startObject(FIELD) - .array("input", input[i]) - .field("output", surface[i]) - .startObject("payload").field("id", i).endObject() - .field("weight", 1) // WE FORCEFULLY INDEX A BOGUS WEIGHT - .endObject() - .endObject() - ); - } - indexRandom(false, builders); - - for (int i = 0; i < builders.length; i++) { // add them again to make sure we deduplicate on the surface form - builders[i] = client().prepareIndex(INDEX, TYPE, "n" + i) - .setSource(jsonBuilder() - .startObject().startObject(FIELD) - .array("input", input[i]) - .field("output", surface[i]) - .startObject("payload").field("id", i).endObject() - .field("weight", weight[i]) - .endObject() - .endObject() - ); - } - indexRandom(false, builders); - - client().admin().indices().prepareRefresh(INDEX).execute().actionGet(); - if (optimize) { - // make sure merging works just fine - client().admin().indices().prepareFlush(INDEX).execute().actionGet(); - client().admin().indices().prepareForceMerge(INDEX).setMaxNumSegments(randomIntBetween(1, 5)).get(); - } - } - - // see #3555 - public void testPrunedSegments() throws IOException { - createIndexAndMappingAndSettings( - Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build(), - completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("The Beatles").endArray() - .endObject().endObject() - ).get(); - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject() - .field("somefield", "somevalue") - .endObject() - ).get(); // we have 2 docs in a segment... - ForceMergeResponse actionGet = client().admin().indices().prepareForceMerge().setFlush(true) - .setMaxNumSegments(1).execute().actionGet(); - assertAllSuccessful(actionGet); - refresh(); - // update the first one and then merge.. 
the target segment will have no value in FIELD - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject() - .field("somefield", "somevalue") - .endObject() - ).get(); - actionGet = client().admin().indices().prepareForceMerge().setFlush(true).setMaxNumSegments(1) - .execute().actionGet(); - assertAllSuccessful(actionGet); - refresh(); - - assertSuggestions("b"); - assertThat(2L, equalTo(client().prepareSearch(INDEX).get().getHits().totalHits())); - for (IndexShardSegments seg : client().admin().indices().prepareSegments().get().getIndices().get(INDEX)) { - ShardSegments[] shards = seg.getShards(); - for (ShardSegments shardSegments : shards) { - assertThat(shardSegments.getSegments().size(), equalTo(1)); - } - } - } - - public void testMaxFieldLength() throws IOException { - client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)).get(); - ensureGreen(); - int iters = scaledRandomIntBetween(10, 20); - for (int i = 0; i < iters; i++) { - int maxInputLen = between(3, 50); - String str = replaceReservedChars( - randomRealisticUnicodeOfCodepointLengthBetween(maxInputLen + 1, - maxInputLen + scaledRandomIntBetween(2, 50)), (char) 0x01); - assertAcked(client().admin().indices().preparePutMapping(INDEX) - .setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .field("max_input_length", maxInputLen) - // upgrade mapping each time - .field("analyzer", "keyword") - .endObject() - .endObject().endObject() - .endObject())); - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(str).endArray() - .field("output", "foobar") - .endObject().endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - // need to flush and refresh, because we keep changing the same document - // we have to make sure that segments without any live documents are deleted - flushAndRefresh(); - int prefixLen = CompletionFieldMapper2x.correctSubStringLen(str, between(1, maxInputLen - 1)); - assertSuggestions(str.substring(0, prefixLen), "foobar"); - if (maxInputLen + 1 < str.length()) { - int offset = Character.isHighSurrogate(str.charAt(maxInputLen - 1)) ? 
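// The offset just below is 2 exactly when the character at the cut point is a high
// surrogate: truncating between the two halves of a surrogate pair would corrupt the
// string. A hedged reconstruction of the guarantee correctSubStringLen() provides (the
// real helper lives in CompletionFieldMapper2x):
//
//   static int correctSubStringLen(String input, int len) {
//       if (Character.isHighSurrogate(input.charAt(len - 1))) {
//           return len + 1;   // step past the pair instead of splitting it
//       }
//       return len;
//   }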
2 : 1; - int correctSubStringLen = CompletionFieldMapper2x.correctSubStringLen(str, maxInputLen + offset); - String shortenedSuggestion = str.substring(0, correctSubStringLen); - assertSuggestions(shortenedSuggestion); - } - } - } - - // see #3596 - public void testVeryLongInput() throws IOException { - assertAcked(client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject().endObject() - .endObject()).get()); - // can cause stack overflow without the default max_input_length - String longString = replaceReservedChars(randomRealisticUnicodeOfLength(randomIntBetween(5000, 10000)), (char) 0x01); - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(longString).endArray() - .field("output", "foobar") - .endObject().endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - - } - - // see #3648 - public void testReservedChars() throws IOException { - assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject().endObject() - .endObject()).get()); - // can cause stack overflow without the default max_input_length - String string = "foo" + (char) 0x00 + "bar"; - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(string).endArray() - .field("output", "foobar") - .endObject().endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - fail("expected MapperParsingException"); - } catch (MapperParsingException expected) {} - } - - // see #5930 - public void testIssue5930() throws IOException { - assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject().endObject() - .endObject()).get()); - String string = "foo bar"; - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject() - .field(FIELD, string) - .endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - - try { - client().prepareSearch(INDEX).addAggregation(AggregationBuilders.terms("suggest_agg").field(FIELD) - .collectMode(randomFrom(SubAggCollectionMode.values()))).execute().actionGet(); - // Exception must be thrown - assertFalse(true); - } catch (SearchPhaseExecutionException e) { - assertTrue(e.toString().contains("Fielddata is not supported on field [" + FIELD + "] of type [completion]")); - } - } - - // see issue #6399 - public void testIndexingUnrelatedNullValue() throws Exception { - String mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping).get()); - ensureGreen(); - - client().prepareIndex(INDEX, TYPE, "1").setSource(FIELD, "strings make me happy", FIELD + "_1", "nulls make me 
sad") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - - try { - client().prepareIndex(INDEX, TYPE, "2").setSource(FIELD, null, FIELD + "_1", "nulls make me sad") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - fail("Expected MapperParsingException for null value"); - } catch (MapperParsingException e) { - // make sure that the exception has the name of the field causing the error - assertTrue(e.getDetailedMessage().contains(FIELD)); - } - - } - - private static String replaceReservedChars(String input, char replacement) { - char[] charArray = input.toCharArray(); - for (int i = 0; i < charArray.length; i++) { - if (CompletionFieldMapper2x.isReservedChar(charArray[i])) { - charArray[i] = replacement; - } - } - return new String(charArray); - } - - private static class CompletionMappingBuilder { - private String searchAnalyzer = "simple"; - private String indexAnalyzer = "simple"; - private Boolean payloads = getRandom().nextBoolean(); - private Boolean preserveSeparators = getRandom().nextBoolean(); - private Boolean preservePositionIncrements = getRandom().nextBoolean(); - - public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) { - this.searchAnalyzer = searchAnalyzer; - return this; - } - public CompletionMappingBuilder indexAnalyzer(String indexAnalyzer) { - this.indexAnalyzer = indexAnalyzer; - return this; - } - public CompletionMappingBuilder payloads(Boolean payloads) { - this.payloads = payloads; - return this; - } - public CompletionMappingBuilder preserveSeparators(Boolean preserveSeparators) { - this.preserveSeparators = preserveSeparators; - return this; - } - public CompletionMappingBuilder preservePositionIncrements(Boolean preservePositionIncrements) { - this.preservePositionIncrements = preservePositionIncrements; - return this; - } - } -} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 14fab9d72b2e5..74920fb8fc72c 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -24,17 +24,14 @@ import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.ReduceSearchPhaseException; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,13 +49,11 @@ import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import 
java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; @@ -162,58 +157,6 @@ public void testFuzzy() throws Exception { assertSuggestions("foo", prefix, "sugxgestion10", "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6"); } - public void testMixedCompletion() throws Exception { - final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); - createIndexAndMapping(mapping); - String otherIndex = INDEX + "_1"; - assertAcked(client().admin().indices().prepareCreate(otherIndex) - .setSettings(Settings.builder().put(indexSettings()).put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_1).id)) - .addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .field("analyzer", mapping.indexAnalyzer) - .field("search_analyzer", mapping.searchAnalyzer) - .field("preserve_separators", mapping.preserveSeparators) - .field("preserve_position_increments", mapping.preservePositionIncrements) - .endObject() - .endObject().endObject() - .endObject()) - .get()); - int numDocs = 10; - List indexRequestBuilders = new ArrayList<>(); - for (int i = 1; i <= numDocs; i++) { - indexRequestBuilders.add(client().prepareIndex(otherIndex, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i) - .endObject() - .endObject() - )); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i) - .endObject() - .endObject() - )); - } - indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).text("sugg"); - try { - client().prepareSearch(INDEX, otherIndex).suggest(new SuggestBuilder().addSuggestion("foo", prefix)) - .execute().actionGet(); - fail("querying on mixed completion suggester should throw an error"); - } catch (ReduceSearchPhaseException e) { - assertThat(e.getCause().getMessage(), containsString("detected mixed suggestion results")); - } - } - public void testEarlyTermination() throws Exception { final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); createIndexAndMapping(mapping); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java deleted file mode 100644 index 50733f108383b..0000000000000 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java +++ /dev/null @@ -1,1096 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.suggest.Suggest.Suggestion; -import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; -import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion2x.CompletionSuggestion; -import org.elasticsearch.search.suggest.completion2x.context.ContextBuilder; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.hamcrest.Matchers; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertDistance; -import static org.hamcrest.Matchers.containsString; - -@SuppressCodecs("*") // requires custom completion format -public class ContextSuggestSearch2xIT extends ESIntegTestCase { - - private static final String INDEX = "test"; - private static final String TYPE = "testType"; - private static final String FIELD = "testField"; - private final Version PRE2X_VERSION = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_1); - - private static final String[][] HEROS = { - { "Afari, Jamal", "Jamal Afari", "Jamal" }, - { "Allerdyce, St. John", "Allerdyce, John", "St. John", "St. 
John Allerdyce" }, - { "Beaubier, Jean-Paul", "Jean-Paul Beaubier", "Jean-Paul" }, - { "Beaubier, Jeanne-Marie", "Jeanne-Marie Beaubier", "Jeanne-Marie" }, - { "Braddock, Elizabeth \"Betsy\"", "Betsy", "Braddock, Elizabeth", "Elizabeth Braddock", "Elizabeth" }, - { "Cody Mushumanski gun Man", "the hunter", "gun man", "Cody Mushumanski" }, - { "Corbo, Adrian", "Adrian Corbo", "Adrian" }, - { "Corbo, Jared", "Jared Corbo", "Jared" }, - { "Creel, Carl \"Crusher\"", "Creel, Carl", "Crusher", "Carl Creel", "Carl" }, - { "Crichton, Lady Jacqueline Falsworth", "Lady Jacqueline Falsworth Crichton", "Lady Jacqueline Falsworth", - "Jacqueline Falsworth" }, { "Crichton, Kenneth", "Kenneth Crichton", "Kenneth" }, - { "MacKenzie, Al", "Al MacKenzie", "Al" }, - { "MacPherran, Mary \"Skeeter\"", "Mary MacPherran \"Skeeter\"", "MacPherran, Mary", "Skeeter", "Mary MacPherran" }, - { "MacTaggert, Moira", "Moira MacTaggert", "Moira" }, { "Rasputin, Illyana", "Illyana Rasputin", "Illyana" }, - { "Rasputin, Mikhail", "Mikhail Rasputin", "Mikhail" }, { "Rasputin, Piotr", "Piotr Rasputin", "Piotr" }, - { "Smythe, Alistair", "Alistair Smythe", "Alistair" }, { "Smythe, Spencer", "Spencer Smythe", "Spencer" }, - { "Whitemane, Aelfyre", "Aelfyre Whitemane", "Aelfyre" }, { "Whitemane, Kofi", "Kofi Whitemane", "Kofi" } }; - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class); - } - - public void testBasicGeo() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.location("st").precision("5km").neighbors(true)))); - - XContentBuilder source1 = jsonBuilder() - .startObject() - .startObject(FIELD) - .array("input", "Hotel Amsterdam", "Amsterdam") - .field("output", "Hotel Amsterdam in Berlin") - .startObject("context").latlon("st", 52.529172, 13.407333).endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); - - XContentBuilder source2 = jsonBuilder() - .startObject() - .startObject(FIELD) - .array("input", "Hotel Berlin", "Berlin") - .field("output", "Hotel Berlin in Amsterdam") - .startObject("context").latlon("st", 52.363389, 4.888695).endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet(); - - client().admin().indices().prepareRefresh(INDEX).get(); - - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10).contexts( - new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("st", 52.52, 13.4)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - - assertEquals(suggestResponse.getSuggest().size(), 1); - assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest() - .getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); - } - - public void testMultiLevelGeo() throws Exception { - assertAcked(prepareCreate(INDEX).setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.location("st") - .precision(1) - .precision(2) - .precision(3) - .precision(4) - .precision(5) - .precision(6) - .precision(7) - .precision(8) - .precision(9) - 
.precision(10) - .precision(11) - .precision(12) - .neighbors(true)))); - - XContentBuilder source1 = jsonBuilder() - .startObject() - .startObject(FIELD) - .array("input", "Hotel Amsterdam", "Amsterdam") - .field("output", "Hotel Amsterdam in Berlin") - .startObject("context").latlon("st", 52.529172, 13.407333).endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); - - client().admin().indices().prepareRefresh(INDEX).get(); - - for (int precision = 1; precision <= 12; precision++) { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = new CompletionSuggestionBuilder(FIELD).text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("st", 52.529172, 13.407333, precision)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - assertEquals(suggestResponse.getSuggest().size(), 1); - assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest().getSuggestion(suggestionName).iterator().next() - .getOptions().iterator().next().getText().string()); - } - } - - public void testMappingIdempotency() throws Exception { - final int nPrecision = randomIntBetween(4, 12); - List precisions = new ArrayList<>(nPrecision); - for (int i = 0; i < nPrecision; i++) { - precisions.add(i+1); - } - Collections.shuffle(precisions, getRandom()); - int[] precision = new int[nPrecision]; - for (int i = 0; i < precision.length; i++) { - precision[i] = precisions.get(i); - } - XContentBuilder mapping = jsonBuilder().startObject().startObject(TYPE) - .startObject("properties").startObject("completion") - .field("type", "completion") - .startObject("context") - .startObject("location") - .field("type", "geo") - .array("precision", precision) - .endObject() - .endObject() - .endObject().endObject() - .endObject().endObject(); - - assertAcked(prepareCreate(INDEX).setSettings( - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)).addMapping(TYPE, mapping.string())); - - Collections.shuffle(precisions, getRandom()); - for (int i = 0; i < precision.length; i++) { - precision[i] = precisions.get(i); - } - mapping = jsonBuilder().startObject().startObject(TYPE) - .startObject("properties").startObject("completion") - .field("type", "completion") - .startObject("context") - .startObject("location") - .field("type", "geo") - .array("precision", precision) - .endObject() - .endObject() - .endObject().endObject() - .endObject().endObject(); - assertAcked(client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mapping.string()).get()); - } - - - public void testGeoField() throws Exception { - - XContentBuilder mapping = jsonBuilder(); - mapping.startObject(); - mapping.startObject(TYPE); - mapping.startObject("properties"); - mapping.startObject("pin"); - mapping.field("type", "geo_point"); - mapping.endObject(); - mapping.startObject(FIELD); - mapping.field("type", "completion"); - mapping.field("analyzer", "simple"); - - mapping.startObject("context"); - mapping.value(ContextBuilder.location("st", 5, true).field("pin").build()); - mapping.endObject(); - - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - - assertAcked(prepareCreate(INDEX).setSettings( - Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id) - ).addMapping(TYPE, mapping)); - - XContentBuilder 
source1 = jsonBuilder() - .startObject() - .latlon("pin", 52.529172, 13.407333) - .startObject(FIELD) - .array("input", "Hotel Amsterdam", "Amsterdam") - .field("output", "Hotel Amsterdam in Berlin") - .startObject("context").endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); - - XContentBuilder source2 = jsonBuilder() - .startObject() - .latlon("pin", 52.363389, 4.888695) - .startObject(FIELD) - .array("input", "Hotel Berlin", "Berlin") - .field("output", "Hotel Berlin in Amsterdam") - .startObject("context").endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet(); - - refresh(); - - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addGeoLocation("st", 52.52, 13.4)); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - - assertEquals(suggestResponse.getSuggest().size(), 1); - assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest() - .getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); - } - - public void testSimpleGeo() throws Exception { - String reinickendorf = "u337p3mp11e2"; - String pankow = "u33e0cyyjur4"; - String koepenick = "u33dm4f7fn40"; - String bernau = "u33etnjf1yjn"; - String berlin = "u33dc1v0xupz"; - String mitte = "u33dc0cpke4q"; - String steglitz = "u336m36rjh2p"; - String wilmersdorf = "u336wmw0q41s"; - String spandau = "u336uqek7gh6"; - String tempelhof = "u33d91jh3by0"; - String schoeneberg = "u336xdrkzbq7"; - String treptow = "u33d9unn7fp7"; - - double precision = 100.0; // meters - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.location("st").precision(precision).neighbors(true)))); - - String[] locations = { reinickendorf, pankow, koepenick, bernau, berlin, mitte, steglitz, wilmersdorf, spandau, tempelhof, - schoeneberg, treptow }; - - String[][] input = { { "pizza - reinickendorf", "pizza", "food" }, { "pizza - pankow", "pizza", "food" }, - { "pizza - koepenick", "pizza", "food" }, { "pizza - bernau", "pizza", "food" }, { "pizza - berlin", "pizza", "food" }, - { "pizza - mitte", "pizza - berlin mitte", "pizza", "food" }, - { "pizza - steglitz", "pizza - Berlin-Steglitz", "pizza", "food" }, { "pizza - wilmersdorf", "pizza", "food" }, - { "pizza - spandau", "spandau bei berlin", "pizza", "food" }, - { "pizza - tempelhof", "pizza - berlin-tempelhof", "pizza", "food" }, - { "pizza - schoeneberg", "pizza - schöneberg", "pizza - berlin schoeneberg", "pizza", "food" }, - { "pizza - treptow", "pizza", "food" } }; - - for (int i = 0; i < locations.length; i++) { - XContentBuilder source = jsonBuilder().startObject().startObject(FIELD).array("input", input[i]) - .startObject("context").field("st", locations[i]).endObject().field("payload", locations[i]) - .endObject().endObject(); - client().prepareIndex(INDEX, TYPE, "" + i).setSource(source).execute().actionGet(); - } - - refresh(); - - assertGeoSuggestionsInRange(berlin, "pizza", precision); - assertGeoSuggestionsInRange(reinickendorf, "pizza", precision); - assertGeoSuggestionsInRange(spandau, "pizza", 
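// These assertions work because geohashes nest by prefix: every length-(n+1) cell lies
// inside the length-n cell that prefixes it, so "nearby at precision n" reduces to a
// shared n-character prefix, plus the neighbor cells at tile edges, hence neighbors(true)
// in the mapping. For the two Berlin hashes above (plain-string sketch):
//
//   "u33dc1v0xupz".substring(0, 5).equals("u33dc0cpke4q".substring(0, 5))   // berlin ~ mitte: true
//   "u33dc1v0xupz".substring(0, 6).equals("u33dc0cpke4q".substring(0, 6))   // false at ~1.2 km cells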
precision); - assertGeoSuggestionsInRange(koepenick, "pizza", precision); - assertGeoSuggestionsInRange(schoeneberg, "pizza", precision); - assertGeoSuggestionsInRange(tempelhof, "pizza", precision); - assertGeoSuggestionsInRange(bernau, "pizza", precision); - assertGeoSuggestionsInRange(pankow, "pizza", precision); - assertGeoSuggestionsInRange(mitte, "pizza", precision); - assertGeoSuggestionsInRange(steglitz, "pizza", precision); - assertGeoSuggestionsInRange(mitte, "pizza", precision); - assertGeoSuggestionsInRange(wilmersdorf, "pizza", precision); - assertGeoSuggestionsInRange(treptow, "pizza", precision); - } - - public void testSimplePrefix() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.category("st")))); - - for (int i = 0; i < HEROS.length; i++) { - XContentBuilder source = jsonBuilder().startObject().startObject(FIELD).array("input", HEROS[i]) - .startObject("context").field("st", i%3).endObject() - .startObject("payload").field("group", i % 3).field("id", i).endObject() - .endObject().endObject(); - client().prepareIndex(INDEX, TYPE, "" + i).setSource(source).execute().actionGet(); - } - - refresh(); - - assertPrefixSuggestions(0, "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertPrefixSuggestions(0, "b", "Beaubier, Jeanne-Marie"); - assertPrefixSuggestions(0, "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertPrefixSuggestions(0, "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertPrefixSuggestions(0, "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertPrefixSuggestions(1, "s", "St. John", "St. John Allerdyce"); - assertPrefixSuggestions(2, "s", "Smythe, Alistair"); - assertPrefixSuggestions(1, "w", "Whitemane, Aelfyre"); - assertPrefixSuggestions(2, "w", "Whitemane, Kofi"); - } - - public void testTypeCategoryIsActuallyCalledCategory() throws Exception { - XContentBuilder mapping = jsonBuilder(); - mapping.startObject().startObject(TYPE).startObject("properties") - .startObject("suggest_field").field("type", "completion") - .startObject("context").startObject("color").field("type", "category").endObject().endObject() - .endObject() - .endObject().endObject().endObject(); - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping)); - XContentBuilder doc1 = jsonBuilder(); - doc1.startObject().startObject("suggest_field") - .field("input", "backpack_red") - .startObject("context").array("color", "red", "all_colors").endObject() - .endObject().endObject(); - XContentBuilder doc2 = jsonBuilder(); - doc2.startObject().startObject("suggest_field") - .field("input", "backpack_green") - .startObject("context").array("color", "green", "all_colors").endObject() - .endObject().endObject(); - - client().prepareIndex(INDEX, TYPE, "1") - .setSource(doc1).execute() - .actionGet(); - client().prepareIndex(INDEX, TYPE, "2") - .setSource(doc2).execute() - .actionGet(); - - refresh(); - getBackpackSuggestionAndCompare("all_colors", "backpack_red", "backpack_green"); - getBackpackSuggestionAndCompare("red", "backpack_red"); - getBackpackSuggestionAndCompare("green", "backpack_green"); - getBackpackSuggestionAndCompare("not_existing_color"); - - } - - private void getBackpackSuggestionAndCompare(String contextValue, String... 
expectedText) { - Set expected = new HashSet<>(); - Collections.addAll(expected, expectedText); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion("suggest_field") - .text("back").size(10).contexts( - new CompletionSuggestionBuilder.Contexts2x().addContextField("color", contextValue)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("suggestion", context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - Suggest suggest = suggestResponse.getSuggest(); - assertEquals(suggest.size(), 1); - for (Suggestion> s : suggest) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - assertEquals(options.size(), expectedText.length); - for (CompletionSuggestion.Entry.Option option : options) { - assertTrue(expected.contains(option.getText().string())); - expected.remove(option.getText().string()); - } - } - } - } - - public void testBasic() throws Exception { - assertAcked(prepareCreate(INDEX). - setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, false, ContextBuilder.reference("st", "_type"), - ContextBuilder.reference("nd", "_type")))); - - client().prepareIndex(INDEX, TYPE, "1") - .setSource( - jsonBuilder().startObject().startObject(FIELD) - .startArray("input").value("my hotel").value("this hotel").endArray() - .startObject("context").endObject() - .field("payload", TYPE + "|" + TYPE).endObject().endObject()).execute() - .actionGet(); - - refresh(); - - assertDoubleFieldSuggestions(TYPE, TYPE, "m", "my hotel"); - } - - public void testSimpleField() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "category")))); - - for (int i = 0; i < HEROS.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource( - jsonBuilder().startObject().field("category", Integer.toString(i % 3)).startObject(FIELD) - .array("input", HEROS[i]) - .startObject("context").endObject().field("payload", Integer.toString(i % 3)) - .endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertFieldSuggestions("0", "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertFieldSuggestions("0", "b", "Beaubier, Jeanne-Marie"); - assertFieldSuggestions("0", "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertFieldSuggestions("0", "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertFieldSuggestions("0", "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertFieldSuggestions("1", "s", "St. John", "St. 
John Allerdyce"); - assertFieldSuggestions("2", "s", "Smythe, Alistair"); - assertFieldSuggestions("1", "w", "Whitemane, Aelfyre"); - assertFieldSuggestions("2", "w", "Whitemane, Kofi"); - - } - - // see issue #10987 - public void testEmptySuggestion() throws Exception { - String mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .startObject("context") - .startObject("type_context") - .field("path", "_type") - .field("type", "category") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping).get()); - ensureGreen(); - - client().prepareIndex(INDEX, TYPE, "1").setSource(FIELD, "") - .setRefreshPolicy(IMMEDIATE).get(); - - } - - public void testMultiValueField() throws Exception { - assertAcked(prepareCreate(INDEX).setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "category")))); - - for (int i = 0; i < HEROS.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource( - jsonBuilder().startObject().startArray("category").value(Integer.toString(i % 3)).value("other").endArray() - .startObject(FIELD).array("input", HEROS[i]).startObject("context").endObject() - .field("payload", Integer.toString(i % 3)).endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertFieldSuggestions("0", "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertFieldSuggestions("0", "b", "Beaubier, Jeanne-Marie"); - assertFieldSuggestions("0", "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertFieldSuggestions("0", "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertFieldSuggestions("0", "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertFieldSuggestions("1", "s", "St. John", "St. 
John Allerdyce"); - assertFieldSuggestions("2", "s", "Smythe, Alistair"); - assertFieldSuggestions("1", "w", "Whitemane, Aelfyre"); - assertFieldSuggestions("2", "w", "Whitemane, Kofi"); - } - - public void testMultiContext() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "categoryA"), - ContextBuilder.reference("nd", "categoryB")))); - - for (int i = 0; i < HEROS.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource( - jsonBuilder().startObject().field("categoryA").value("" + (char) ('0' + (i % 3))).field("categoryB") - .value("" + (char) ('A' + (i % 3))).startObject(FIELD).array("input", HEROS[i]) - .startObject("context").endObject().field("payload", ((char) ('0' + (i % 3))) + "" + (char) ('A' + (i % 3))) - .endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertMultiContextSuggestions("0", "A", "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertMultiContextSuggestions("0", "A", "b", "Beaubier, Jeanne-Marie"); - assertMultiContextSuggestions("0", "A", "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertMultiContextSuggestions("0", "A", "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertMultiContextSuggestions("0", "A", "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertMultiContextSuggestions("1", "B", "s", "St. John", "St. John Allerdyce"); - assertMultiContextSuggestions("2", "C", "s", "Smythe, Alistair"); - assertMultiContextSuggestions("1", "B", "w", "Whitemane, Aelfyre"); - assertMultiContextSuggestions("2", "C", "w", "Whitemane, Kofi"); - } - - public void testMultiContextWithFuzzyLogic() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, - createMapping(TYPE, ContextBuilder.reference("st", "categoryA"), ContextBuilder.reference("nd", "categoryB")))); - - for (int i = 0; i < HEROS.length; i++) { - String source = jsonBuilder().startObject().field("categoryA", "" + (char) ('0' + (i % 3))) - .field("categoryB", "" + (char) ('a' + (i % 3))).startObject(FIELD).array("input", HEROS[i]) - .startObject("context").endObject().startObject("payload").field("categoryA", "" + (char) ('0' + (i % 3))) - .field("categoryB", "" + (char) ('a' + (i % 3))).endObject().endObject().endObject().string(); - client().prepareIndex(INDEX, TYPE, "" + i).setSource(source).execute().actionGet(); - } - - refresh(); - - String[] prefix1 = { "0", "1", "2" }; - String[] prefix2 = { "a", "b", "c" }; - String[] prefix3 = { "0", "1" }; - String[] prefix4 = { "a", "b" }; - - assertContextWithFuzzySuggestions(prefix1, prefix2, "mary", "MacKenzie, Al", "MacPherran, Mary", "MacPherran, Mary \"Skeeter\"", - "MacTaggert, Moira", "Mary MacPherran", "Mary MacPherran \"Skeeter\""); - assertContextWithFuzzySuggestions(prefix1, prefix2, "mac", "Mikhail", "Mary MacPherran \"Skeeter\"", "MacTaggert, Moira", - "Moira MacTaggert", "Moira", "MacKenzie, Al", "Mary MacPherran", "Mikhail Rasputin", "MacPherran, Mary", - "MacPherran, Mary \"Skeeter\""); - assertContextWithFuzzySuggestions(prefix3, prefix4, "mary", "MacPherran, Mary", "MacPherran, Mary \"Skeeter\"", - "MacTaggert, Moira", "Mary MacPherran", "Mary MacPherran \"Skeeter\""); - assertContextWithFuzzySuggestions(prefix3, prefix4, "mac", "MacPherran, Mary", "MacPherran, Mary \"Skeeter\"", 
"MacTaggert, Moira", - "Mary MacPherran", "Mary MacPherran \"Skeeter\"", "Mikhail", "Mikhail Rasputin", "Moira", "Moira MacTaggert"); - } - - public void testSimpleType() throws Exception { - String[] types = { TYPE + "A", TYPE + "B", TYPE + "C" }; - - CreateIndexRequestBuilder createIndexRequestBuilder = prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)); - for (String type : types) { - createIndexRequestBuilder.addMapping(type, createMapping(type, ContextBuilder.reference("st", "_type"))); - } - assertAcked(createIndexRequestBuilder); - - for (int i = 0; i < HEROS.length; i++) { - String type = types[i % types.length]; - client().prepareIndex(INDEX, type, "" + i) - .setSource( - jsonBuilder().startObject().startObject(FIELD).array("input", HEROS[i]) - .startObject("context").endObject().field("payload", type).endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertFieldSuggestions(types[0], "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertFieldSuggestions(types[0], "b", "Beaubier, Jeanne-Marie"); - assertFieldSuggestions(types[0], "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertFieldSuggestions(types[0], "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertFieldSuggestions(types[0], "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertFieldSuggestions(types[1], "s", "St. John", "St. John Allerdyce"); - assertFieldSuggestions(types[2], "s", "Smythe, Alistair"); - assertFieldSuggestions(types[1], "w", "Whitemane, Aelfyre"); - assertFieldSuggestions(types[2], "w", "Whitemane, Kofi"); - } - - // issue 5525, default location didnt work with lat/lon map, and did not set default location appropriately - public void testGeoContextDefaultMapping() throws Exception { - GeoPoint berlinAlexanderplatz = GeoPoint.fromGeohash("u33dc1"); - - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("poi").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", "500m") - .startObject("default").field("lat", berlinAlexanderplatz.lat()).field("lon", berlinAlexanderplatz.lon()).endObject() - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("poi", xContentBuilder)); - - index(INDEX, "poi", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz").endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("b").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("location", - berlinAlexanderplatz.lat(), berlinAlexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Berlin Alexanderplatz"); - } - - // issue 5525, setting the path of a category context and then indexing a document without that field returned an error - public void testThatMissingPrefixesForContextReturnException() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - 
.startObject("service").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("color") - .field("type", "category") - .field("path", "color") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("service", xContentBuilder)); - - // now index a document with color field - index(INDEX, "service", "1", jsonBuilder().startObject() - .field("color", "red").startObject("suggest").field("input", "backback").endObject().endObject()); - - // now index a document without a color field - try { - index(INDEX, "service", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "backback").endObject().endObject()); - fail("index operation was not supposed to be successful"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("one or more prefixes needed")); - } - } - - public void testThatCategoryDefaultWorks() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("color") - .field("type", "category").field("default", "red") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie red").endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie blue").startObject("context").field("color", "blue") - .endObject().endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addContextField("color", "red")); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Hoodie red"); - } - - public void testThatDefaultCategoryAndPathWorks() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("color") - .field("type", "category") - .field("default", "red") - .field("path", "color") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie red").endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie blue").endObject().field("color", "blue").endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = 
SuggestBuilders.completionSuggestion("suggest").text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addContextField("color", "red")); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Hoodie red"); - } - - public void testThatGeoPrecisionIsWorking() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", 4) // this means geo hashes with a length of four are used, like u345 - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - // lets create some locations by geohashes in different cells with the precision 4 - // this means, that poelchaustr is not a neighour to alexanderplatz, but they share the same prefix until the fourth char! - GeoPoint alexanderplatz = GeoPoint.fromGeohash("u33dc1"); - GeoPoint poelchaustr = GeoPoint.fromGeohash("u33du5"); - GeoPoint dahlem = GeoPoint.fromGeohash("u336q"); // berlin dahlem, should be included with that precision - GeoPoint middleOfNoWhere = GeoPoint.fromGeohash("u334"); // location for west from berlin, should not be included - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz").field("weight", 3).startObject("context") - .startObject("location").field("lat", alexanderplatz.lat()).field("lon", alexanderplatz.lon()).endObject() - .endObject().endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject().startObject("suggest").field("input", "Berlin Poelchaustr.") - .field("weight", 2).startObject("context").startObject("location").field("lat", poelchaustr.lat()) - .field("lon", poelchaustr.lon()).endObject().endObject().endObject().endObject()); - index(INDEX, "item", "3", jsonBuilder().startObject().startObject("suggest").field("input", "Berlin Far Away") - .field("weight", 1).startObject("context").startObject("location").field("lat", middleOfNoWhere.lat()) - .field("lon", middleOfNoWhere.lon()).endObject().endObject().endObject().endObject()); - index(INDEX, "item", "4", jsonBuilder().startObject().startObject("suggest") - .field("input", "Berlin Dahlem").field("weight", 1).startObject("context").startObject("location") - .field("lat", dahlem.lat()).field("lon", dahlem.lon()).endObject().endObject().endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest") - .text("b").size(10).contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("location", - alexanderplatz.lat(), alexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, - "Berlin Alexanderplatz", "Berlin Poelchaustr.", "Berlin Dahlem"); - } - - public void testThatNeighborsCanBeExcluded() throws Exception { - XContentBuilder xContentBuilder = 
jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", 6) - .field("neighbors", false) - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - GeoPoint alexanderplatz = GeoPoint.fromGeohash("u33dc1"); - // does not look like it, but is a direct neighbor - // this test would fail, if the precision was set 4, as then both cells would be the same, u33d - GeoPoint cellNeighbourOfAlexanderplatz = GeoPoint.fromGeohash("u33dbc"); - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz") - .field("weight", 3).startObject("context").startObject("location") - .field("lat", alexanderplatz.lat()).field("lon", alexanderplatz.lon()) - .endObject().endObject().endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Hackescher Markt") - .field("weight", 2).startObject("context").startObject("location") - .field("lat", cellNeighbourOfAlexanderplatz.lat()).field("lon", cellNeighbourOfAlexanderplatz.lon()) - .endObject().endObject().endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("b").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addGeoLocation("location", alexanderplatz.lat(), alexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Berlin Alexanderplatz"); - } - - public void testThatGeoPathCanBeSelected() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", "5m") - .field("path", "loc") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - GeoPoint alexanderplatz = GeoPoint.fromGeohash("u33dc1"); - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz").endObject() - .startObject("loc").field("lat", alexanderplatz.lat()).field("lon", alexanderplatz.lon()).endObject() - .endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("b").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addGeoLocation("location", alexanderplatz.lat(), alexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Berlin Alexanderplatz"); - } - - public void 
testThatPrecisionIsRequired() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("path", "loc") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - try { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - fail("expected MapperParsingException"); - } catch (MapperParsingException expected) {} - } - - public void assertGeoSuggestionsInRange(String location, String suggest, double precision) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("st", location)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - - Suggest suggest2 = suggestResponse.getSuggest(); - assertTrue(suggest2.iterator().hasNext()); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - assertTrue(suggestion.iterator().hasNext()); - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - assertTrue(options.iterator().hasNext()); - for (CompletionSuggestion.Entry.Option option : options) { - String target = option.getPayloadAsString(); - assertDistance(location, target, Matchers.lessThanOrEqualTo(precision)); - } - } - } - } - - public void assertPrefixSuggestions(long prefix, String suggest, String... hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest) - .size(hits.length + 1).contexts(new CompletionSuggestionBuilder.Contexts2x() - .addCategory("st", Long.toString(prefix))); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - ArrayList suggestions = new ArrayList<>(); - Suggest suggest2 = suggestResponse.getSuggest(); - assertTrue(suggest2.iterator().hasNext()); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - Map payload = option.getPayloadAsMap(); - int group = (Integer) payload.get("group"); - String text = option.getText().string(); - assertEquals(prefix, group); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - public void assertContextWithFuzzySuggestions(String[] prefix1, String[] prefix2, String suggest, String... 
hits) - throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).prefix(suggest, Fuzziness.TWO) - .size(hits.length + 10).contexts( - new CompletionSuggestionBuilder.Contexts2x().addContextField("st", prefix1).addContextField("nd", prefix2)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - assertTrue(suggest2.iterator().hasNext()); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - Map payload = option.getPayloadAsMap(); - String text = option.getText().string(); - assertThat(prefix1, Matchers.hasItemInArray(payload.get("categoryA"))); - assertThat(prefix2, Matchers.hasItemInArray(payload.get("categoryB"))); - suggestions.add(text); - } - } - } - - assertSuggestionsMatch(suggestions, hits); - } - - public void assertFieldSuggestions(String value, String suggest, String... hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addContextField("st", value)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - String payload = option.getPayloadAsString(); - String text = option.getText().string(); - assertEquals(value, payload); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - public void assertDoubleFieldSuggestions(String field1, String field2, String suggest, String... 
hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addContextField("st", field1).addContextField("nd", field2)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - String payload = option.getPayloadAsString(); - String text = option.getText().string(); - assertEquals(field1 + "|" + field2, payload); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - public void assertMultiContextSuggestions(String value1, String value2, String suggest, String... hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addContextField("st", value1).addContextField("nd", value2)); - - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - String payload = option.getPayloadAsString(); - String text = option.getText().string(); - assertEquals(value1 + value2, payload); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - private void assertSuggestionsMatch(List suggestions, String... hits) { - boolean[] suggested = new boolean[hits.length]; - Arrays.sort(hits); - Arrays.fill(suggested, false); - int numSuggestions = 0; - - for (String suggestion : suggestions) { - int hitpos = Arrays.binarySearch(hits, suggestion); - - assertEquals(hits[hitpos], suggestion); - assertTrue(hitpos >= 0); - assertTrue(!suggested[hitpos]); - - suggested[hitpos] = true; - numSuggestions++; - - } - assertEquals(hits.length, numSuggestions); - } - - private XContentBuilder createMapping(String type, ContextBuilder... context) throws IOException { - return createMapping(type, false, context); - } - - private XContentBuilder createMapping(String type, boolean preserveSeparators, ContextBuilder... context) - throws IOException { - return createMapping(type, "simple", "simple", true, preserveSeparators, true, context); - } - - private XContentBuilder createMapping(String type, String indexAnalyzer, String searchAnalyzer, boolean payloads, - boolean preserveSeparators, boolean preservePositionIncrements, - ContextBuilder... 
contexts) throws IOException { - XContentBuilder mapping = jsonBuilder(); - mapping.startObject(); - mapping.startObject(type); - mapping.startObject("properties"); - mapping.startObject(FIELD); - mapping.field("type", "completion"); - mapping.field("analyzer", indexAnalyzer); - mapping.field("search_analyzer", searchAnalyzer); - mapping.field("payloads", payloads); - mapping.field("preserve_separators", preserveSeparators); - mapping.field("preserve_position_increments", preservePositionIncrements); - - mapping.startObject("context"); - for (ContextBuilder context : contexts) { - mapping.value(context.build()); - } - mapping.endObject(); - - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - return mapping; - } -} diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index acf14bc45e23b..2375b7519ccdf 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -47,7 +47,6 @@ import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryMissingException; @@ -69,7 +68,6 @@ import java.util.Collection; import java.util.EnumSet; import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -113,12 +111,9 @@ public void testRestorePersistentSettings() throws Exception { logger.info("--> set test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.builder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) - .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), random, TimeUnit.MINUTES)) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)) .execute().actionGet(); - assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), equalTo(2)); @@ -136,11 +131,10 @@ public void testRestorePersistentSettings() throws Exception { logger.info("--> clean the test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.builder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1) - .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1))) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1)) .execute().actionGet(); 
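For readers migrating tests like the ones deleted above: the 5.x context suggester expresses the same category and geo filtering through query contexts passed to CompletionSuggestionBuilder, instead of the removed Contexts2x helper. A minimal sketch, assuming an index whose completion field declares a "color" category context and a "location" geo context and assuming the 5.x contexts(Map) API; the field and context names here are illustrative, not taken from this patch:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.elasticsearch.common.geo.GeoPoint;
    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.search.suggest.SuggestBuilder;
    import org.elasticsearch.search.suggest.SuggestBuilders;
    import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
    import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
    import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;

    // 5.x analogue of Contexts2x#addContextField / #addGeoLocation: one list of
    // query contexts per context name declared in the completion field's mapping.
    Map<String, List<? extends ToXContent>> contexts = new HashMap<>();
    contexts.put("color", Arrays.asList(CategoryQueryContext.builder().setCategory("red").build()));
    contexts.put("location", Arrays.asList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.40)).build()));
    CompletionSuggestionBuilder suggestion = SuggestBuilders.completionSuggestion("suggest")
        .prefix("h").size(10).contexts(contexts);
    SuggestBuilder suggest = new SuggestBuilder().addSuggestion("suggestion", suggestion);

Note that 5.x completion suggestions carry no payloads; the tests above that round-tripped payloads have no direct equivalent and typically fetch the source of the suggested documents instead.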
diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
index acf14bc45e23b..2375b7519ccdf 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
@@ -47,7 +47,6 @@
 import org.elasticsearch.discovery.zen.ElectMasterService;
 import org.elasticsearch.discovery.zen.ZenDiscovery;
 import org.elasticsearch.indices.recovery.RecoveryState;
-import org.elasticsearch.indices.ttl.IndicesTTLService;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.RepositoryMissingException;
@@ -69,7 +68,6 @@
 import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
@@ -113,12 +111,9 @@ public void testRestorePersistentSettings() throws Exception {
         logger.info("--> set test persistent setting");
         client.admin().cluster().prepareUpdateSettings().setPersistentSettings(
                 Settings.builder()
-                        .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)
-                        .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), random, TimeUnit.MINUTES))
+                        .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2))
                 .execute().actionGet();

-        assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
-                .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis()));
         assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
                 .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1),
                 equalTo(2));
@@ -136,11 +131,10 @@ public void testRestorePersistentSettings() throws Exception {
         logger.info("--> clean the test persistent setting");
         client.admin().cluster().prepareUpdateSettings().setPersistentSettings(
                 Settings.builder()
-                        .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1)
-                        .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)))
+                        .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1))
                 .execute().actionGet();
         assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
-                .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis()));
+                .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1),
+                equalTo(1));

         stopNode(secondNode);
         assertThat(client.admin().cluster().prepareHealth().setWaitForNodes("1").get().isTimedOut(), equalTo(false));
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 769866641e58c..4765292be1d10 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -446,8 +446,8 @@ public void testRestoreTemplates() throws Exception {
         logger.info("--> creating test template");
         assertThat(client.admin().indices().preparePutTemplate("test-template").setPatterns(Collections.singletonList("te*")).addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties")
-                .startObject("field1").field("type", "string").field("store", "yes").endObject()
-                .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+                .startObject("field1").field("type", "text").field("store", "yes").endObject()
+                .startObject("field2").field("type", "keyword").field("store", "yes").endObject()
                 .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true));

         logger.info("--> snapshot");
@@ -487,8 +487,8 @@ public void testIncludeGlobalState() throws Exception {
         if(testTemplate) {
             logger.info("--> creating test template");
             assertThat(client.admin().indices().preparePutTemplate("test-template").setPatterns(Collections.singletonList("te*")).addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties")
-                    .startObject("field1").field("type", "string").field("store", "yes").endObject()
-                    .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+                    .startObject("field1").field("type", "text").field("store", "yes").endObject()
+                    .startObject("field2").field("type", "keyword").field("store", "yes").endObject()
                     .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true));
         }
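The two hunks above capture the 2.x-to-5.x mapping migration in miniature: "string" splits into "text" (analyzed) and "keyword" (the old index:not_analyzed). A minimal standalone sketch of the same mapping, assuming jsonBuilder as used throughout these tests; the boolean store flag is used here because the "yes"/"no" spellings are deprecated in 5.x:

    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
    import org.elasticsearch.common.xcontent.XContentBuilder;

    // 5.x replacement for a 2.x "string" mapping: analyzed text for full-text
    // search plus an exact-match keyword instead of string + not_analyzed.
    XContentBuilder mapping = jsonBuilder().startObject().startObject("test-mapping").startObject("properties")
        .startObject("field1").field("type", "text").field("store", true).endObject()
        .startObject("field2").field("type", "keyword").field("store", true).endObject()
        .endObject().endObject().endObject();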
diff --git a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java b/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java
deleted file mode 100644
index e64a695d88840..0000000000000
--- a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.timestamp;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
-import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
-import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Locale;
-
-import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
-
-public class SimpleTimestampIT extends ESIntegTestCase {
-
-    private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(InternalSettingsPlugin.class);
-    }
-
-    public void testSimpleTimestamp() throws Exception {
-        client().admin().indices().prepareCreate("test")
-            .setSettings(BW_SETTINGS)
-            .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("_timestamp").field("enabled", true).endObject().endObject().endObject())
-            .execute().actionGet();
-        client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
-
-        logger.info("--> check with automatic timestamp");
-        long now1 = System.currentTimeMillis();
-        client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get();
-        long now2 = System.currentTimeMillis();
-
-        // non-realtime get (stored)
-        GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet();
-        long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
-        assertThat(timestamp, greaterThanOrEqualTo(now1));
-        assertThat(timestamp, lessThanOrEqualTo(now2));
-        // verify it is the same timestamp when going to the replica
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet();
-        assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
-
-        logger.info("--> check with custom timestamp (numeric)");
-        client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("10").setRefreshPolicy(IMMEDIATE).get();
-
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
-        timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
-        assertThat(timestamp, equalTo(10L));
-        // verify it is the same timestamp when going to the replica
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
-        assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
-
-        logger.info("--> check with custom timestamp (string)");
-        client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("1970-01-01T00:00:00.020")
-            .setRefreshPolicy(IMMEDIATE).get();
-
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
-        timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
-        assertThat(timestamp, equalTo(20L));
-        // verify it is the same timestamp when going to the replica
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
-        assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
-    }
-
-    // issue #5053
-    public void testThatUpdatingMappingShouldNotRemoveTimestampConfiguration() throws Exception {
-        String index = "foo";
-        String type = "mytype";
-
-        XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject();
-        assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder));
-
-        // check mapping again
-        assertTimestampMappingEnabled(index, type, true);
-
-        // update some field in the mapping
-        XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text").endObject().endObject().endObject();
-        PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get();
-        assertAcked(putMappingResponse);
-
-        // make sure the timestamp field is still in the mapping
-        assertTimestampMappingEnabled(index, type, true);
-    }
-
-    public void testThatTimestampCanBeSwitchedOnAndOff() throws Exception {
-        String index = "foo";
-        String type = "mytype";
-
-        XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject();
-        assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder));
-
-        // check mapping again
-        assertTimestampMappingEnabled(index, type, true);
-
-        // switch the timestamp off in the mapping
-        XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", false).endObject().endObject();
-        PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get();
-        assertAcked(putMappingResponse);
-
-        // make sure the timestamp field is now disabled in the mapping
-        assertTimestampMappingEnabled(index, type, false);
-    }
-
-    private void assertTimestampMappingEnabled(String index, String type, boolean enabled) {
-        GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get();
-        MappingMetaData.Timestamp timestamp = getMappingsResponse.getMappings().get(index).get(type).timestamp();
-        assertThat(timestamp, is(notNullValue()));
-        String errMsg = String.format(Locale.ROOT, "Expected timestamp field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s/%s", index, type);
-        assertThat(errMsg, timestamp.enabled(), is(enabled));
-    }
-}
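With the _timestamp metadata field removed, the usual 5.x substitute is an ordinary date field whose value is supplied by the indexing client (or filled in by an ingest pipeline). A minimal sketch under that assumption; the "timestamp" field name below is illustrative, not an API:

    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

    // Map an explicit date field in place of the removed _timestamp metadata field.
    client().admin().indices().prepareCreate("test")
        .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("timestamp").field("type", "date").endObject()
            .endObject().endObject().endObject())
        .get();

    // The client sets the value explicitly at index time; there is no automatic stamping.
    client().prepareIndex("test", "type1", "1")
        .setSource(jsonBuilder().startObject()
            .field("field1", "value1")
            .field("timestamp", System.currentTimeMillis())
            .endObject())
        .get();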
diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
deleted file mode 100644
index 5716e57c96fb7..0000000000000
--- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.ttl;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.DocWriteResponse;
-import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
-import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
-import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.action.index.IndexResponse;
-import org.elasticsearch.action.update.UpdateRequestBuilder;
-import org.elasticsearch.action.update.UpdateResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
-import org.elasticsearch.test.ESIntegTestCase.Scope;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Locale;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.both;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.hasKey;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-
-@ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
-public class SimpleTTLIT extends ESIntegTestCase {
-
-    private static final long PURGE_INTERVAL = 200;
-
-    @Override
-    protected int numberOfShards() {
-        return 2;
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(InternalSettingsPlugin.class);
-    }
-
-    @Override
-    protected Settings nodeSettings(int nodeOrdinal) {
-        return Settings.builder()
-            .put(super.nodeSettings(nodeOrdinal))
-            .put("indices.ttl.interval", PURGE_INTERVAL, TimeUnit.MILLISECONDS)
-            .build();
-    }
-
-    public void testSimpleTTL() throws Exception {
-        assertAcked(prepareCreate("test")
-            .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
-            .addMapping("type1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("type1")
-                .startObject("_timestamp").field("enabled", true).endObject()
-                .startObject("_ttl").field("enabled", true).endObject()
-                .endObject()
-                .endObject())
-            .addMapping("type2", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("type2")
-                .startObject("_timestamp").field("enabled", true).endObject()
-                .startObject("_ttl").field("enabled", true).field("default", "1d").endObject()
-                .endObject()
-                .endObject()));
-
-        final NumShards test = getNumShards("test");
-
-        long providedTTLValue = 3000;
-        logger.info("--> checking ttl");
-        // Index one doc without routing, one doc with routing, one doc with no TTL and no default, and one doc with the default TTL
-        long now = System.currentTimeMillis();
-        IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1")
-            .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRefreshPolicy(IMMEDIATE).get();
-        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
-        indexResponse = client().prepareIndex("test", "type1", "with_routing").setSource("field1", "value1")
-            .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRouting("routing").setRefreshPolicy(IMMEDIATE).get();
-        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
-        indexResponse = client().prepareIndex("test", "type1", "no_ttl").setSource("field1", "value1").get();
-        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
-        indexResponse = client().prepareIndex("test", "type2", "default_ttl").setSource("field1", "value1").get();
-        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
-
-        // realtime get check
-        long currentTime = System.currentTimeMillis();
-        GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get();
-        long ttl0;
-        if (getResponse.isExists()) {
-            ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-            assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
-        } else {
-            assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
-        }
-        // verify the ttl is still decreasing when going to the replica
-        currentTime = System.currentTimeMillis();
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get();
-        if (getResponse.isExists()) {
-            ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-            assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
-        } else {
-            assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
-        }
-        // non-realtime get (stored)
-        currentTime = System.currentTimeMillis();
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get();
-        if (getResponse.isExists()) {
-            ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-            assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
-        } else {
-            assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
-        }
-        // non-realtime get going to the replica
-        currentTime = System.currentTimeMillis();
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get();
-        if (getResponse.isExists()) {
-            ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-            assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
-        } else {
-            assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
-        }
-
-        // no TTL provided so no TTL fetched
-        getResponse = client().prepareGet("test", "type1", "no_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
-        assertThat(getResponse.getField("_ttl"), nullValue());
-        // no TTL provided, make sure it has the default TTL
-        getResponse = client().prepareGet("test", "type2", "default_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
-        ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-        assertThat(ttl0, greaterThan(0L));
-
-        IndicesStatsResponse response = client().admin().indices().prepareStats("test").clear().setIndexing(true).get();
-        assertThat(response.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount(), equalTo(0L));
-
-        // make sure the purger has done its job for all indexed docs that are expired
-        long shouldBeExpiredDate = now + providedTTLValue + PURGE_INTERVAL + 2000;
-        currentTime = System.currentTimeMillis();
-        if (shouldBeExpiredDate - currentTime > 0) {
-            Thread.sleep(shouldBeExpiredDate - currentTime);
-        }
-
-        // We can't assume that after waiting for ttl + purgeInterval (waitTime) the documents have actually been deleted.
-        // The ttl purging happens in the background, in a different thread, and might not have completed after waiting for waitTime.
-        // But we can use the index statistics' delete count to be sure that the deletes have been executed, since it must be
-        // incremented before ttl purging has finished.
-        logger.info("--> checking purger");
-        assertTrue(awaitBusy(() -> {
-            if (rarely()) {
-                client().admin().indices().prepareFlush("test").get();
-            } else if (rarely()) {
-                client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get();
-            }
-            IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setIndexing(true).get();
-            // TTL deletes two docs, but each delete is executed on the primary shard and on every replica shard.
-            return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() == 2L * test.dataCopies;
-        },
-            5, TimeUnit.SECONDS
-        ));
-
-        // realtime get check
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-        getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-        // replica realtime get check
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-        getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-
-        // Need to run a refresh in order for the non-realtime get to work.
-        client().admin().indices().prepareRefresh("test").execute().actionGet();
-
-        // non-realtime get (stored) check
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-        getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-        // non-realtime get going to the replica check
-        getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-        getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-    }
-
-    // issue 5053
-    public void testThatUpdatingMappingShouldNotRemoveTTLConfiguration() throws Exception {
-        String index = "foo";
-        String type = "mytype";
-
-        XContentBuilder builder = jsonBuilder().startObject().startObject("_ttl").field("enabled", true).endObject().endObject();
-        assertAcked(client().admin().indices().prepareCreate(index).setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id).addMapping(type, builder));
-
-        // check mapping again
-        assertTTLMappingEnabled(index, type);
-
-        // update some field in the mapping
-        XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text").endObject().endObject().endObject();
-        PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get();
-        assertAcked(putMappingResponse);
-
-        // make sure the ttl mapping is still there
-        assertTTLMappingEnabled(index, type);
-    }
-
-    /**
-     * Test that updates with detect_noop set to true (the default) that don't
-     * change the source don't change the ttl. This is unexpected behavior and
-     * documented in ttl-field.asciidoc. If this behavior changes it is safe to
-     * rewrite this test to reflect the new behavior and to change the
-     * documentation.
-     */
-    public void testNoopUpdate() throws IOException {
-        assertAcked(prepareCreate("test")
-            .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
-            .addMapping("type1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("type1")
-                .startObject("_timestamp").field("enabled", true).endObject()
-                .startObject("_ttl").field("enabled", true).endObject()
-                .endObject()
-                .endObject()));
-
-        long aLongTime = 10000000;
-        long firstTtl = aLongTime * 3;
-        long secondTtl = aLongTime * 2;
-        long thirdTtl = aLongTime * 1;
-        IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1")
-            .setTTL(firstTtl).setRefreshPolicy(IMMEDIATE).get();
-        assertTrue(indexResponse.getResult() == DocWriteResponse.Result.CREATED);
-        assertThat(getTtl("type1", 1), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl)));
-
-        // Updating with the default detect_noop without a change to the document doesn't change the ttl.
-        UpdateRequestBuilder update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value1").setTtl(secondTtl);
-        assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl)));
-
-        // Updating with the default detect_noop with a change to the document does change the ttl.
-        update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(secondTtl);
-        assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(secondTtl)).and(greaterThan(thirdTtl)));
-
-        // Updating with detect_noop=true without a change to the document doesn't change the ttl.
-        update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(secondTtl).setDetectNoop(true);
-        assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(secondTtl)).and(greaterThan(thirdTtl)));
-
-        // Updating with detect_noop=false without a change to the document does change the ttl.
-        update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(thirdTtl).setDetectNoop(false);
-        assertThat(updateAndGetTtl(update), lessThanOrEqualTo(thirdTtl));
-    }
-
-    private long updateAndGetTtl(UpdateRequestBuilder update) {
-        UpdateResponse updateResponse = update.setFields("_ttl").get();
-        assertThat(updateResponse.getShardInfo().getFailed(), equalTo(0));
-        // You can't actually fetch _ttl from an update, so we use a get.
-        return getTtl(updateResponse.getType(), updateResponse.getId());
-    }
-
-    private long getTtl(String type, Object id) {
-        GetResponse getResponse = client().prepareGet("test", type, id.toString()).setStoredFields("_ttl").execute()
-            .actionGet();
-        return ((Number) getResponse.getField("_ttl").getValue()).longValue();
-    }
-
-    private void assertTTLMappingEnabled(String index, String type) throws IOException {
-        String errMsg = String.format(Locale.ROOT, "Expected ttl field mapping to be enabled for %s/%s", index, type);
-
-        GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get();
-        Map<String, Object> mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap();
-        assertThat(errMsg, mappingSource, hasKey("_ttl"));
-        String ttlAsString = mappingSource.get("_ttl").toString();
-        assertThat(ttlAsString, is(notNullValue()));
-        assertThat(errMsg, ttlAsString, is("{enabled=true}"));
-    }
-}
- return getTtl(updateResponse.getType(), updateResponse.getId()); - } - - private long getTtl(String type, Object id) { - GetResponse getResponse = client().prepareGet("test", type, id.toString()).setStoredFields("_ttl").execute() - .actionGet(); - return ((Number) getResponse.getField("_ttl").getValue()).longValue(); - } - - private void assertTTLMappingEnabled(String index, String type) throws IOException { - String errMsg = String.format(Locale.ROOT, "Expected ttl field mapping to be enabled for %s/%s", index, type); - - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); - Map mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap(); - assertThat(errMsg, mappingSource, hasKey("_ttl")); - String ttlAsString = mappingSource.get("_ttl").toString(); - assertThat(ttlAsString, is(notNullValue())); - assertThat(errMsg, ttlAsString, is("{enabled=true}")); - } -} diff --git a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java deleted file mode 100644 index 39f1c7746344a..0000000000000 --- a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.update; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.engine.DocumentMissingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - -public class TimestampTTLBWIT extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Arrays.asList( - UpdateIT.FieldIncrementScriptPlugin.class, - UpdateIT.ExtractContextInSourceScriptPlugin.class, - UpdateIT.PutFieldValuesScriptPlugin.class, - InternalSettingsPlugin.class - ); - } - - public void testSort() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).endObject() - .endObject().endObject(); - assertAcked(prepareCreate("test") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type", mapping)); - ensureGreen(); - final int numDocs = randomIntBetween(10, 20); - IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs]; - for (int i = 0; i < numDocs; ++i) { - indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000))) - .setSource(); - } - indexRandom(true, indexReqs); - - SortOrder order = randomFrom(SortOrder.values()); - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .setSize(randomIntBetween(1, numDocs + 5)) - .addSort("_timestamp", order) - .addStoredField("_timestamp") - .execute().actionGet(); - assertNoFailures(searchResponse); - SearchHit[] hits = searchResponse.getHits().hits(); - Long previousTs = order == SortOrder.ASC ? 0 : Long.MAX_VALUE; - for (int i = 0; i < hits.length; ++i) { - SearchHitField timestampField = hits[i].getFields().get("_timestamp"); - Long timestamp = timestampField.getValue(); - assertThat(previousTs, order == SortOrder.ASC ? 
lessThanOrEqualTo(timestamp) : greaterThanOrEqualTo(timestamp)); - previousTs = timestamp; - } - } - - public void testUpdate() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject())); - - ensureGreen(); - - try { - client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); - fail(); - } catch (DocumentMissingException e) { - // all is well - } - - // check TTL is kept after an update without TTL - client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get(); - GetResponse getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); - long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl, greaterThan(0L)); - client().prepareUpdate(indexOrAlias(), "type1", "2") - .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); - getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); - ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl, greaterThan(0L)); - - // check TTL update - client().prepareUpdate(indexOrAlias(), "type1", "2") - .setScript(new Script(ScriptType.INLINE, "put_values", "", - Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet(); - getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); - ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl, greaterThan(0L)); - assertThat(ttl, lessThanOrEqualTo(3600000L)); - - // check timestamp update - client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get(); - client().prepareUpdate(indexOrAlias(), "type1", "3") - .setScript(new Script(ScriptType.INLINE, "put_values", "", - Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute() - .actionGet(); - getResponse = client().prepareGet("test", "type1", "3").setStoredFields("_timestamp").execute().actionGet(); - long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); - assertThat(timestamp, equalTo(1258294332000L)); - } - - public void testContextVariables() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject()) - .addMapping("subtype1", XContentFactory.jsonBuilder() - .startObject() - .startObject("subtype1") - .startObject("_parent").field("type", "type1").endObject() - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject()) - ); - ensureGreen(); - - // Index some documents - long timestamp = 
System.currentTimeMillis(); - client().prepareIndex() - .setIndex("test") - .setType("type1") - .setId("parentId1") - .setTimestamp(String.valueOf(timestamp-1)) - .setSource("field1", 0, "content", "bar") - .execute().actionGet(); - - long ttl = 10000; - client().prepareIndex() - .setIndex("test") - .setType("subtype1") - .setId("id1") - .setParent("parentId1") - .setRouting("routing1") - .setTimestamp(String.valueOf(timestamp)) - .setTTL(ttl) - .setSource("field1", 1, "content", "foo") - .execute().actionGet(); - - // Update the first object and note context variables values - UpdateResponse updateResponse = client().prepareUpdate("test", "subtype1", "id1") - .setRouting("routing1") - .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) - .execute().actionGet(); - - assertEquals(2, updateResponse.getVersion()); - - GetResponse getResponse = client().prepareGet("test", "subtype1", "id1").setRouting("routing1").execute().actionGet(); - Map updateContext = (Map) getResponse.getSourceAsMap().get("update_context"); - assertEquals("test", updateContext.get("_index")); - assertEquals("subtype1", updateContext.get("_type")); - assertEquals("id1", updateContext.get("_id")); - assertEquals(1, updateContext.get("_version")); - assertEquals("parentId1", updateContext.get("_parent")); - assertEquals("routing1", updateContext.get("_routing")); - assertThat(((Integer) updateContext.get("_ttl")).longValue(), allOf(greaterThanOrEqualTo(ttl-3000), lessThanOrEqualTo(ttl))); - - // Idem with the second object - updateResponse = client().prepareUpdate("test", "type1", "parentId1") - .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) - .execute().actionGet(); - - assertEquals(2, updateResponse.getVersion()); - - getResponse = client().prepareGet("test", "type1", "parentId1").execute().actionGet(); - updateContext = (Map) getResponse.getSourceAsMap().get("update_context"); - assertEquals("test", updateContext.get("_index")); - assertEquals("type1", updateContext.get("_type")); - assertEquals("parentId1", updateContext.get("_id")); - assertEquals(1, updateContext.get("_version")); - assertNull(updateContext.get("_parent")); - assertNull(updateContext.get("_routing")); - assertNull(updateContext.get("_ttl")); - } - - private static String indexOrAlias() { - return randomBoolean() ? 
"test" : "alias"; - } -} diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index a3903023edfa7..2c98b393642ab 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -743,24 +743,19 @@ public void testContextVariables() throws Exception { ensureGreen(); // Index some documents - long timestamp = System.currentTimeMillis(); client().prepareIndex() .setIndex("test") .setType("type1") .setId("parentId1") - .setTimestamp(String.valueOf(timestamp-1)) .setSource("field1", 0, "content", "bar") .execute().actionGet(); - long ttl = 10000; client().prepareIndex() .setIndex("test") .setType("subtype1") .setId("id1") .setParent("parentId1") .setRouting("routing1") - .setTimestamp(String.valueOf(timestamp)) - .setTTL(ttl) .setSource("field1", 1, "content", "foo") .execute().actionGet(); diff --git a/core/src/test/resources/indices/bwc/missing-checksum-repo-2.3.4.zip b/core/src/test/resources/indices/bwc/missing-checksum-repo-2.3.4.zip deleted file mode 100644 index 9590f8dbd660fd56617c8e55b6bc73276aae4744..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 11371 zcmdT~2{_bU`!*)V{@Rxi#+qzn%a$c3#=d4LYax>?k&HDD(G^ zIB+u`emP-;s*<0_IItg+E@0eFY=!>!j+mz>o9U;=T5o!C2G!a6t&mq(g1=(9y?DB= z<)v}nvUf;kaAenb_hd`Dcg$5scesxW+zRGiXM0u`Zsh|DBh%7yL>M^0U~nrxL)5kK z%%Dsabr(3y&hMC=dzrMt z$Ok+p4HHsK)8n%aWM!r%TO^ppA+y@gbdWO%3-@T#Toddf6W0_!Pen!XmGTors)(eQ z@~V*14af>nce-RU>|8^jSL2ayg%vy%)jY30CO7{P^C4h;eT~(8rREMd2(Xd)zoPiL zc>N_CCW6|SM-X=uH;5#>MIe#Www`#T#a1+%Fln(BOj%LL;b&XpD&dz9<*fkM3svOH zNGN^KcXWr@8NmEt4lV_PIZ0Kg)VgjuP|p}!TRFKH!VUZaEc`4Sbm0rxy>Y8?E$PeS z-37)BNlA<)W6{=Bx(f@$M~GMwTalg0<}uLBR`gsA8o}GT5LP6MM$GZDD1XVAf+Bf& z(TD|Dq6nW4u@5nO4VpK6fhD|^B@mhV_CaqL94MDoM62w|+`=3hfrf_jDsw>gFE&B7v?PpLZbsY;4*`bwH~VSp z918XTf*<~IMmqo?!QW$VIQ${2A@=gu-l1ytU8@={-mD z;^a3(6c2ujyf;csPfdoy$I`P-#8{^$k7Kx=fVAi)*f;Vpjzi&9gNf^#rm*l&X%!PJ z)hiR(T{`2GpAEatvv4Z^p*ec_A?u~dPw`w$kuUXE6zEstwDSYnGzHd3p!J*$%-~)& z%4ZynH>B2x?@BhIOzF9>g?75orA9Y>h&2Qprl?&zZ>Nn@C& z4AYJIqJOxoK8WlXfyuNA(csA&y&4J|954nngTGoH-L+UCoWdJ9FwtU)=1XM+1Sz1nqb}iIxXe)7{_hu2N{WGN%^KpsmXEH zJd@T5De3VAW9in$G1gmD2PTRL5}%i$_#*U~V!Wq&nd@ea7^yC|(GdxW5gt{KdbW~6 z8`|5qd~W#=zg=nd35RyFI5RLhEI9CiZAPFd1TV72Em1+NOyyPy!%Q+GZ{FQoQwfZY zZh#A_45h65XIb{oBhLqWGA8p(Gk)Bknu6S)mWpI%WwA(=C~IY%<;_FgB}9*FljNls z95`Uey8=ZYbU~tJ54O6jKx@KzeQH(?qEG7HdBC;s0C4Ay2uPBz;Zhh~NW>#1(5|qn z$P~;P0|^K!NbyKSaa>{VG3n8xQ=uP4uE7BZv%rKFWFR`4;g1HFQv@ZY@&&?zN%<@X zyoMwzEuV}Cs_CSCq-#wX5~Q7p+fq`Z_PUleVmE+J4RU@jox5}SJu;mGw3 zyB|G6U_I8RUL}vd<{G5-@vdJE4-^bq>9x8y=r2S?JE&+68GrP4dvHxR_jF{$vNuN4PB>rFi4?M1h@&k6(QK;j35mMvie+w~k%O zVJfDa8~*xQGcUGUH*FZ{OV>Nx5yBd>d}VA{K=CZUr#n{(7Y8>9waeXVAsa;sgU~{@ z>0Hnax)!SeB8p2ZF-ingoyiRRhh!f;5T>Dn#P`I9JoF;etBe2PN9xnV@;>#OqUFuZ zkETd9+dKz_;VXTj!KP`?XKM;#f=lO(%I5f|)nigB4L-W-z;ZM=uhY^ide8(nSnk+yUf~KTj(pSF(H7t$RVQ;gd?#g)(G#OSLKMbix!_e*dI{ z4BXfHhN}lrh=)aWCab-q@{#gbW8>nFO(o8rZPceu>CO;SWv;f(P7k%|TeRQIplb}Z zFOn}%a_%326gVW<7~HoyBqK!^)#ex^MP<^L$5nGZvd}4u;vr&P-gW`OGWI>HP{Gwu zmaTc+QfNYhj%qIuN1lOC$ogr<8+|g=(OFjoXgm#+ zB#;h=Rp$yoie>nfjwrI{=LnSYNGL@=zRDzGVUdaiC*{8}H=nX)q&Gvdnq+1%0sF*;LoZ{~5)s;3D3RgiB>Q^IL+mg)H^u)L4+3o0-uCmUqh zzfalwaT#&TzzrZ+4WOX@))Iu^s}Mw)>PeA1Ey2bL^gV@Gl-_)oMs)zwfY5QC>!Tht zmm7my_h80NgnWv!9>mCBLXBC(oyrry}qzA&=cfa6&)4!syHnO z@#Om3?;ITQRS{_RTodyjxAUV?xxVDNu)Wv$(R3y`j0<2}?n9l%91xtsUVB02K(t;2 zQ$v8$42g1yJ6DQ6_choOLBVuMg6=z!ABP~HZ2u?06c zi%}f}ylpBH&Wp zV9AMYNU+OiE>5km%Li&np4Vi27{QRCqq^PC)$m}Y^93RoNp!PplMUMB36Do%JAA?kyY 
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java @@ -48,7 +48,7 @@ public void testTAI64N()throws Exception { Function function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null); DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", (randomBoolean() ?
"@" : "") + "4000000050d506482dbdf024")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); @@ -58,7 +58,7 @@ public void testUnixMs()throws Exception { Function function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null); DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", "1000500")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); @@ -68,7 +68,7 @@ public void testUnix()throws Exception { Function function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null); DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", "1000.5")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index edbc00156996f..2baa32b3d5196 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -45,7 +45,7 @@ public void testExecute() throws Exception { values.add("bar"); values.add("baz"); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); ForEachProcessor processor = new ForEachProcessor( @@ -61,7 +61,7 @@ public void testExecute() throws Exception { public void testExecuteWithFailure() throws Exception { IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c")) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c")) ); TestProcessor testProcessor = new TestProcessor(id -> { @@ -101,7 +101,7 @@ public void testMetaDataAvailable() throws Exception { values.add(new HashMap<>()); values.add(new HashMap<>()); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); TestProcessor innerProcessor = new TestProcessor(id -> { @@ -132,7 +132,7 @@ public void testRestOfTheDocumentIsAvailable() throws Exception { document.put("values", values); document.put("flat_values", new ArrayList<>()); document.put("other", "value"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, document); 
TemplateService ts = TestTemplateService.instance(); ForEachProcessor processor = new ForEachProcessor( @@ -171,7 +171,7 @@ public String getTag() { values.add(""); } IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); @@ -190,7 +190,7 @@ public void testModifyFieldsOutsideArray() throws Exception { values.add(1); values.add(null); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); TemplateService ts = TestTemplateService.instance(); @@ -221,7 +221,7 @@ public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws source.put("_value", "new_value"); source.put("values", values); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, source + "_index", "_type", "_id", null, null, source ); TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", @@ -252,7 +252,7 @@ public void testNestedForEach() throws Exception { values.add(value); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values1", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values1", values) ); TestProcessor testProcessor = new TestProcessor( diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index 36ce55067f475..c78825758e2dc 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.script.ClassPermission; @@ -200,8 +199,7 @@ public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @ } else { valueSource = GeoField.getMethod(fieldData, fieldname, methodname); } - } else if (fieldType instanceof LegacyDateFieldMapper.DateFieldType || - fieldType instanceof DateFieldMapper.DateFieldType) { + } else if (fieldType instanceof DateFieldMapper.DateFieldType) { if (dateAccessor) { // date object if (methodname == null) { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 97535b4209398..e1589cba14b1e 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -226,7 +226,7 @@ public void testCreateMultiDocumentSearcher() throws 
Exception { } Analyzer analyzer = new WhitespaceAnalyzer(); - ParsedDocument parsedDocument = new ParsedDocument(null, null, "_id", "_type", null, -1L, -1L, docs, null, null); + ParsedDocument parsedDocument = new ParsedDocument(null, null, "_id", "_type", null, docs, null, null); IndexSearcher indexSearcher = PercolateQueryBuilder.createMultiDocumentSearcher(analyzer, parsedDocument); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(numDocs)); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 197a82f2ccc75..433e77d84a4aa 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -242,7 +242,7 @@ public void testPercolateQueriesWithRouting() throws Exception { client().admin().indices().prepareCreate(INDEX_NAME) .setSettings(Settings.builder().put("index.number_of_shards", 2)) .addMapping(TYPE_NAME, "query", "type=percolator") - .addMapping("type", "field1", "type=string") + .addMapping("type", "field1", "type=text") .execute().actionGet(); ensureGreen(); diff --git a/modules/percolator/src/test/resources/indices/percolator/bwc_index_2.0.0.zip b/modules/percolator/src/test/resources/indices/percolator/bwc_index_2.0.0.zip deleted file mode 100644 index 43a8cceb1937d10a30e27381d8dbfbfe3a0bb9f6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 11852 [base85 binary payload omitted]
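The reindex diffs that follow strip the _timestamp/_ttl plumbing end to end: ScrollableHitSource.Hit loses getTimestamp() and getTTL(), RequestWrapper loses the matching setters, and copyMetadata stops forwarding the two values. A minimal sketch of the per-hit metadata that survives, using only members visible in the hunks below:

import org.elasticsearch.index.reindex.ScrollableHitSource;

// BasicHit(index, type, id, version), as declared in ScrollableHitSource below.
ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0);
hit.setParent("parentId");      // may be null if the source hit had no parent
hit.setRouting("routingValue"); // may be null if the source hit had no routing
// getTimestamp() and getTTL() no longer exist, so copyMetadata now forwards
// only parent and routing onto the destination request.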
zK|L=0ctu)l&Vjkpk>de|1d5bn<6qaRi?K`nV!Fvd34PqrrqKEjE%nN=rEaE8V)s~D zCiD+!=L-~4u-TzHb%m~&Y$I<6wnmpu54@^p9}7|_wExtEeGZhKtq*cpF)c~1{Kqsm zjymCOooNXpUfEqdkgAbzl)k2ZV?&%kpBIbvV@*M3F!Z%4W53)?vDs4WGr?q!^^Dw+ zCMegNi*18)M}4&l?cE)XMU1zLX)5O-H->Yx&vKoZZuZhzQYENd!x~{lXU9Fu-mYOD zeI}F8tKG?`6L(E~v}Nic^u(tbse!(N?cJfF%kr2X?T-{%fcLvX!z1IrRcQ48Gle#@ zu&M(TIxivl3Sqjcwx9Y{)=cri$1H1d`5X>O`Jx<+4r_9ip)8)2YaID{mkaApey&F= zEaZhJiblTZrQ5J&B!R{NG$;c-s;Bgi1p3sDK%f4f2sAB*aJYeiHaC?`Z?Gj>+^#kSR;wB#*^k2v1*VOJAq~$1WJbYQR%74F7O3 z(2e&h$Dhn|XjKZvEp3(^TT~uZJe3|?@l^zm78GkO5@~L|<4hfB^&~SwC(MuIqm$gWm zn&m8^oh6D@q$Jf@Z|-@|d(Yopsj2BED&P{GV%K`XR%&`a-%iBSSg+h1Jln7_@!&*h zf|P6KLX-OTy0B5)IX0~?9?!Y8)6-Lvu1$NK&C0*W?KY^)+X@Si?aky?ZT;%! z(@rcGgoCsQ+PX&YE5)w*JT6!$E z{B@+F@NGVaU0YC*!)H+OY;D7cPsRK|n<##QiBoT|`Gc<+ZY}Eeb}_v-U7haLev0=m z6MX!(3h(J;#>zIg_pbC zIZV!t+QOqngqwcwHgdt>DWBKpnQw@?&WKE=K-UsE3 zH>X=E+Ri#v^K=j2nHBHpyC*i#dw1^a_`_@SOW;O|aq14`!m&#ci-~iT8pX8T%kk;j z5|N@$NY{(>0(ea4z9jZMZv2SfeKZo3FGoqGTbHeKgBB$72vqT9E*mU`k3KMZ@rmxU z=S(&Djb8#*-;tfueR<*>VTui;X06J7raR%fUy_ICknQH`lX|pawNK?l&j$#+h+o_1 zRc4Qh6%3Fi>*tMMBf8zzF)B|*!tlaAL#IHWIQ__Yze!YYuT5l^y;P+km(n~=XQ`Ce zHd6KiSB~mQeb+xrqJe4!B8i3v$A2r)X#ZytZDV0l*G}V@rJ3A<%^cgSdQx2g1&Q|W z@V1mfPNL(1VoHbk0!GOnX|&*uM#F%+3Va+;Rs2PzBblpWduubA&-;0r&m8?$fH>hG zx2H7+1tDn~PPVMpmxWf!8@%5Zp?8{dh zJ`sVl$2@Lazy7wvZv2F}Pwb7mAla8si1w+jS{_>Ucl zAZdGI{I=uC1a?D1*c@M6@n1Ms1ai7eMzjkv!K~BdmIPUI?l-BA_LdP1h>>P+FqrEG zJ*Xk@N(l2WXP%}H&Kpq~;(Sgn-EbYVo4EHib%}G1iTZE)iolf6lhsb+s)h@IV0GoydA#@SaV#&4u8Rq;lvo_q zSoGAgBoO39tAtHSQ2WMHZqeo0ZiURa2gzM}?U2=68;rpv&k2F33Q~jR2V~;4aL8M2 z;!Hxgcq4TwB-jV6)`4=pL-2=G!-E7xscx?H zU!Q+RveqL^I6X1aQLp_lyworqQVjRHgf+q^GA)TgqEF*j$x2aX@>R9$H!K zmz|xSbSEi^L(sOm*x)KFZ8YRESFmz^#Ddd2bd)kXr@}mEJd3<)jB<<>Iyp9;o8>H8 zi5=2_gH!S33Ekg`8~$kcFwM{0Xh=CDyzqT)*m1Q5(4#njYC%*=OD^V?_T~<*E{12m z)tBHU@PFaEQw331Qf&u{O09cIeqq%FLdyWRqd$Xu#*Wxmst!@IcD6CNpI?8I9W=Fi z8@sNFzmYa$W>_K2fys5~?Zs~U_FLmp^doM9i%@cwaf*r2+!qN@i=10iPuQ?^SaaF1 zb1ZYPax^{8>ST>x$Rc=7k&O|_qrlzL-eoFe&R5v37}W(KyGp2dsw=ErNHEN)nXFkvlP59X3 znW+&7!yX&c2)S(s_c4Vu1i}R%!^hb_ML*5!3i$l1?I`uyjuQ9BYyl|vNqmgwg0YII zsim=srL~2MrKh5_h?%37F4qrsME-DoJKVt613vzR8?0|3ir8#>8+z=A z!A7~Ar!;;nM-LEycwn4D0Rhp>12h+8x?1t%O>(ZDmUQWyT+Y{FcXiF ze&-rizQ@PLd={1@kEzC5RzHd<7Rqc^D2Spl7Zkrr1xuuXCDPLBz>>6RE`{VZb9G!& z;0fze3g=VcX>NgB=Dx2!D+5 zQ*_|(5w090I%w$h7>I4R*BCa43WyZ=hZOWt8vgf>_YGKx_YHftva;e&Tl-%(Alm}( z*n;=C0c-HL4!^LrHy}uM&CDOk+D;SzMi1zjBBqGQ%=f=~_%THgD9f*HztLT9Abw>Z zLFe8hgLK!VZvjzVYT2Ph$64J-N$j&jD5sc(Q;x4AU+c9za6(PJSQE z_oe-fKYN#!a~Dw9;eb$F_1zB{kXL;UB|PM34Zq(Hdrx(aU7prr9E#E(?+*aD0}d19 z?Y;l?0kWfvm?*&gc#8ml!rvks7^sMt+TYYi1_fiG3JTnP4+pxtS&9s*kBb^8yz29N z%>8XcWXuo!#GH!exH< z{Tdmwf(8Xl807kyB0>HmH!uK9*+&jSVyiu3#P{2R# zXNY|8UtL8UGyAWu$Q;+Bi1-8g$3+(K_<&0+a&`;%c=zQM8DHu+Y98<4|G3KjYON6j zz$dW#c8WZKcYinm#9j4Q=!l)RH)mttz2NkRa395tZTDRj+1IBipdsH}0Wbgcet__D w?{$c5$4huT1u^{e5`OA_zi9zx;Ww>)#|#$GodtnT0Dmli6Z3E2%OKEy0ER%Hvj6}9 diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java index bfe7c5d77e326..34ad6e2ee25ee 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java @@ -27,15 +27,12 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; -import 
org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.script.CompiledScript; @@ -133,16 +130,6 @@ protected boolean accept(ScrollableHitSource.Hit doc) { protected RequestWrapper copyMetadata(RequestWrapper request, ScrollableHitSource.Hit doc) { request.setParent(doc.getParent()); copyRouting(request, doc.getRouting()); - - // Comes back as a Long but needs to be a string - Long timestamp = doc.getTimestamp(); - if (timestamp != null) { - request.setTimestamp(timestamp.toString()); - } - Long ttl = doc.getTTL(); - if (ttl != null) { - request.setTtl(ttl); - } return request; } @@ -184,10 +171,6 @@ interface RequestWrapper> { String getRouting(); - void setTimestamp(String timestamp); - - void setTtl(Long ttl); - void setSource(Map source); Map getSource(); @@ -271,20 +254,6 @@ public String getRouting() { return request.routing(); } - @Override - public void setTimestamp(String timestamp) { - request.timestamp(timestamp); - } - - @Override - public void setTtl(Long ttl) { - if (ttl == null) { - request.ttl((TimeValue) null); - } else { - request.ttl(ttl); - } - } - @Override public Map getSource() { return request.sourceAsMap(); @@ -384,16 +353,6 @@ public String getRouting() { return request.routing(); } - @Override - public void setTimestamp(String timestamp) { - throw new UnsupportedOperationException("unable to set [timestamp] on action request [" + request.getClass() + "]"); - } - - @Override - public void setTtl(Long ttl) { - throw new UnsupportedOperationException("unable to set [ttl] on action request [" + request.getClass() + "]"); - } - @Override public Map getSource() { throw new UnsupportedOperationException("unable to get source from action request [" + request.getClass() + "]"); @@ -463,10 +422,6 @@ public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hi context.put(ParentFieldMapper.NAME, oldParent); String oldRouting = doc.getRouting(); context.put(RoutingFieldMapper.NAME, oldRouting); - Long oldTimestamp = doc.getTimestamp(); - context.put(TimestampFieldMapper.NAME, oldTimestamp); - Long oldTTL = doc.getTTL(); - context.put(TTLFieldMapper.NAME, oldTTL); context.put(SourceFieldMapper.NAME, request.getSource()); OpType oldOpType = OpType.INDEX; @@ -515,14 +470,6 @@ public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hi if (false == Objects.equals(oldRouting, newValue)) { scriptChangedRouting(request, newValue); } - newValue = resultCtx.remove(TimestampFieldMapper.NAME); - if (false == Objects.equals(oldTimestamp, newValue)) { - scriptChangedTimestamp(request, newValue); - } - newValue = resultCtx.remove(TTLFieldMapper.NAME); - if (false == Objects.equals(oldTTL, newValue)) { - scriptChangedTTL(request, newValue); - } OpType newOpType = OpType.fromString(newOp); if (newOpType != oldOpType) { @@ -564,10 +511,6 @@ protected RequestWrapper scriptChangedOpType(RequestWrapper request, OpTyp protected abstract void scriptChangedParent(RequestWrapper request, Object to); - protected 
abstract void scriptChangedTimestamp(RequestWrapper request, Object to); - - protected abstract void scriptChangedTTL(RequestWrapper request, Object to); - } public enum OpType { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 7f7ae52b73baf..09423b2cca878 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -39,8 +39,6 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.threadpool.ThreadPool; @@ -237,16 +235,6 @@ public String getRouting() { return fieldValue(RoutingFieldMapper.NAME); } - @Override - public Long getTimestamp() { - return fieldValue(TimestampFieldMapper.NAME); - } - - @Override - public Long getTTL() { - return fieldValue(TTLFieldMapper.NAME); - } - private T fieldValue(String fieldName) { SearchHitField field = delegate.field(fieldName); return field == null ? null : field.value(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 40aa745d06a5e..d8388af755c87 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -93,12 +93,6 @@ public ActionRequestValidationException validate() { e = addValidationError("unsupported version for internal versioning [" + destination.version() + ']', e); } } - if (destination.ttl() != null) { - e = addValidationError("setting ttl on destination isn't supported. use scripts instead.", e); - } - if (destination.timestamp() != null) { - e = addValidationError("setting timestamp on destination isn't supported. 
use scripts instead.", e); - } if (getRemoteInfo() != null) { if (getSearchRequest().source().query() != null) { e = addValidationError("reindex from remote sources should use RemoteInfo's query instead of source's query", e); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 14fd6cb903148..54746bc74b839 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -96,11 +96,6 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler s.versionType(VersionType.fromString(i)), new ParseField("version_type")); - // These exist just so the user can get a nice validation error: - destParser.declareString(IndexRequest::timestamp, new ParseField("timestamp")); - destParser.declareString((i, ttl) -> i.ttl(parseTimeValue(ttl, TimeValue.timeValueMillis(-1), "ttl").millis()), - new ParseField("ttl")); - PARSER.declareField((p, v, c) -> sourceParser.parse(p, v, c), new ParseField("source"), ValueType.OBJECT); PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ValueType.OBJECT); PARSER.declareInt(ReindexRequest::setSize, new ParseField("size")); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java index 0b4b66222bc4b..bf13d6d72e269 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -187,14 +187,6 @@ public interface Hit { * The routing on the hit if there is any or null if there isn't. */ @Nullable String getRouting(); - /** - * The {@code _timestamp} on the hit if one was stored with the hit or null if one wasn't. - */ - @Nullable Long getTimestamp(); - /** - * The {@code _ttl} on the hit if one was set on it or null one wasn't. 
- */ - @Nullable Long getTTL(); } /** @@ -210,8 +202,6 @@ public static class BasicHit implements Hit { private BytesReference source; private String parent; private String routing; - private Long timestamp; - private Long ttl; public BasicHit(String index, String type, String id, long version) { this.index = index; @@ -269,26 +259,6 @@ public BasicHit setRouting(String routing) { this.routing = routing; return this; } - - @Override - public Long getTimestamp() { - return timestamp; - } - - public BasicHit setTimestamp(Long timestamp) { - this.timestamp = timestamp; - return this; - } - - @Override - public Long getTTL() { - return ttl; - } - - public BasicHit setTTL(Long ttl) { - this.ttl = ttl; - return this; - } } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 96f9061c2163f..7ae0d715ed002 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -56,7 +56,6 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.mapper.TTLFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.index.reindex.remote.RemoteInfo; @@ -319,8 +318,6 @@ protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) */ index.routing(mainRequest.getDestination().routing()); index.parent(mainRequest.getDestination().parent()); - index.timestamp(mainRequest.getDestination().timestamp()); - index.ttl(mainRequest.getDestination().ttl()); index.contentType(mainRequest.getDestination().getContentType()); index.setPipeline(mainRequest.getDestination().getPipeline()); // OpType is synthesized from version so it is handled when we copy version above. 
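With the timestamp and ttl lines deleted from buildRequest above, routing, parent, content type, and pipeline are the only destination attributes reindex still copies onto each generated index request. A hedged sketch of configuring what remains; the ReindexRequest constructor is the one the tests later in this patch use, and "my-pipeline" is a hypothetical name:

import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.index.reindex.ReindexRequest;

ReindexRequest reindex = new ReindexRequest(new SearchRequest("source"), new IndexRequest("dest"));
reindex.getDestination().routing("fixedRouting");    // still copied per hit
reindex.getDestination().setPipeline("my-pipeline"); // still copied per hit
// getDestination().timestamp(...) and .ttl(...) are gone, together with the
// "use scripts instead" validation errors that used to point at them.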
@@ -407,20 +404,6 @@ protected void scriptChangedRouting(RequestWrapper request, Object to) { request.setRouting(Objects.toString(to, null)); } - @Override - protected void scriptChangedTimestamp(RequestWrapper request, Object to) { - request.setTimestamp(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedTTL(RequestWrapper request, Object to) { - if (to == null) { - request.setTtl(null); - } else { - request.setTtl(asLong(to, TTLFieldMapper.NAME)); - } - } - private long asLong(Object from, String name) { /* * Stuffing a number into the map will have converted it to diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index d8ca0441023e9..009ffabb73bff 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -36,8 +36,6 @@ import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -161,15 +159,6 @@ protected void scriptChangedParent(RequestWrapper request, Object to) { throw new IllegalArgumentException("Modifying [" + ParentFieldMapper.NAME + "] not allowed"); } - @Override - protected void scriptChangedTimestamp(RequestWrapper request, Object to) { - throw new IllegalArgumentException("Modifying [" + TimestampFieldMapper.NAME + "] not allowed"); - } - - @Override - protected void scriptChangedTTL(RequestWrapper request, Object to) { - throw new IllegalArgumentException("Modifying [" + TTLFieldMapper.NAME + "] not allowed"); - } } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java index 4583e4c8c055f..9f1467d7cfdba 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java @@ -88,23 +88,19 @@ private RemoteResponseParsers() {} ParseField ttlField = new ParseField("_ttl"); HIT_PARSER.declareString(BasicHit::setRouting, routingField); HIT_PARSER.declareString(BasicHit::setParent, parentField); - HIT_PARSER.declareLong(BasicHit::setTTL, ttlField); - HIT_PARSER.declareLong(BasicHit::setTimestamp, new ParseField("_timestamp")); // Pre-2.0.0 parent and routing come back in "fields" class Fields { String routing; String parent; - long ttl; } ObjectParser fieldsParser = new ObjectParser<>("fields", Fields::new); HIT_PARSER.declareObject((hit, fields) -> { hit.setRouting(fields.routing); hit.setParent(fields.parent); - hit.setTTL(fields.ttl); }, fieldsParser, new ParseField("fields")); fieldsParser.declareString((fields, routing) -> fields.routing = routing, routingField); fieldsParser.declareString((fields, parent) -> fields.parent = parent, parentField); - fieldsParser.declareLong((fields, ttl) -> fields.ttl = ttl, ttlField); + fieldsParser.declareLong((fields, ttl) -> {}, ttlField); // ignore ttls since they have been removed } /**
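The RemoteResponseParsers hunk above is the compatibility shim for reindex-from-remote: hits returned by old 1.x/2.x clusters may still carry _ttl, so the parser keeps the field declared and simply drops the value instead of failing the parse. The pattern, reusing the fieldsParser and ttlField variables from the hunk:

// Accept-and-ignore: legacy _ttl values are parsed, then discarded, now that
// the mapping no longer supports them.
fieldsParser.declareLong((fields, ttl) -> { /* discard legacy _ttl */ }, ttlField);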
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java index 4cc10334223c6..cb9ec0c273ba2 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java @@ -19,10 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.index.IndexRequest; - -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; - public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< Request extends AbstractBulkIndexByScrollRequest, Response extends BulkIndexByScrollResponse> @@ -32,17 +28,5 @@ protected ScrollableHitSource.BasicHit doc() { return new ScrollableHitSource.BasicHit("index", "type", "id", 0); } - public void testTimestampIsCopied() { - IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTimestamp(10L)); - assertEquals("10", index.timestamp()); - } - - public void testTTL() throws Exception { - IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTTL(10L)); - assertEquals(timeValueMillis(10), index.ttl()); - } - protected abstract AbstractAsyncBulkIndexByScrollAction action(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java index 9bfa41da7f31a..d089f0427c307 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -119,28 +118,6 @@ public void testResponseOnSearchFailure() throws Exception { assumeFalse("Wasn't able to trigger a reindex failure in " + attempt + " attempts.", true); } - public void testSettingTtlIsValidationFailure() throws Exception { - indexDocs(1); - ReindexRequestBuilder copy = reindex().source("source").destination("dest"); - copy.destination().setTTL(123); - try { - copy.get(); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("setting ttl on destination isn't supported. use scripts instead.")); - } - } - - public void testSettingTimestampIsValidationFailure() throws Exception { - indexDocs(1); - ReindexRequestBuilder copy = reindex().source("source").destination("dest"); - copy.destination().setTimestamp("now"); - try { - copy.get(); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("setting timestamp on destination isn't supported.
use scripts instead.")); - } - } - private void indexDocs(int count) throws Exception { List docs = new ArrayList(count); for (int i = 0; i < count; i++) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index 559d0b54565cf..30ba03aca76fe 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -34,14 +34,6 @@ * Tests some of the validation of {@linkplain ReindexRequest}. See reindex's rest tests for much more. */ public class ReindexRequestTests extends AbstractBulkByScrollRequestTestCase { - public void testTimestampAndTtlNotAllowed() { - ReindexRequest reindex = newRequest(); - reindex.getDestination().ttl("1s").timestamp("now"); - ActionRequestValidationException e = reindex.validate(); - assertEquals("Validation Failed: 1: setting ttl on destination isn't supported. use scripts instead.;" - + "2: setting timestamp on destination isn't supported. use scripts instead.;", - e.getMessage()); - } public void testReindexFromRemoteDoesNotSupportSearchQuery() { ReindexRequest reindex = newRequest(); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index c70b80b8e374c..66b681b149477 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -26,7 +26,6 @@ import java.util.Map; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.hamcrest.Matchers.containsString; /** @@ -104,32 +103,6 @@ public void testSetRouting() throws Exception { assertEquals(routing, index.routing()); } - public void testSetTimestamp() throws Exception { - String timestamp = randomFrom("now", "1234", null); - IndexRequest index = applyScript((Map ctx) -> ctx.put("_timestamp", timestamp)); - assertEquals(timestamp, index.timestamp()); - } - - public void testSetTtl() throws Exception { - Number ttl = randomFrom(new Number[] { null, 1233214, 134143797143L }); - IndexRequest index = applyScript((Map ctx) -> ctx.put("_ttl", ttl)); - if (ttl == null) { - assertEquals(null, index.ttl()); - } else { - assertEquals(timeValueMillis(ttl.longValue()), index.ttl()); - } - } - - public void testSettingTtlToJunkIsAnError() throws Exception { - Object junkTtl = randomFrom(new Object[] { "junk", Math.PI }); - try { - applyScript((Map ctx) -> ctx.put("_ttl", junkTtl)); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("_ttl may only be set to an int or a long but was [")); - assertThat(e.getMessage(), containsString(junkTtl.toString())); - } - } - @Override protected ReindexRequest request() { return new ReindexRequest(new SearchRequest(), new IndexRequest()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index c5b9d4da64fe6..5ff54e4e06d80 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -38,7 +38,7 @@ 
public void testModifyingCtxNotAllowed() { * error message to the user, not some ClassCastException. */ Object[] options = new Object[] {"cat", new Object(), 123, new Date(), Math.PI}; - for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_parent", "_routing", "_timestamp", "_ttl"}) { + for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_parent", "_routing"}) { try { applyScript((Map ctx) -> ctx.put(ctxVar, randomFrom(options))); } catch (IllegalArgumentException e) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 351f939265668..f898483a124d0 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -169,8 +169,6 @@ public void testParseStartOk() throws Exception { assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString()); - assertNull(r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); assertNull(r.getHits().get(0).getRouting()); called.set(true); }); @@ -189,8 +187,6 @@ public void testParseScrollOk() throws Exception { assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertNull(r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); assertNull(r.getHits().get(0).getRouting()); called.set(true); }); @@ -205,8 +201,6 @@ public void testParseScrollFullyLoaded() throws Exception { sourceWithMockedRemoteCall("scroll_fully_loaded.json").doStartNextScroll("", timeValueMillis(0), r -> { assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertEquals((Long) 1234L, r.getHits().get(0).getTTL()); - assertEquals((Long) 123444L, r.getHits().get(0).getTimestamp()); assertEquals("testrouting", r.getHits().get(0).getRouting()); assertEquals("testparent", r.getHits().get(0).getParent()); called.set(true); @@ -222,8 +216,6 @@ public void testParseScrollFullyLoadedFrom1_7() throws Exception { sourceWithMockedRemoteCall("scroll_fully_loaded_1_7.json").doStartNextScroll("", timeValueMillis(0), r -> { assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertEquals((Long) 1234L, r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); // Not available from 1.7 assertEquals("testrouting", r.getHits().get(0).getRouting()); assertEquals("testparent", r.getHits().get(0).getParent()); called.set(true); @@ -248,8 +240,6 @@ public void testScanJumpStart() throws Exception { assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertNull(r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); assertNull(r.getHits().get(0).getRouting()); called.set(true); }); diff --git 
diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml
index ffcdb42c86c6a..68ae83eabd216 100644
--- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml
+++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml
@@ -113,42 +113,6 @@
         index: dest
         size: -4
 
----
-"can't set ttl":
-  - do:
-      index:
-        index: test
-        type: test
-        id: 1
-        body: { "text": "test" }
-  - do:
-      catch: /setting ttl on destination isn't supported. use scripts instead./
-      reindex:
-        body:
-          source:
-            index: test
-          dest:
-            index: dest
-            ttl: 3m
-
----
-"can't set timestamp":
-  - do:
-      index:
-        index: test
-        type: test
-        id: 1
-        body: { "text": "test" }
-  - do:
-      catch: /setting timestamp on destination isn't supported. use scripts instead./
-      reindex:
-        body:
-          source:
-            index: test
-          dest:
-            index: dest
-            timestamp: "123"
-
 ---
 "requests_per_second cannot be an empty string":
   - do:
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
index 0b8b347e30fe6..d99820bb86465 100644
--- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
@@ -101,7 +101,7 @@ public void testDoesNotLimitExcludedRequests() throws Exception {
         Tuple[] requestUris = new Tuple[1500];
         for (int i = 0; i < requestUris.length; i++) {
             requestUris[i] = Tuple.tuple("/_cluster/settings",
-                "{ \"transient\": {\"indices.ttl.interval\": \"40s\" } }");
+                "{ \"transient\": {\"search.default_search_timeout\": \"40s\" } }");
         }
 
         HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class);
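The test merely needs some dynamic cluster setting to exercise the excluded-request path; with indices.ttl.interval gone along with the TTL service, search.default_search_timeout stands in. A hedged sketch of the cleanup such a test would typically pair with a transient setting (hypothetical, not part of the patch):

    // clear the transient setting afterwards so later tests see the default;
    // putNull removes the key from the transient settings
    client().admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Settings.builder().putNull("search.default_search_timeout"))
            .get();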
diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java
deleted file mode 100644
index c632e139955e0..0000000000000
--- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper.murmur3;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.aggregations.AggregationBuilders;
-import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalTestCluster;
-import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.concurrent.ExecutionException;
-
-@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
-@LuceneTestCase.SuppressFileSystems("ExtrasFS")
-public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(MapperMurmur3Plugin.class);
-    }
-
-    public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException {
-        final String indexName = "index-mapper-murmur3-2.0.0";
-        final String indexUUID = "1VzJds59TTK7lRu17W0mcg";
-        InternalTestCluster.Async<String> master = internalCluster().startNodeAsync();
-        Path unzipDir = createTempDir();
-        Path unzipDataDir = unzipDir.resolve("data");
-        Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip");
-        try (InputStream stream = Files.newInputStream(backwardsIndex)) {
-            TestUtil.unzip(stream, unzipDir);
-        }
-        assertTrue(Files.exists(unzipDataDir));
-
-        Path dataPath = createTempDir();
-        Settings settings = Settings.builder()
-            .put(Environment.PATH_DATA_SETTING.getKey(), dataPath)
-            .build();
-        final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master
-        Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths();
-        assertEquals(1, nodePaths.length);
-        dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
-        assertFalse(Files.exists(dataPath));
-        Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices");
-        Files.move(src, dataPath);
-        Files.move(dataPath.resolve(indexName), dataPath.resolve(indexUUID));
-
-        master.get();
-        // force reloading dangling indices with a cluster state republish
-        client().admin().cluster().prepareReroute().get();
-        ensureGreen(indexName);
-        final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get();
-        ElasticsearchAssertions.assertHitCount(countResponse, 3L);
-
-        final SearchResponse cardinalityResponse = client().prepareSearch(indexName).addAggregation(
-                AggregationBuilders.cardinality("card").field("foo.hash")).get();
-        Cardinality cardinality = cardinalityResponse.getAggregations().get("card");
-        assertEquals(3L, cardinality.getValue());
-    }
-}
diff --git a/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip b/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip
deleted file mode 100644
index 0b69aac180bc6a62f65f735812ba05ed5b59cf01..0000000000000000000000000000000000000000
GIT binary patch
[binary contents of the deleted 2.0.0 test index omitted]
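Only the 2.x upgrade test and its bundled index are removed; the murmur3 mapper itself stays. The cardinality-over-hash pattern from the deleted test still applies to 5.x-created indices, for example (index and field names are hypothetical):

    // a murmur3 sub-field stores hashes that the cardinality aggregation can use directly
    SearchResponse resp = client().prepareSearch("my-index")
            .addAggregation(AggregationBuilders.cardinality("card").field("foo.hash"))
            .get();
    Cardinality cardinality = resp.getAggregations().get("card");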
diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
@@ protected void parseCreateField(ParseContext context, List fields
             return;
         }
         final int value = context.sourceToParse().source().length();
-        if (Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha2)) {
-            fields.add(new LegacyIntegerFieldMapper.CustomIntegerNumericField(value, fieldType()));
-        } else {
-            boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
-            boolean docValued = fieldType().hasDocValues();
-            boolean stored = fieldType().stored();
-            fields.addAll(NumberFieldMapper.NumberType.INTEGER.createFields(name(), value, indexed, docValued, stored));
-        }
+        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
+        boolean docValued = fieldType().hasDocValues();
+        boolean stored = fieldType().stored();
+        fields.addAll(NumberFieldMapper.NumberType.INTEGER.createFields(name(), value, indexed, docValued, stored));
     }
 
     @Override
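With the legacy branch gone, _size is always built by NumberFieldMapper.NumberType.INTEGER, i.e. as a point-indexed integer with optional doc values and stored field, so it queries like any other numeric field. A sketch under that assumption (index name hypothetical):

    // match documents whose source was at least 100 bytes long
    client().prepareSearch("my-index")
            .setQuery(QueryBuilders.rangeQuery("_size").gte(100))
            .get();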
diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java
deleted file mode 100644
index 7cbce102c57b2..0000000000000
--- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper.size;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.plugin.mapper.MapperSizePlugin;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHitField;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalTestCluster;
-import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-
-@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
-@LuceneTestCase.SuppressFileSystems("ExtrasFS")
-public class SizeFieldMapperUpgradeTests extends ESIntegTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(MapperSizePlugin.class);
-    }
-
-    public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException {
-        final String indexName = "index-mapper-size-2.0.0";
-        final String indexUUID = "ENCw7sG0SWuTPcH60bHheg";
-        InternalTestCluster.Async<String> master = internalCluster().startNodeAsync();
-        Path unzipDir = createTempDir();
-        Path unzipDataDir = unzipDir.resolve("data");
-        Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip");
-        try (InputStream stream = Files.newInputStream(backwardsIndex)) {
-            TestUtil.unzip(stream, unzipDir);
-        }
-        assertTrue(Files.exists(unzipDataDir));
-
-        Path dataPath = createTempDir();
-        Settings settings = Settings.builder()
-            .put(Environment.PATH_DATA_SETTING.getKey(), dataPath)
-            .build();
-        // workaround for dangling index loading issue when node is master
-        final String node = internalCluster().startDataOnlyNode(settings);
-        Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths();
-        assertEquals(1, nodePaths.length);
-        dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
-        assertFalse(Files.exists(dataPath));
-        Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices");
-        Files.move(src, dataPath);
-        Files.move(dataPath.resolve(indexName), dataPath.resolve(indexUUID));
-        master.get();
-        // force reloading dangling indices with a cluster state republish
-        client().admin().cluster().prepareReroute().get();
-        ensureGreen(indexName);
-        final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get();
-        ElasticsearchAssertions.assertHitCount(countResponse, 3L);
-
-        final SearchResponse sizeResponse = client().prepareSearch(indexName)
-                .addStoredField("_source")
-                .addStoredField("_size")
-                .get();
-        ElasticsearchAssertions.assertHitCount(sizeResponse, 3L);
-        for (SearchHit hit : sizeResponse.getHits().getHits()) {
-            String source = hit.getSourceAsString();
-            assertNotNull(source);
-            Map<String, SearchHitField> fields = hit.getFields();
-            assertTrue(fields.containsKey("_size"));
-            Number size = fields.get("_size").getValue();
-            assertNotNull(size);
-            assertEquals(source.length(), size.longValue());
-        }
-    }
-
-}
diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
index 5d832aa1518dd..3fa5300fb4383 100644
--- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
+++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
@@ -31,7 +31,6 @@
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.LegacyNumberFieldMapper;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -115,41 +114,4 @@ public void testThatDisablingWorksWhenMerging() throws Exception {
         assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false));
     }
 
-    public void testBWCMapper() throws Exception {
-        {
-            // IntPoint && docvalues=true for V_5_0_0_alpha5
-            IndexService service = createIndex("foo", Settings.EMPTY, "bar", "_size", "enabled=true");
-            DocumentMapper docMapper = service.mapperService().documentMapper("bar");
-            SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
-            assertThat(mapper.enabled(), is(true));
-            MappedFieldType ft = mapper.fieldType();
-            assertThat(ft.hasDocValues(), is(true));
-            assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class));
-        }
-
-        {
-            // IntPoint with docvalues=false if version > V_5_0_0_alpha2 && version < V_5_0_0_beta1
-            IndexService service = createIndex("foo2",
-                Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha4.id).build(),
-                "bar", "_size", "enabled=true");
-            DocumentMapper docMapper = service.mapperService().documentMapper("bar");
-            SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
-            assertThat(mapper.enabled(), is(true));
-            assertThat(mapper.fieldType().hasDocValues(), is(false));
-            assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class));
-        }
-
-        {
-            // LegacyIntField with docvalues=false if version < V_5_0_0_alpha2
-            IndexService service = createIndex("foo3",
-                Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha1.id).build(),
-                "bar", "_size", "enabled=true");
-            DocumentMapper docMapper = service.mapperService().documentMapper("bar");
-            SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
-            assertThat(mapper.enabled(), is(true));
-            assertThat(mapper.fieldType().hasDocValues(), is(false));
-            assertThat(mapper.fieldType(), instanceOf(LegacyNumberFieldMapper.NumberFieldType.class));
-        }
-    }
-
 }
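Of the three version-dependent encodings testBWCMapper covered, only the first remains possible. A sketch of the surviving invariant, with names reused from the deleted test:

    // _size now always maps to the modern number field type, with doc values enabled
    SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
    assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class));
    assertThat(mapper.fieldType().hasDocValues(), is(true));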
diff --git a/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip b/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip
deleted file mode 100644
index 0a74f835c3eb43ff8a9f65f724dcd917b908a853..0000000000000000000000000000000000000000
GIT binary patch
[binary contents of the deleted 2.0.0 test index omitted]
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java
index 345ccb0ddcdae..94acfb6164685 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java
@@ -33,7 +33,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase {
     public void testAccessMetaDataViaTemplate() {
         Map<String, Object> document = new HashMap<>();
         document.put("foo", "bar");
-        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
+        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document);
         ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{foo}}", templateService));
         assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar"));
@@ -48,7 +48,7 @@ public void testAccessMapMetaDataViaTemplate() {
         innerObject.put("baz", "hello baz");
         innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar"));
         document.put("foo", innerObject);
-        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
+        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document);
         ingestDocument.setFieldValue(templateService.compile("field1"),
                 ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", templateService));
         assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar"));
@@ -67,7 +67,7 @@ public void testAccessListMetaDataViaTemplate() {
         list.add(value);
         list.add(null);
         document.put("list2", list);
-        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
+        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document);
         ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", templateService));
         assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}"));
     }
@@ -77,7 +77,7 @@ public void testAccessIngestMetadataViaTemplate() {
         Map<String, Object> ingestMap = new HashMap<>();
         ingestMap.put("timestamp", "bogus_timestamp");
         document.put("_ingest", ingestMap);
-        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
+        IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document);
         ingestDocument.setFieldValue(templateService.compile("ingest_timestamp"),
                 ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", templateService));
         assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class),
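The two dropped null arguments were the timestamp and ttl metadata; the constructor now takes index, type, id, routing, parent and the source map. A minimal sketch of the new shape (placeholder values):

    Map<String, Object> source = new HashMap<>();
    source.put("foo", "bar");
    // arguments: index, type, id, routing, parent, source
    IngestDocument doc = new IngestDocument("index", "type", "id", null, null, source);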
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java
index 884897885cac6..38f17d6badae4 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java
@@ -64,7 +64,7 @@ public void testValueSourceWithTemplates() {
     }
 
     public void testAccessSourceViaTemplate() {
-        IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, null, new HashMap<>());
+        IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, new HashMap<>());
         assertThat(ingestDocument.hasField("marvel"), is(false));
         ingestDocument.setFieldValue(templateService.compile("{{_index}}"), ValueSource.wrap("{{_index}}", templateService));
         assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel"));
diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java
index efdf10d5a5d0b..f1c19710850ce 100644
--- a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java
+++ b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java
@@ -144,15 +144,7 @@ public static IngestDocument randomIngestDocument(Random random, Map randomSource(Random random) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
index d6bae91d7cf48..7410a445a38d2 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
@@ -73,7 +73,6 @@
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
-import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -145,7 +144,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range";
     protected static final String OBJECT_FIELD_NAME = "mapped_object";
     protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point";
-    protected static final String LEGACY_GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true";
     protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape";
     protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME,
             DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME,
@@ -1111,9 +1109,6 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
         });
         indicesQueriesRegistry = searchModule.getQueryParserRegistry();
 
-        String geoFieldMapping = (idxSettings.getIndexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) ?
-            LEGACY_GEO_POINT_FIELD_MAPPING : "type=geo_point";
-
         for (String type : currentTypes) {
             mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type,
                     STRING_FIELD_NAME, "type=text",
@@ -1125,7 +1120,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
                     DATE_FIELD_NAME, "type=date",
                     DATE_RANGE_FIELD_NAME, "type=date_range",
                     OBJECT_FIELD_NAME, "type=object",
-                    GEO_POINT_FIELD_NAME, geoFieldMapping,
+                    GEO_POINT_FIELD_NAME, "type=geo_point",
                     GEO_SHAPE_FIELD_NAME, "type=geo_shape"
             ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
             // also add mappings for two inner field in the object field
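Since geo_point fields now always map to LatLonPoint, the query tests no longer branch on index version. Geo queries run unchanged against the plain mapping, for example (a sketch; the field name comes from the test constants above):

    // distance query against the LatLonPoint-backed test field
    QueryBuilders.geoDistanceQuery(GEO_POINT_FIELD_NAME)
            .point(40.7, -74.0)
            .distance("10km");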