diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java index af108feecb829..d052facbac74e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java @@ -157,7 +157,7 @@ public void testTargetField() throws Exception { String fieldName; boolean ignoreMissing; do { - ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); fieldValue = RandomDocumentPicks.randomString(random()); fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue)); ignoreMissing = randomBoolean(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java index d97f627e422e3..d789f1bd79c42 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.IngestDocument.Metadata; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestTemplateService; @@ -121,39 +120,6 @@ public void testConvertScalarToList() throws Exception { } } - public void testAppendMetadataExceptVersion() throws Exception { - // here any metadata field value becomes a list, which won't make sense in most of the cases, - // but support for append is streamlined like for set so we test it - Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.ID, Metadata.ROUTING); - List values = new ArrayList<>(); - Processor appendProcessor; - if (randomBoolean()) { - String value = randomAlphaOfLengthBetween(1, 10); - values.add(value); - appendProcessor = createAppendProcessor(randomMetadata.getFieldName(), value, true); - } else { - int valuesSize = randomIntBetween(0, 10); - for (int i = 0; i < valuesSize; i++) { - values.add(randomAlphaOfLengthBetween(1, 10)); - } - appendProcessor = createAppendProcessor(randomMetadata.getFieldName(), values, true); - } - - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetadata.getFieldName()); - appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(randomMetadata.getFieldName(), List.class); - if (initialValue == null) { - assertThat(list, equalTo(values)); - } else { - assertThat(list.size(), equalTo(values.size() + 1)); - assertThat(list.get(0), equalTo(initialValue)); - for (int i = 1; i < list.size(); i++) { - assertThat(list.get(i), equalTo(values.get(i - 1))); - } - } - } - public void testAppendingDuplicateValueToScalarDoesNotModifyDocument() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String originalValue = randomAlphaOfLengthBetween(1, 10); diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 77d8ea8e9d612..92d2892a1f2e4 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -137,18 +137,14 @@ public void testRenameExistingFieldNullValue() throws Exception { } public void testRenameAtomicOperationSetFails() throws Exception { - Map source = new HashMap() { - @Override - public Object put(String key, Object value) { - if (key.equals("new_field")) { - throw new UnsupportedOperationException(); - } - return super.put(key, value); - } - }; - source.put("list", Collections.singletonList("item")); + Map metadata = new HashMap<>(); + metadata.put("list", Collections.singletonList("item")); - IngestDocument ingestDocument = TestIngestDocument.ofSourceAndMetadata(source); + IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("new_field", (k, v) -> { + if (v != null) { + throw new UnsupportedOperationException(); + } + }, "list", (k, v) -> {})); Processor processor = createRenameProcessor("list", "new_field", false); try { processor.execute(ingestDocument); @@ -161,18 +157,14 @@ public Object put(String key, Object value) { } public void testRenameAtomicOperationRemoveFails() throws Exception { - Map source = new HashMap() { - @Override - public Object remove(Object key) { - if (key.equals("list")) { - throw new UnsupportedOperationException(); - } - return super.remove(key); - } - }; - source.put("list", Collections.singletonList("item")); + Map metadata = new HashMap<>(); + metadata.put("list", Collections.singletonList("item")); - IngestDocument ingestDocument = TestIngestDocument.ofSourceAndMetadata(source); + IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("list", (k, v) -> { + if (v == null) { + throw new UnsupportedOperationException(); + } + })); Processor processor = createRenameProcessor("list", "new_field", false); try { processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java index 48428b2e2b157..e6477b8940a8b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.IngestDocument.Metadata; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestIngestDocument; @@ -28,6 +27,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.ingest.IngestDocument.Metadata; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -36,6 +36,9 @@ public class SetProcessorTests extends ESTestCase { public void testSetExistingFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + while 
(Metadata.isMetadata(fieldName)) { + fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + } Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); Processor processor = createSetProcessor(fieldName, fieldValue, null, true, false); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java index 01b5b2888a775..11d4226d93ef6 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UriPartsProcessorTests.java @@ -15,8 +15,8 @@ import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasEntry; public class UriPartsProcessorTests extends ESTestCase { @@ -191,7 +191,9 @@ public void testRemoveIfSuccessfulDoesNotRemoveTargetField() throws Exception { Map expectedSourceAndMetadata = new HashMap<>(); expectedSourceAndMetadata.put(field, Map.of("scheme", "http", "domain", "www.google.com", "path", "")); - assertThat(output.getSourceAndMetadata().entrySet(), containsInAnyOrder(expectedSourceAndMetadata.entrySet().toArray())); + for (Map.Entry entry : expectedSourceAndMetadata.entrySet()) { + assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue())); + } } public void testInvalidUri() { @@ -234,7 +236,9 @@ private void testUriParsing(boolean keepOriginal, boolean removeIfSuccessful, St } expectedSourceAndMetadata.put("url", values); - assertThat(output.getSourceAndMetadata().entrySet(), containsInAnyOrder(expectedSourceAndMetadata.entrySet().toArray())); + for (Map.Entry entry : expectedSourceAndMetadata.entrySet()) { + assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue())); + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java index 97ac5d2ba7e6a..8407d72eb728e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java @@ -114,7 +114,8 @@ public void testSimulate() throws Exception { source.put("fail", false); source.put("processed", true); IngestDocument ingestDocument = new IngestDocument("index", "id", Versions.MATCH_ANY, null, null, source); - assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); + assertThat(simulateDocumentBaseResult.getIngestDocument().getSource(), equalTo(ingestDocument.getSource())); + assertThat(simulateDocumentBaseResult.getIngestDocument().getMetadata(), equalTo(ingestDocument.getMetadata())); assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); // cleanup diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 14d6474799abc..e22f4cb1b6f4a 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -11,6 +11,7 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.ingest.IngestDocument; @@ -23,7 +24,6 @@ import java.io.IOException; import java.time.ZonedDateTime; -import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -42,7 +42,7 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { "ingest_document", true, a -> { - HashMap sourceAndMetadata = new HashMap<>(); + Map sourceAndMetadata = Maps.newHashMapWithExpectedSize(5); sourceAndMetadata.put(Metadata.INDEX.getFieldName(), a[0]); sourceAndMetadata.put(Metadata.ID.getFieldName(), a[1]); if (a[2] != null) { @@ -55,7 +55,8 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { sourceAndMetadata.put(Metadata.VERSION_TYPE.getFieldName(), a[4]); } sourceAndMetadata.putAll((Map) a[5]); - return new WriteableIngestDocument(IngestDocument.of(sourceAndMetadata, (Map) a[6])); + Map ingestMetadata = (Map) a[6]; + return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); } ); static { @@ -89,7 +90,7 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { WriteableIngestDocument(StreamInput in) throws IOException { Map sourceAndMetadata = in.readMap(); Map ingestMetadata = in.readMap(); - this.ingestDocument = IngestDocument.of(sourceAndMetadata, ingestMetadata); + this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata); } @Override @@ -105,18 +106,16 @@ IngestDocument getIngestDocument() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(DOC_FIELD); - Map metadataMap = ingestDocument.getMetadata(); - for (Map.Entry metadata : metadataMap.entrySet()) { + Map metadataMap = ingestDocument.getMetadata(); + for (Map.Entry metadata : metadataMap.entrySet()) { if (metadata.getValue() != null) { - builder.field(metadata.getKey().getFieldName(), metadata.getValue().toString()); + builder.field(metadata.getKey(), metadata.getValue().toString()); } } if (builder.getRestApiVersion() == RestApiVersion.V_7) { builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); } - Map source = IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()); - metadataMap.keySet().forEach(mD -> source.remove(mD.getFieldName())); - builder.field(SOURCE_FIELD, source); + builder.field(SOURCE_FIELD, ingestDocument.getSource()); builder.field(INGEST_FIELD, ingestDocument.getIngestMetadata()); builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 3d5a9e1fc5602..e19e968eeca60 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.LazyMap; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; @@ -27,7 +28,6 @@ import java.util.Base64; import java.util.Collections; 
import java.util.Date; -import java.util.EnumMap; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; @@ -49,7 +49,7 @@ public final class IngestDocument { static final String TIMESTAMP = "timestamp"; - private final Map sourceAndMetadata; + private final IngestSourceAndMetadata sourceAndMetadata; private final Map ingestMetadata; // Contains all pipelines that have been executed for this document @@ -58,42 +58,54 @@ public final class IngestDocument { private boolean doNoSelfReferencesCheck = false; public IngestDocument(String index, String id, long version, String routing, VersionType versionType, Map source) { - // source + at max 5 extra fields - this.sourceAndMetadata = Maps.newMapWithExpectedSize(source.size() + 5); - this.sourceAndMetadata.putAll(source); - this.sourceAndMetadata.put(Metadata.INDEX.getFieldName(), index); - this.sourceAndMetadata.put(Metadata.ID.getFieldName(), id); - this.sourceAndMetadata.put(Metadata.VERSION.getFieldName(), version); - if (routing != null) { - this.sourceAndMetadata.put(Metadata.ROUTING.getFieldName(), routing); - } - if (versionType != null) { - sourceAndMetadata.put(Metadata.VERSION_TYPE.getFieldName(), VersionType.toString(versionType)); - } + this.sourceAndMetadata = new IngestSourceAndMetadata( + index, + id, + version, + routing, + versionType, + ZonedDateTime.now(ZoneOffset.UTC), + source + ); this.ingestMetadata = new HashMap<>(); - this.ingestMetadata.put(TIMESTAMP, ZonedDateTime.now(ZoneOffset.UTC)); + this.ingestMetadata.put(TIMESTAMP, sourceAndMetadata.getTimestamp()); } /** * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties as the one provided as argument */ public IngestDocument(IngestDocument other) { - this(deepCopyMap(other.sourceAndMetadata), deepCopyMap(other.ingestMetadata)); + this( + new IngestSourceAndMetadata( + deepCopyMap(other.sourceAndMetadata.getSource()), + deepCopyMap(other.sourceAndMetadata.getMetadata()), + other.getIngestSourceAndMetadata().timestamp, + other.getIngestSourceAndMetadata().validators + ), + deepCopyMap(other.ingestMetadata) + ); } /** - * Constructor to create an IngestDocument from its constituent maps + * Constructor to create an IngestDocument from its constituent maps. The maps are shallow copied. */ - IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { - this.sourceAndMetadata = sourceAndMetadata; + public IngestDocument(Map sourceAndMetadata, Map ingestMetadata) { + Tuple, Map> sm = IngestSourceAndMetadata.splitSourceAndMetadata(sourceAndMetadata); + this.sourceAndMetadata = new IngestSourceAndMetadata( + sm.v1(), + sm.v2(), + IngestSourceAndMetadata.getTimestamp(ingestMetadata), + IngestSourceAndMetadata.VALIDATORS + ); this.ingestMetadata = ingestMetadata; } /** - * Build an IngestDocument from values read via deserialization + * Constructor to create an IngestDocument from its constituent maps */ - public static IngestDocument of(Map sourceAndMetadata, Map ingestMetadata) { - return new IngestDocument(sourceAndMetadata, ingestMetadata); + IngestDocument(IngestSourceAndMetadata sourceAndMetadata, Map ingestMetadata) { + this.sourceAndMetadata = sourceAndMetadata; + this.ingestMetadata = ingestMetadata; } /** @@ -706,43 +718,39 @@ private Map createTemplateModel() { } /** - * one time operation that extracts the metadata fields from the ingest document and returns them. - * Metadata fields that used to be accessible as ordinary top level fields will be removed as part of this call. 
+ * Get source and metadata map */ - public Map extractMetadata() { - Map metadataMap = new EnumMap<>(Metadata.class); - for (Metadata metadata : Metadata.values()) { - metadataMap.put(metadata, sourceAndMetadata.remove(metadata.getFieldName())); - } - return metadataMap; + public Map getSourceAndMetadata() { + return sourceAndMetadata; } /** - * Does the same thing as {@link #extractMetadata} but does not mutate the map. + * Get source and metadata map as {@link IngestSourceAndMetadata} */ - public Map getMetadata() { - Map metadataMap = new EnumMap<>(Metadata.class); - for (Metadata metadata : Metadata.values()) { - metadataMap.put(metadata, sourceAndMetadata.get(metadata.getFieldName())); - } - return metadataMap; + IngestSourceAndMetadata getIngestSourceAndMetadata() { + return sourceAndMetadata; } /** - * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. - * Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} + * Get all Metadata values in a Map */ - public Map getIngestMetadata() { - return this.ingestMetadata; + public Map getMetadata() { + return sourceAndMetadata.getMetadata(); } /** - * Returns the document including its metadata fields, unless {@link #extractMetadata()} has been called, in which case the - * metadata fields will not be present anymore. - * Modify the document instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} + * Get all source values in a Map */ - public Map getSourceAndMetadata() { - return this.sourceAndMetadata; + public Map getSource() { + return sourceAndMetadata.getSource(); + } + + /** + * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. + * Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} + */ + public Map getIngestMetadata() { + return this.ingestMetadata; } @SuppressWarnings("unchecked") @@ -756,6 +764,7 @@ public static Object deepCopy(Object value) { for (Map.Entry entry : mapValue.entrySet()) { copy.put(entry.getKey(), deepCopy(entry.getValue())); } + // TODO(stu): should this check for IngestSourceAndMetadata in addition to Map? return copy; } else if (value instanceof List listValue) { List copy = new ArrayList<>(listValue.size()); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 67594d96ec710..67f0abae7b23d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -897,27 +897,27 @@ private void innerExecute( itemDroppedHandler.accept(slot); handler.accept(null); } else { - Map metadataMap = ingestDocument.extractMetadata(); + IngestSourceAndMetadata sourceAndMetadata = ingestDocument.getIngestSourceAndMetadata(); - String newIndex = (String) metadataMap.get(IngestDocument.Metadata.INDEX); // it's fine to set all metadata fields all the time, as ingest document holds their starting values // before ingestion, which might also get modified during ingestion. 
- indexRequest.index(newIndex); - indexRequest.id((String) metadataMap.get(IngestDocument.Metadata.ID)); - indexRequest.routing((String) metadataMap.get(IngestDocument.Metadata.ROUTING)); - indexRequest.version(((Number) metadataMap.get(IngestDocument.Metadata.VERSION)).longValue()); - if (metadataMap.get(IngestDocument.Metadata.VERSION_TYPE) != null) { - indexRequest.versionType(VersionType.fromString((String) metadataMap.get(IngestDocument.Metadata.VERSION_TYPE))); + indexRequest.index(sourceAndMetadata.getIndex()); + indexRequest.id(sourceAndMetadata.getId()); + indexRequest.routing(sourceAndMetadata.getRouting()); + indexRequest.version(sourceAndMetadata.getVersion()); + if (sourceAndMetadata.getVersionType() != null) { + indexRequest.versionType(VersionType.fromString(sourceAndMetadata.getVersionType())); } - if (metadataMap.get(IngestDocument.Metadata.IF_SEQ_NO) != null) { - indexRequest.setIfSeqNo(((Number) metadataMap.get(IngestDocument.Metadata.IF_SEQ_NO)).longValue()); + Number number; + if ((number = sourceAndMetadata.getIfSeqNo()) != null) { + indexRequest.setIfSeqNo(number.longValue()); } - if (metadataMap.get(IngestDocument.Metadata.IF_PRIMARY_TERM) != null) { - indexRequest.setIfPrimaryTerm(((Number) metadataMap.get(IngestDocument.Metadata.IF_PRIMARY_TERM)).longValue()); + if ((number = sourceAndMetadata.getIfPrimaryTerm()) != null) { + indexRequest.setIfPrimaryTerm(number.longValue()); } try { boolean ensureNoSelfReferences = ingestDocument.doNoSelfReferencesCheck(); - indexRequest.source(ingestDocument.getSourceAndMetadata(), indexRequest.getContentType(), ensureNoSelfReferences); + indexRequest.source(sourceAndMetadata.getSource(), indexRequest.getContentType(), ensureNoSelfReferences); } catch (IllegalArgumentException ex) { // An IllegalArgumentException can be thrown when an ingest // processor creates a source map that is self-referencing. @@ -932,10 +932,9 @@ private void innerExecute( ); return; } - if (metadataMap.get(IngestDocument.Metadata.DYNAMIC_TEMPLATES) != null) { + Map map; + if ((map = sourceAndMetadata.getDynamicTemplates()) != null) { Map mergedDynamicTemplates = new HashMap<>(indexRequest.getDynamicTemplates()); - @SuppressWarnings("unchecked") - Map map = (Map) metadataMap.get(IngestDocument.Metadata.DYNAMIC_TEMPLATES); mergedDynamicTemplates.putAll(map); indexRequest.setDynamicTemplates(mergedDynamicTemplates); } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java new file mode 100644 index 0000000000000..1312e4bc45d69 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/ingest/IngestSourceAndMetadata.java @@ -0,0 +1,588 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.VersionType; + +import java.time.ZonedDateTime; +import java.util.AbstractCollection; +import java.util.AbstractMap; +import java.util.AbstractSet; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; + +/** + * Map containing ingest source and metadata. + * + * The Metadata values in {@link IngestDocument.Metadata} are validated when put in the map. + * _index, _id and _routing must be a String or null + * _version_type must be a lower case VersionType or null + * _version must be representable as a long without loss of precision or null + * _dynamic_templates must be a map + * _if_seq_no must be a long or null + * _if_primary_term must be a long or null + * + * The map is expected to be used by processors; server code should use the typed getters and setters where possible. + */ +class IngestSourceAndMetadata extends AbstractMap { + protected final ZonedDateTime timestamp; + + /** + * map of key to validating function. Should throw {@link IllegalArgumentException} on invalid value + */ + static final Map> VALIDATORS = Map.of( + IngestDocument.Metadata.INDEX.getFieldName(), + IngestSourceAndMetadata::stringValidator, + IngestDocument.Metadata.ID.getFieldName(), + IngestSourceAndMetadata::stringValidator, + IngestDocument.Metadata.ROUTING.getFieldName(), + IngestSourceAndMetadata::stringValidator, + IngestDocument.Metadata.VERSION.getFieldName(), + IngestSourceAndMetadata::longValidator, + IngestDocument.Metadata.VERSION_TYPE.getFieldName(), + IngestSourceAndMetadata::versionTypeValidator, + IngestDocument.Metadata.DYNAMIC_TEMPLATES.getFieldName(), + IngestSourceAndMetadata::mapValidator, + IngestDocument.Metadata.IF_SEQ_NO.getFieldName(), + IngestSourceAndMetadata::longValidator, + IngestDocument.Metadata.IF_PRIMARY_TERM.getFieldName(), + IngestSourceAndMetadata::longValidator, + IngestDocument.Metadata.TYPE.getFieldName(), + IngestSourceAndMetadata::stringValidator + ); + + protected final Map source; + protected final Map metadata; + protected final Map> validators; + private EntrySet entrySet; // cache to avoid recreation + + /** + * Create an IngestSourceAndMetadata with the given metadata, source and default validators + */ + IngestSourceAndMetadata( + String index, + String id, + long version, + String routing, + VersionType versionType, + ZonedDateTime timestamp, + Map source + ) { + this(new HashMap<>(source), metadataMap(index, id, version, routing, versionType), timestamp, VALIDATORS); + } + + /** + * Create IngestSourceAndMetadata with custom validators. + * + * @param source the source document map + * @param metadata the metadata map + * @param timestamp the time of ingestion + * @param validators validators to run on metadata map, if a key is in this map, the value is stored in metadata. + * if null, use the default validators from {@link #VALIDATORS} + */ + IngestSourceAndMetadata( + Map source, + Map metadata, + ZonedDateTime timestamp, + Map> validators + ) { + this.source = source != null ? source : new HashMap<>(); + this.metadata = metadata != null ? metadata : new HashMap<>(); + this.timestamp = timestamp; + this.validators = validators != null ?
validators : VALIDATORS; + validateMetadata(); + } + + /** + * Create the backing metadata map with the standard contents assuming default validators. + */ + protected static Map metadataMap(String index, String id, long version, String routing, VersionType versionType) { + Map metadata = Maps.newHashMapWithExpectedSize(IngestDocument.Metadata.values().length); + metadata.put(IngestDocument.Metadata.INDEX.getFieldName(), index); + metadata.put(IngestDocument.Metadata.ID.getFieldName(), id); + metadata.put(IngestDocument.Metadata.VERSION.getFieldName(), version); + if (routing != null) { + metadata.put(IngestDocument.Metadata.ROUTING.getFieldName(), routing); + } + if (versionType != null) { + metadata.put(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), VersionType.toString(versionType)); + } + return metadata; + } + + /** + * Returns a new metadata map and the existing source map with metadata removed. + */ + public static Tuple, Map> splitSourceAndMetadata(Map sourceAndMetadata) { + if (sourceAndMetadata instanceof IngestSourceAndMetadata ingestSourceAndMetadata) { + return new Tuple<>(new HashMap<>(ingestSourceAndMetadata.source), new HashMap<>(ingestSourceAndMetadata.metadata)); + } + Map metadata = Maps.newHashMapWithExpectedSize(IngestDocument.Metadata.values().length); + Map source = new HashMap<>(sourceAndMetadata); + for (String metadataName : VALIDATORS.keySet()) { + if (sourceAndMetadata.containsKey(metadataName)) { + metadata.put(metadataName, source.remove(metadataName)); + } + } + return new Tuple<>(source, metadata); + } + + /** + * Fetch the timestamp from the ingestMetadata, if it exists + * @return the timestamp for the document or null + */ + public static ZonedDateTime getTimestamp(Map ingestMetadata) { + if (ingestMetadata == null) { + return null; + } + if (ingestMetadata.get(IngestDocument.TIMESTAMP)instanceof ZonedDateTime timestamp) { + return timestamp; + } + return null; + } + + /** + * get the source map, if externally modified then the guarantees of this class are not enforced + */ + public Map getSource() { + return source; + } + + /** + * get the metadata map, if externally modified then the guarantees of this class are not enforced + */ + public Map getMetadata() { + return metadata; + } + + // These are available to scripts + public String getIndex() { + return getString(IngestDocument.Metadata.INDEX.getFieldName()); + } + + public void setIndex(String index) { + put(IngestDocument.Metadata.INDEX.getFieldName(), index); + } + + public String getId() { + return getString(IngestDocument.Metadata.ID.getFieldName()); + } + + public void setId(String id) { + put(IngestDocument.Metadata.ID.getFieldName(), id); + } + + public String getRouting() { + return getString(IngestDocument.Metadata.ROUTING.getFieldName()); + } + + public void setRouting(String routing) { + put(IngestDocument.Metadata.ROUTING.getFieldName(), routing); + } + + public String getVersionType() { + return getString(IngestDocument.Metadata.VERSION_TYPE.getFieldName()); + } + + public void setVersionType(String versionType) { + put(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), versionType); + } + + public long getVersion() { + Number version = getNumber(IngestDocument.Metadata.VERSION.getFieldName()); + assert version != null : IngestDocument.Metadata.VERSION.getFieldName() + " validation allowed null version"; + return version.longValue(); + } + + public void setVersion(long version) { + put(IngestDocument.Metadata.VERSION.getFieldName(), version); + } + + // timestamp isn't backed by 
the map + public ZonedDateTime getTimestamp() { + return timestamp; + } + + // These are not available to scripts + public Number getIfSeqNo() { + return getNumber(IngestDocument.Metadata.IF_SEQ_NO.getFieldName()); + } + + public Number getIfPrimaryTerm() { + return getNumber(IngestDocument.Metadata.IF_PRIMARY_TERM.getFieldName()); + } + + @SuppressWarnings("unchecked") + public Map getDynamicTemplates() { + return (Map) metadata.get(IngestDocument.Metadata.DYNAMIC_TEMPLATES.getFieldName()); + } + + /** + * Check that the metadata map contains only valid metadata and no extraneous keys, and that the source map contains no metadata + */ + protected void validateMetadata() { + int numMetadata = 0; + for (Map.Entry> entry : validators.entrySet()) { + String key = entry.getKey(); + if (metadata.containsKey(key)) { + numMetadata++; + } + entry.getValue().accept(key, metadata.get(key)); + if (source.containsKey(key)) { + throw new IllegalArgumentException("Unexpected metadata key [" + key + "] in source with value [" + source.get(key) + "]"); + } + } + if (numMetadata < metadata.size()) { + Set keys = new HashSet<>(metadata.keySet()); + keys.removeAll(validators.keySet()); + throw new IllegalArgumentException( + "Unexpected metadata keys [" + + keys.stream().sorted().map(k -> k + ":" + metadata.get(k)).collect(Collectors.joining(", ")) + + "]" + ); + } + } + + /** + * Returns an entrySet that respects the validators of the map. + */ + @Override + public Set> entrySet() { + if (entrySet == null) { + entrySet = new EntrySet(source.entrySet(), metadata.entrySet()); + } + return entrySet; + } + + /** + * Associate a key with a value. If the key has a validator, it is applied before association. + * @throws IllegalArgumentException if value does not pass validation for the given key + */ + @Override + public Object put(String key, Object value) { + BiConsumer validator = validators.get(key); + if (validator != null) { + validator.accept(key, value); + return metadata.put(key, value); + } + return source.put(key, value); + } + + /** + * Remove the mapping of key. If the key has a validator, it is checked before key removal. + * @throws IllegalArgumentException if the validator does not allow the key to be removed + */ + @Override + public Object remove(Object key) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + if (key instanceof String strKey) { + BiConsumer validator = validators.get(key); + if (validator != null) { + validator.accept(strKey, null); + return metadata.remove(key); + } + } + return source.remove(key); + } + + /** + * Clear entire map. For each key in the map with a validator, that validator is checked as in {@link #remove(Object)}.
+ * @throws IllegalArgumentException if any validator does not allow the key to be removed, in this case the map is unmodified + */ + @Override + public void clear() { + // AbstractMap uses entrySet().clear(), it should be quicker to run through the validators, then call the wrapped maps clear + validators.forEach((k, v) -> { + if (metadata.containsKey(k)) { + v.accept(k, null); + } + }); + metadata.clear(); + source.clear(); + } + + @Override + public int size() { + // uses map directly to avoid creating an EntrySet via AbstractMaps implementation, which returns entrySet().size() + return source.size() + metadata.size(); + } + + @Override + public boolean containsValue(Object value) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + return metadata.containsValue(value) || source.containsValue(value); + } + + @Override + public boolean containsKey(Object key) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + return metadata.containsKey(key) || source.containsKey(key); + } + + @Override + public Object get(Object key) { + // uses map directly to avoid AbstractMaps linear time implementation using entrySet() + if (validators.get(key) != null) { + return metadata.get(key); + } + return source.get(key); + } + + /** + * Get the String version of the value associated with {@code key}, or null + */ + public String getString(Object key) { + return Objects.toString(get(key), null); + } + + /** + * Get the {@link Number} associated with key, or null + * @throws IllegalArgumentException if the value is not a {@link Number} + */ + public Number getNumber(Object key) { + Object value = get(key); + if (value == null) { + return null; + } + if (value instanceof Number number) { + return number; + } + throw new IllegalArgumentException( + "unexpected type for [" + key + "] with value [" + value + "], expected Number, got [" + value.getClass().getName() + "]" + ); + } + + /** + * Set of entries of the wrapped map that calls the appropriate validator before changing an entries value or removing an entry. + * + * Inherits {@link AbstractSet#removeAll(Collection)}, which calls the overridden {@link #remove(Object)} which performs validation. + * + * Inherits {@link AbstractCollection#retainAll(Collection)} and {@link AbstractCollection#clear()}, which both use + * {@link EntrySetIterator#remove()} for removal. + */ + class EntrySet extends AbstractSet> { + Set> sourceSet; + Set> metadataSet; + + EntrySet(Set> sourceSet, Set> metadataSet) { + this.sourceSet = sourceSet; + this.metadataSet = metadataSet; + } + + @Override + public Iterator> iterator() { + return new EntrySetIterator(sourceSet.iterator(), metadataSet.iterator()); + } + + @Override + public int size() { + return sourceSet.size() + metadataSet.size(); + } + + @Override + public boolean remove(Object o) { + if (metadataSet.contains(o)) { + if (o instanceof Map.Entry entry) { + if (entry.getKey()instanceof String key) { + BiConsumer validator = validators.get(key); + if (validator != null) { + validator.accept(key, null); + return metadataSet.remove(o); + } + } + } + } + return sourceSet.remove(o); + } + } + + /** + * Iterator over the wrapped map that returns a validating {@link Entry} on {@link #next()} and validates on {@link #remove()}. 
+ * + * {@link #remove()} is called by remove in {@link AbstractMap#values()}, {@link AbstractMap#keySet()}, {@link AbstractMap#clear()} via + * {@link AbstractSet#clear()} + */ + class EntrySetIterator implements Iterator> { + final Iterator> sourceIter; + final Iterator> metadataIter; + + boolean sourceCur = true; + Entry cur; + + EntrySetIterator(Iterator> sourceIter, Iterator> metadataIter) { + this.sourceIter = sourceIter; + this.metadataIter = metadataIter; + } + + @Override + public boolean hasNext() { + return sourceIter.hasNext() || metadataIter.hasNext(); + } + + @Override + public Map.Entry next() { + sourceCur = sourceIter.hasNext(); + return cur = new Entry(sourceCur ? sourceIter.next() : metadataIter.next(), sourceCur); + } + + /** + * Remove current entry from the backing Map. Checks the Entry's key's validator, if one exists, before removal. + * @throws IllegalArgumentException if the validator does not allow the Entry to be removed + * @throws IllegalStateException if remove is called before {@link #next()} + */ + @Override + public void remove() { + if (cur == null) { + throw new IllegalStateException(); + } + if (sourceCur) { + sourceIter.remove(); + } else { + BiConsumer validator = validators.get(cur.getKey()); + if (validator != null) { + validator.accept(cur.getKey(), null); + } + metadataIter.remove(); + } + } + } + + /** + * Wrapped Map.Entry that calls the key's validator on {@link #setValue(Object)} + */ + class Entry implements Map.Entry { + final Map.Entry entry; + final boolean isSource; + + Entry(Map.Entry entry, boolean isSource) { + this.entry = entry; + this.isSource = isSource; + } + + @Override + public String getKey() { + return entry.getKey(); + } + + @Override + public Object getValue() { + return entry.getValue(); + } + + /** + * Associate the value with the Entry's key in the linked Map. If the Entry's key has a validator, it is applied before association + * @throws IllegalArgumentException if value does not pass validation for the Entry's key + */ + @Override + public Object setValue(Object value) { + if (isSource == false) { + BiConsumer validator = validators.get(entry.getKey()); + if (validator != null) { + validator.accept(entry.getKey(), value); + } + } + return entry.setValue(value); + } + } + + /** + * Allow a String or null + */ + protected static void stringValidator(String key, Object value) { + if (value == null || value instanceof String) { + return; + } + throw new IllegalArgumentException( + key + " must be null or a String but was [" + value + "] with type [" + value.getClass().getName() + "]" + ); + } + + /** + * Allow Numbers that can be represented as longs without loss of precision + */ + protected static void longValidator(String key, Object value) { + if (value == null) { + return; // Allow null version for now + } + if (value instanceof Number number) { + long version = number.longValue(); + // did we round? 
+ if (number.doubleValue() == version) { + return; + } + } + throw new IllegalArgumentException( + key + " may only be set to an int or a long but was [" + value + "] with type [" + value.getClass().getName() + "]" + ); + } + + /** + * Allow lower case Strings that map to VersionType values, or null + */ + protected static void versionTypeValidator(String key, Object value) { + if (value == null) { + return; + } + if (value instanceof String versionType) { + try { + VersionType.fromString(versionType); + return; + } catch (IllegalArgumentException ignored) {} + } + throw new IllegalArgumentException( + key + + " must be a null or one of [" + + Arrays.stream(VersionType.values()).map(vt -> VersionType.toString(vt)).collect(Collectors.joining(", ")) + + "] but was [" + + value + + "] with type [" + + value.getClass().getName() + + "]" + ); + } + + /** + * Allow maps + */ + protected static void mapValidator(String key, Object value) { + if (value == null || value instanceof Map) { + return; + } + throw new IllegalArgumentException( + key + " must be a null or a Map but was [" + value + "] with type [" + value.getClass().getName() + "]" + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if ((o instanceof IngestSourceAndMetadata) == false) return false; + if (super.equals(o) == false) return false; + IngestSourceAndMetadata that = (IngestSourceAndMetadata) o; + return Objects.equals(timestamp, that.timestamp) + && source.equals(that.source) + && metadata.equals(that.metadata) + && validators.equals(that.validators); + } + + @Override + public int hashCode() { + return Objects.hash(timestamp, source, metadata, validators); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 0a40080e56c8b..f609114832a70 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -377,7 +377,10 @@ public boolean isAsync() { for (int id = 0; id < numDocs; id++) { SimulateDocumentBaseResult result = (SimulateDocumentBaseResult) response.getResults().get(id); - assertThat(result.getIngestDocument().getMetadata().get(IngestDocument.Metadata.ID), equalTo(Integer.toString(id))); + assertThat( + result.getIngestDocument().getMetadata().get(IngestDocument.Metadata.ID.getFieldName()), + equalTo(Integer.toString(id)) + ); assertThat(result.getIngestDocument().getSourceAndMetadata().get("processed"), is(true)); } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java index 5865aadb8922a..3fbb867346fbc 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -31,6 +31,7 @@ import static org.elasticsearch.action.ingest.SimulatePipelineRequest.Fields; import static org.elasticsearch.action.ingest.SimulatePipelineRequest.SIMULATED_PIPELINE_ID; +import static org.elasticsearch.ingest.IngestDocument.Metadata; import static org.elasticsearch.ingest.IngestDocument.Metadata.ID; import static org.elasticsearch.ingest.IngestDocument.Metadata.IF_PRIMARY_TERM; import static 
org.elasticsearch.ingest.IngestDocument.Metadata.IF_SEQ_NO; @@ -99,10 +100,10 @@ public void testParseUsingPipelineStore() throws Exception { Iterator> expectedDocsIterator = expectedDocs.iterator(); for (IngestDocument ingestDocument : actualRequest.documents()) { Map expectedDocument = expectedDocsIterator.next(); - Map metadataMap = ingestDocument.extractMetadata(); - assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName()))); - assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE))); + Map metadataMap = ingestDocument.getMetadata(); + assertThat(metadataMap.get(INDEX.getFieldName()), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(metadataMap.get(ID.getFieldName()), equalTo(expectedDocument.get(ID.getFieldName()))); + assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); } assertThat(actualRequest.pipeline().getId(), equalTo(SIMULATED_PIPELINE_ID)); @@ -120,8 +121,8 @@ public void testParseWithProvidedPipeline() throws Exception { for (int i = 0; i < numDocs; i++) { Map doc = new HashMap<>(); Map expectedDoc = new HashMap<>(); - List fields = Arrays.asList(INDEX, ID, ROUTING, VERSION, VERSION_TYPE, IF_SEQ_NO, IF_PRIMARY_TERM); - for (IngestDocument.Metadata field : fields) { + List fields = Arrays.asList(INDEX, ID, ROUTING, VERSION, VERSION_TYPE, IF_SEQ_NO, IF_PRIMARY_TERM); + for (Metadata field : fields) { if (field == VERSION) { Object value = randomBoolean() ? randomLong() : randomInt(); doc.put(field.getFieldName(), randomBoolean() ? value : value.toString()); @@ -195,15 +196,15 @@ public void testParseWithProvidedPipeline() throws Exception { Iterator> expectedDocsIterator = expectedDocs.iterator(); for (IngestDocument ingestDocument : actualRequest.documents()) { Map expectedDocument = expectedDocsIterator.next(); - Map metadataMap = ingestDocument.extractMetadata(); - assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName()))); - assertThat(metadataMap.get(ROUTING), equalTo(expectedDocument.get(ROUTING.getFieldName()))); - assertThat(metadataMap.get(VERSION), equalTo(expectedDocument.get(VERSION.getFieldName()))); - assertThat(metadataMap.get(VERSION_TYPE), equalTo(expectedDocument.get(VERSION_TYPE.getFieldName()))); - assertThat(metadataMap.get(IF_SEQ_NO), equalTo(expectedDocument.get(IF_SEQ_NO.getFieldName()))); - assertThat(metadataMap.get(IF_PRIMARY_TERM), equalTo(expectedDocument.get(IF_PRIMARY_TERM.getFieldName()))); - assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE))); + Map metadataMap = ingestDocument.getMetadata(); + assertThat(metadataMap.get(INDEX.getFieldName()), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(metadataMap.get(ID.getFieldName()), equalTo(expectedDocument.get(ID.getFieldName()))); + assertThat(metadataMap.get(ROUTING.getFieldName()), equalTo(expectedDocument.get(ROUTING.getFieldName()))); + assertThat(metadataMap.get(VERSION.getFieldName()), equalTo(expectedDocument.get(VERSION.getFieldName()))); + assertThat(metadataMap.get(VERSION_TYPE.getFieldName()), equalTo(expectedDocument.get(VERSION_TYPE.getFieldName()))); + assertThat(metadataMap.get(IF_SEQ_NO.getFieldName()), equalTo(expectedDocument.get(IF_SEQ_NO.getFieldName()))); + 
assertThat(metadataMap.get(IF_PRIMARY_TERM.getFieldName()), equalTo(expectedDocument.get(IF_PRIMARY_TERM.getFieldName()))); + assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); } assertThat(actualRequest.pipeline().getId(), equalTo(SIMULATED_PIPELINE_ID)); @@ -279,8 +280,8 @@ public void testIngestPipelineWithDocumentsWithType() throws Exception { for (int i = 0; i < numDocs; i++) { Map doc = new HashMap<>(); Map expectedDoc = new HashMap<>(); - List fields = Arrays.asList(INDEX, TYPE, ID, ROUTING, VERSION, VERSION_TYPE); - for (IngestDocument.Metadata field : fields) { + List fields = Arrays.asList(INDEX, TYPE, ID, ROUTING, VERSION, VERSION_TYPE); + for (Metadata field : fields) { if (field == VERSION) { Long value = randomLong(); doc.put(field.getFieldName(), value); @@ -348,13 +349,13 @@ public void testIngestPipelineWithDocumentsWithType() throws Exception { Iterator> expectedDocsIterator = expectedDocs.iterator(); for (IngestDocument ingestDocument : actualRequest.documents()) { Map expectedDocument = expectedDocsIterator.next(); - Map metadataMap = ingestDocument.extractMetadata(); - assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName()))); - assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName()))); - assertThat(metadataMap.get(ROUTING), equalTo(expectedDocument.get(ROUTING.getFieldName()))); - assertThat(metadataMap.get(VERSION), equalTo(expectedDocument.get(VERSION.getFieldName()))); - assertThat(metadataMap.get(VERSION_TYPE), equalTo(expectedDocument.get(VERSION_TYPE.getFieldName()))); - assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE))); + Map metadataMap = ingestDocument.getMetadata(); + assertThat(metadataMap.get(INDEX.getFieldName()), equalTo(expectedDocument.get(INDEX.getFieldName()))); + assertThat(metadataMap.get(ID.getFieldName()), equalTo(expectedDocument.get(ID.getFieldName()))); + assertThat(metadataMap.get(ROUTING.getFieldName()), equalTo(expectedDocument.get(ROUTING.getFieldName()))); + assertThat(metadataMap.get(VERSION.getFieldName()), equalTo(expectedDocument.get(VERSION.getFieldName()))); + assertThat(metadataMap.get(VERSION_TYPE.getFieldName()), equalTo(expectedDocument.get(VERSION_TYPE.getFieldName()))); + assertThat(ingestDocument.getSource(), equalTo(expectedDocument.get(Fields.SOURCE))); } assertThat(actualRequest.pipeline().getId(), equalTo(SIMULATED_PIPELINE_ID)); assertThat(actualRequest.pipeline().getDescription(), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index 3596663b4fc69..f3e655fb58583 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Tuple; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestIngestDocument; @@ -44,16 +45,15 @@ public void testEqualsAndHashcode() throws Exception { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, 
IngestDocument.Metadata.values().length); for (int i = 0; i < numFields; i++) { - sourceAndMetadata.put(randomFrom(IngestDocument.Metadata.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); + Tuple metadata = TestIngestDocument.randomMetadata(); + sourceAndMetadata.put(metadata.v1(), metadata.v2()); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { ingestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } - WriteableIngestDocument ingestDocument = new WriteableIngestDocument( - TestIngestDocument.ofSourceAndIngest(sourceAndMetadata, ingestMetadata) - ); + WriteableIngestDocument ingestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); boolean changed = false; Map otherSourceAndMetadata; @@ -66,7 +66,8 @@ public void testEqualsAndHashcode() throws Exception { if (randomBoolean()) { numFields = randomIntBetween(1, IngestDocument.Metadata.values().length); for (int i = 0; i < numFields; i++) { - otherSourceAndMetadata.put(randomFrom(IngestDocument.Metadata.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); + Tuple metadata = TestIngestDocument.randomMetadata(); + otherSourceAndMetadata.put(metadata.v1(), metadata.v2()); } changed = true; } @@ -84,7 +85,7 @@ public void testEqualsAndHashcode() throws Exception { } WriteableIngestDocument otherIngestDocument = new WriteableIngestDocument( - TestIngestDocument.ofSourceAndIngest(otherSourceAndMetadata, otherIngestMetadata) + new IngestDocument(otherSourceAndMetadata, otherIngestMetadata) ); if (changed) { assertThat(ingestDocument, not(equalTo(otherIngestDocument))); @@ -94,10 +95,7 @@ public void testEqualsAndHashcode() throws Exception { assertThat(otherIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode())); WriteableIngestDocument thirdIngestDocument = new WriteableIngestDocument( - TestIngestDocument.ofSourceAndIngest( - Collections.unmodifiableMap(sourceAndMetadata), - Collections.unmodifiableMap(ingestMetadata) - ) + new IngestDocument(Collections.unmodifiableMap(sourceAndMetadata), Collections.unmodifiableMap(ingestMetadata)) ); assertThat(thirdIngestDocument, equalTo(ingestDocument)); assertThat(ingestDocument, equalTo(thirdIngestDocument)); @@ -109,7 +107,8 @@ public void testSerialization() throws IOException { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.Metadata.values().length); for (int i = 0; i < numFields; i++) { - sourceAndMetadata.put(randomFrom(IngestDocument.Metadata.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); + Tuple metadata = TestIngestDocument.randomMetadata(); + sourceAndMetadata.put(metadata.v1(), metadata.v2()); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); @@ -117,7 +116,7 @@ public void testSerialization() throws IOException { ingestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument( - TestIngestDocument.ofSourceAndIngest(sourceAndMetadata, ingestMetadata) + new IngestDocument(sourceAndMetadata, ingestMetadata) ); BytesStreamOutput out = new BytesStreamOutput(); @@ -143,9 +142,9 @@ public void testToXContent() throws IOException { Map toXContentSource = (Map) toXContentDoc.get("_source"); Map toXContentIngestMetadata = (Map) toXContentDoc.get("_ingest"); - Map 
metadataMap = ingestDocument.extractMetadata(); - for (Map.Entry metadata : metadataMap.entrySet()) { - String fieldName = metadata.getKey().getFieldName(); + Map metadataMap = ingestDocument.getMetadata(); + for (Map.Entry metadata : metadataMap.entrySet()) { + String fieldName = metadata.getKey(); if (metadata.getValue() == null) { assertThat(toXContentDoc.containsKey(fieldName), is(false)); } else { @@ -153,8 +152,7 @@ public void testToXContent() throws IOException { } } - // this is testing xcontent parsing so use the wire constructor - IngestDocument serializedIngestDocument = IngestDocument.of(toXContentSource, toXContentIngestMetadata); + IngestDocument serializedIngestDocument = new IngestDocument(toXContentSource, toXContentIngestMetadata); assertThat(serializedIngestDocument, equalTo(serializedIngestDocument)); } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index b7af95d8e9844..6db0217947ad9 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -962,7 +963,8 @@ public void testEqualsAndHashcode() throws Exception { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.Metadata.values().length); for (int i = 0; i < numFields; i++) { - sourceAndMetadata.put(randomFrom(IngestDocument.Metadata.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); + Tuple metadata = TestIngestDocument.randomMetadata(); + sourceAndMetadata.put(metadata.v1(), metadata.v2()); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); @@ -970,7 +972,7 @@ public void testEqualsAndHashcode() throws Exception { ingestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } // this is testing equality so use the wire constructor - IngestDocument ingestDocument = IngestDocument.of(sourceAndMetadata, ingestMetadata); + IngestDocument ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata); boolean changed = false; Map otherSourceAndMetadata; @@ -983,7 +985,8 @@ public void testEqualsAndHashcode() throws Exception { if (randomBoolean()) { numFields = randomIntBetween(1, IngestDocument.Metadata.values().length); for (int i = 0; i < numFields; i++) { - otherSourceAndMetadata.put(randomFrom(IngestDocument.Metadata.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); + Tuple metadata = TestIngestDocument.randomMetadata(); + otherSourceAndMetadata.put(metadata.v1(), metadata.v2()); } changed = true; } @@ -1000,7 +1003,7 @@ public void testEqualsAndHashcode() throws Exception { otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata); } - IngestDocument otherIngestDocument = IngestDocument.of(otherSourceAndMetadata, otherIngestMetadata); + IngestDocument otherIngestDocument = new IngestDocument(otherSourceAndMetadata, otherIngestMetadata); if (changed) { assertThat(ingestDocument, not(equalTo(otherIngestDocument))); assertThat(otherIngestDocument, not(equalTo(ingestDocument))); @@ -1008,7 +1011,7 @@ public void testEqualsAndHashcode() throws Exception { assertThat(ingestDocument, equalTo(otherIngestDocument)); assertThat(otherIngestDocument, equalTo(ingestDocument)); 
             assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode()));
-            IngestDocument thirdIngestDocument = IngestDocument.of(
+            IngestDocument thirdIngestDocument = new IngestDocument(
                 Collections.unmodifiableMap(sourceAndMetadata),
                 Collections.unmodifiableMap(ingestMetadata)
             );
@@ -1070,8 +1073,7 @@ public void testSetInvalidSourceField() throws Exception {
     }
 
     public void testDeepCopy() {
-        // this is testing copy so use the wire constructor
-        IngestDocument copiedDoc = IngestDocument.of(
+        IngestDocument copiedDoc = new IngestDocument(
             IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()),
             IngestDocument.deepCopyMap(ingestDocument.getIngestMetadata())
         );
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
index 667d38bd13812..c3559aceafa06 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
@@ -2229,8 +2229,9 @@ private class IngestDocumentMatcher implements ArgumentMatcher<IngestDocument> {
         @Override
         public boolean matches(IngestDocument other) {
-            // ingest metadata will not be the same (timestamp differs every time)
-            return Objects.equals(ingestDocument.getSourceAndMetadata(), other.getSourceAndMetadata());
+            // ingest metadata and IngestSourceAndMetadata will not be the same (timestamp differs every time)
+            return Objects.equals(ingestDocument.getSource(), other.getSource())
+                && Objects.equals(ingestDocument.getMetadata(), other.getMetadata());
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java
new file mode 100644
index 0000000000000..5d2c45b6e80c3
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestSourceAndMetadataTests.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class IngestSourceAndMetadataTests extends ESTestCase {
+
+    IngestSourceAndMetadata map;
+
+    public void testSettersAndGetters() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_index", "myIndex");
+        metadata.put("_id", "myId");
+        metadata.put("_routing", "myRouting");
+        metadata.put("_version", 20);
+        metadata.put("_if_seq_no", 500);
+        metadata.put("_if_primary_term", 10000);
+        metadata.put("_version_type", "internal");
+        metadata.put("_dynamic_templates", Map.of("foo", "bar"));
+        map = new IngestSourceAndMetadata(new HashMap<>(), metadata, null, null);
+        assertEquals("myIndex", map.getIndex());
+        map.setIndex("myIndex2");
+        assertEquals("myIndex2", map.getIndex());
+
+        assertEquals("myId", map.getId());
+        map.setId("myId2");
+        assertEquals("myId2", map.getId());
+
+        assertEquals("myRouting", map.getRouting());
+        map.setRouting("myRouting2");
+        assertEquals("myRouting2", map.getRouting());
+
+        assertEquals(20, map.getVersion());
+        map.setVersion(10);
+        assertEquals(10, map.getVersion());
+
+        assertEquals("internal", map.getVersionType());
+        map.setVersionType("external_gte");
+        assertEquals("external_gte", map.getVersionType());
+
+        assertEquals(Map.of("foo", "bar"), map.getDynamicTemplates());
+
+        assertEquals(500, map.getIfSeqNo());
+        assertEquals(10000, map.getIfPrimaryTerm());
+    }
+
+    public void testGetString() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_routing", "myRouting");
+        Map<String, Object> source = new HashMap<>();
+        source.put("str", "myStr");
+        source.put("toStr", new Object() {
+            @Override
+            public String toString() {
+                return "myToString()";
+            }
+        });
+        source.put("missing", null);
+        map = new IngestSourceAndMetadata(source, metadata, null, null);
+        assertNull(map.getString("missing"));
+        assertNull(map.getString("no key"));
+        assertEquals("myToString()", map.getString("toStr"));
+        assertEquals("myStr", map.getString("str"));
+        assertEquals("myRouting", map.getString("_routing"));
+    }
+
+    public void testGetNumber() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_version", Long.MAX_VALUE);
+        Map<String, Object> source = new HashMap<>();
+        source.put("number", "NaN");
+        source.put("missing", null);
+        map = new IngestSourceAndMetadata(source, metadata, null, null);
+        assertEquals(Long.MAX_VALUE, map.getNumber("_version"));
+        IllegalArgumentException err = expectThrows(IllegalArgumentException.class, () -> map.getNumber("number"));
+        assertEquals("unexpected type for [number] with value [NaN], expected Number, got [java.lang.String]", err.getMessage());
+        assertNull(map.getNumber("missing"));
+        assertNull(map.getNumber("no key"));
+    }
+
+    public void testInvalidMetadata() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_version", Double.MAX_VALUE);
+        IllegalArgumentException err = expectThrows(
+            IllegalArgumentException.class,
+            () -> new IngestSourceAndMetadata(new HashMap<>(), metadata, null, null)
+        );
+        assertThat(err.getMessage(), containsString("_version may only be set to an int or a long but was ["));
+        assertThat(err.getMessage(), containsString("] with type [java.lang.Double]"));
+    }
+
+    public void testSourceInMetadata() {
+        Map<String, Object> source = new HashMap<>();
+        source.put("_version", 25);
+        IllegalArgumentException err = expectThrows(
+            IllegalArgumentException.class,
+            () -> new IngestSourceAndMetadata(source, source, null, null)
+        );
+        assertEquals("Unexpected metadata key [_version] in source with value [25]", err.getMessage());
+    }
+
+    public void testExtraMetadata() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_version", 123);
+        metadata.put("version", 567);
+        metadata.put("routing", "myRouting");
+        IllegalArgumentException err = expectThrows(
+            IllegalArgumentException.class,
+            () -> new IngestSourceAndMetadata(new HashMap<>(), metadata, null, null)
+        );
+        assertEquals("Unexpected metadata keys [routing:myRouting, version:567]", err.getMessage());
+    }
+
+    public void testPutSource() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_version", 123);
+        Map<String, Object> source = new HashMap<>();
+        map = new IngestSourceAndMetadata(source, metadata, null, null);
+    }
+
+    public void testRemove() {
+        String cannotRemove = "cannotRemove";
+        String canRemove = "canRemove";
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put(cannotRemove, "value");
+        map = new IngestSourceAndMetadata(new HashMap<>(), metadata, null, Map.of(cannotRemove, (k, v) -> {
+            if (v == null) {
+                throw new IllegalArgumentException(k + " cannot be null or removed");
+            }
+        }, canRemove, (k, v) -> {}));
+        String msg = "cannotRemove cannot be null or removed";
+        IllegalArgumentException err = expectThrows(IllegalArgumentException.class, () -> map.remove(cannotRemove));
+        assertEquals(msg, err.getMessage());
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.put(cannotRemove, null));
+        assertEquals(msg, err.getMessage());
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.entrySet().iterator().next().setValue(null));
+        assertEquals(msg, err.getMessage());
+
+        err = expectThrows(IllegalArgumentException.class, () -> {
+            Iterator<Map.Entry<String, Object>> it = map.entrySet().iterator();
+            while (it.hasNext()) {
+                it.next();
+                it.remove();
+            }
+        });
+        assertEquals(msg, err.getMessage());
+
+        err = expectThrows(IllegalArgumentException.class, () -> {
+            Set<Map.Entry<String, Object>> set = map.entrySet();
+            set.remove(map.entrySet().iterator().next());
+        });
+        assertEquals(msg, err.getMessage());
+
+        map.put(canRemove, "value");
+        assertEquals("value", map.get(canRemove));
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.clear());
+        assertEquals(msg, err.getMessage());
+
+        assertEquals(2, map.size());
+
+        map.entrySet().remove(new TestEntry(canRemove, "value"));
+        assertNull(map.get(canRemove));
+
+        map.put(canRemove, "value");
+        assertEquals("value", map.get(canRemove));
+        map.remove(canRemove);
+        assertNull(map.get(canRemove));
+
+        map.put("sourceKey", "sourceValue");
+        assertEquals("sourceValue", map.get("sourceKey"));
+        map.entrySet().remove(new TestEntry("sourceKey", "sourceValue"));
+        assertNull(map.get("sourceKey"));
+
+        map.put("sourceKey", "sourceValue");
+        assertEquals("sourceValue", map.get("sourceKey"));
+        map.remove("sourceKey");
+        assertNull(map.get("sourceKey"));
+    }
+
+    public void testEntryAndIterator() {
+        Map<String, Object> metadata = new HashMap<>();
+        metadata.put("_version", 123);
+        metadata.put("_version_type", "external");
+        Map<String, Object> source = new HashMap<>();
+        source.put("foo", "bar");
+        source.put("baz", "qux");
+        source.put("noz", "zon");
+        map = new IngestSourceAndMetadata(source, metadata, null, null);
+
+        for (Map.Entry<String, Object> entry : map.entrySet()) {
+            if ("foo".equals(entry.getKey())) {
+                entry.setValue("changed");
+            } else if ("_version_type".equals(entry.getKey())) {
+                entry.setValue("external_gte");
+            }
+        }
+        assertEquals("changed", map.get("foo"));
+        assertEquals("external_gte", map.getVersionType());
+
+        assertEquals(5, map.entrySet().size());
+        assertEquals(5, map.size());
+
+        Iterator<Map.Entry<String, Object>> it = map.entrySet().iterator();
+        expectThrows(IllegalStateException.class, it::remove);
+
+        while (it.hasNext()) {
+            Map.Entry<String, Object> entry = it.next();
+            if ("baz".equals(entry.getKey())) {
+                it.remove();
+            } else if ("_version_type".equals(entry.getKey())) {
+                it.remove();
+            }
+        }
+
+        assertNull(map.getVersionType());
+        assertFalse(map.containsKey("baz"));
+        assertTrue(map.containsKey("_version"));
+        assertTrue(map.containsKey("foo"));
+        assertTrue(map.containsKey("noz"));
+        assertEquals(3, map.entrySet().size());
+        assertEquals(3, map.size());
+        map.clear();
+        assertEquals(0, map.size());
+    }
+
+    public void testContainsValue() {
+        map = new IngestSourceAndMetadata(Map.of("myField", "fieldValue"), Map.of("_version", 5678), null, null);
+        assertTrue(map.containsValue(5678));
+        assertFalse(map.containsValue(5679));
+        assertTrue(map.containsValue("fieldValue"));
+        assertFalse(map.containsValue("fieldValue2"));
+    }
+
+    public void testValidators() {
+        map = new IngestSourceAndMetadata("myIndex", "myId", 1234, "myRouting", VersionType.EXTERNAL, null, new HashMap<>());
+        IllegalArgumentException err = expectThrows(IllegalArgumentException.class, () -> map.put("_index", 555));
+        assertEquals("_index must be null or a String but was [555] with type [java.lang.Integer]", err.getMessage());
+        assertEquals("myIndex", map.getIndex());
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_id", 555));
+        assertEquals("_id must be null or a String but was [555] with type [java.lang.Integer]", err.getMessage());
+        assertEquals("myId", map.getId());
+        map.put("_id", "myId2");
+        assertEquals("myId2", map.getId());
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_routing", 555));
+        assertEquals("_routing must be null or a String but was [555] with type [java.lang.Integer]", err.getMessage());
+        assertEquals("myRouting", map.getRouting());
+        map.put("_routing", "myRouting2");
+        assertEquals("myRouting2", map.getRouting());
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_version", "five-five-five"));
+        assertEquals(
+            "_version may only be set to an int or a long but was [five-five-five] with type [java.lang.String]",
+            err.getMessage()
+        );
+        assertEquals(1234, map.getVersion());
+        map.put("_version", 555);
+        assertEquals(555, map.getVersion());
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_version_type", "vt"));
+        assertEquals(
+            "_version_type must be a null or one of [internal, external, external_gte] but was [vt] with type [java.lang.String]",
+            err.getMessage()
+        );
+        assertEquals("external", map.getVersionType());
+        map.put("_version_type", "internal");
+        assertEquals("internal", map.getVersionType());
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_version_type", VersionType.EXTERNAL.toString()));
+        assertEquals(
+            "_version_type must be a null or one of [internal, external, external_gte] but was [EXTERNAL] with type [java.lang.String]",
+            err.getMessage()
+        );
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_version_type", VersionType.EXTERNAL));
+        assertEquals(
+            "_version_type must be a null or one of [internal, external, external_gte] but was [EXTERNAL] with type"
+                + " [org.elasticsearch.index.VersionType$2]",
+            err.getMessage()
+        );
+        assertEquals("internal", map.getVersionType());
+        err = expectThrows(IllegalArgumentException.class, () -> map.setVersionType(VersionType.EXTERNAL.toString()));
+        assertEquals(
+            "_version_type must be a null or one of [internal, external, external_gte] but was [EXTERNAL] with type [java.lang.String]",
+            err.getMessage()
+        );
+
+        err = expectThrows(IllegalArgumentException.class, () -> map.put("_dynamic_templates", "5"));
+        assertEquals("_dynamic_templates must be a null or a Map but was [5] with type [java.lang.String]", err.getMessage());
+        Map<String, String> dt = Map.of("a", "b");
+        map.put("_dynamic_templates", dt);
+        assertThat(dt, equalTo(map.getDynamicTemplates()));
+    }
+
+    public void testDefaultValidatorForAllMetadata() {
+        for (IngestDocument.Metadata m : IngestDocument.Metadata.values()) {
+            assertThat(IngestSourceAndMetadata.VALIDATORS, hasEntry(equalTo(m.getFieldName()), notNullValue()));
+        }
+        assertEquals(IngestDocument.Metadata.values().length, IngestSourceAndMetadata.VALIDATORS.size());
+    }
+
+    private static class TestEntry implements Map.Entry<String, Object> {
+        String key;
+        Object value;
+
+        TestEntry(String key, Object value) {
+            this.key = key;
+            this.value = value;
+        }
+
+        @Override
+        public String getKey() {
+            return key;
+        }
+
+        @Override
+        public Object getValue() {
+            return value;
+        }
+
+        @Override
+        public Object setValue(Object value) {
+            throw new UnsupportedOperationException();
+        }
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java
index 569e960eabc1c..eeedacacbed74 100644
--- a/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java
+++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestIngestDocument.java
@@ -8,8 +8,13 @@
 package org.elasticsearch.ingest;
 
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.test.ESTestCase;
+
 import java.util.HashMap;
 import java.util.Map;
+import java.util.function.BiConsumer;
 
 /**
  * Construct ingest documents for testing purposes
@@ -17,21 +22,18 @@ public class TestIngestDocument {
     /**
-     * These two test static factory methods are needed for testing and allow to the creation of a new {@link IngestDocument} given the
-     * provided elasticsearch metadata, source and ingest metadata.
-     *
-     * This is needed because the ingest metadata will be initialized with the current timestamp at init time, which makes equality
-     * comparisons impossible in tests.
+     * Create an IngestDocument for testing that passes an empty mutable map for ingestMetadata
      */
-    public static IngestDocument ofSourceAndIngest(Map<String, Object> sourceAndMetadata, Map<String, Object> ingestMetadata) {
-        return new IngestDocument(sourceAndMetadata, ingestMetadata);
+    public static IngestDocument ofSourceAndMetadata(Map<String, Object> sourceAndMetadata) {
+        return new IngestDocument(sourceAndMetadata, new HashMap<>());
     }
 
     /**
-     * Create an IngestDocument for testing as in {@link #ofSourceAndIngest(Map, Map)} but pass an empty mutable map for ingestMetaata
+     * Create an IngestDocument with a metadata map and validators. The metadata map is passed by reference, not copied, so callers
+     * can observe changes to the map directly.
     */
-    public static IngestDocument ofSourceAndMetadata(Map<String, Object> sourceAndMetadata) {
-        return new IngestDocument(sourceAndMetadata, new HashMap<>());
+    public static IngestDocument ofMetadataWithValidator(Map<String, Object> metadata, Map<String, BiConsumer<String, Object>> validators) {
+        return new IngestDocument(new IngestSourceAndMetadata(new HashMap<>(), metadata, null, validators), new HashMap<>());
     }
 
     /**
@@ -40,4 +42,14 @@ public static IngestDocument ofSourceAndMetadata(Map sourceAndMe
     public static IngestDocument emptyIngestDocument() {
         return new IngestDocument(new HashMap<>(), new HashMap<>());
     }
+
+    public static Tuple<String, Object> randomMetadata() {
+        IngestDocument.Metadata metadata = ESTestCase.randomFrom(IngestDocument.Metadata.values());
+        return new Tuple<>(metadata.getFieldName(), switch (metadata) {
+            case VERSION, IF_SEQ_NO, IF_PRIMARY_TERM -> ESTestCase.randomIntBetween(0, 124);
+            case VERSION_TYPE -> VersionType.toString(ESTestCase.randomFrom(VersionType.values()));
+            case DYNAMIC_TEMPLATES -> Map.of(ESTestCase.randomAlphaOfLengthBetween(5, 10), ESTestCase.randomAlphaOfLengthBetween(5, 10));
+            default -> ESTestCase.randomAlphaOfLengthBetween(5, 10);
+        });
+    }
 }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java
index 2b87c916694e9..4377051fabd59 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/InferenceResultsTestCase.java
@@ -55,14 +55,14 @@ public void testWriteToDocAndSerialize() throws IOException {
         InferenceResults.writeResult(inferenceResult, document, parentField, modelId);
         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
             builder.startObject();
-            Map<IngestDocument.Metadata, Object> metadataMap = document.getMetadata();
-            for (Map.Entry<IngestDocument.Metadata, Object> metadata : metadataMap.entrySet()) {
+            Map<String, Object> metadataMap = document.getMetadata();
+            for (Map.Entry<String, Object> metadata : metadataMap.entrySet()) {
                 if (metadata.getValue() != null) {
-                    builder.field(metadata.getKey().getFieldName(), metadata.getValue().toString());
+                    builder.field(metadata.getKey(), metadata.getValue().toString());
                 }
             }
             Map<String, Object> source = IngestDocument.deepCopyMap(document.getSourceAndMetadata());
-            metadataMap.keySet().forEach(mD -> source.remove(mD.getFieldName()));
+            metadataMap.keySet().forEach(mD -> source.remove(mD));
             builder.field("_source", source);
             builder.field("_ingest", document.getIngestMetadata());
             builder.endObject();
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java
index 5e8384b950d66..78e97c8116bac 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java
@@ -303,7 +303,7 @@ public void testGenerateRequestWithEmptyMapping() {
         assertThat(processor.buildRequest(document).getObjectsToInfer().get(0), equalTo(source));
         Map<String, Object> ingestMetadata = Collections.singletonMap("_value", 3);
-        document = TestIngestDocument.ofSourceAndIngest(source, ingestMetadata);
+        document = new IngestDocument(source, ingestMetadata);
         Map<String, Object> expected = new HashMap<>(source);
         expected.put("_ingest", ingestMetadata);
@@ -346,7 +346,7 @@ public void testGenerateWithMapping() {
         assertThat(processor.buildRequest(document).getObjectsToInfer().get(0), equalTo(expectedMap));
         Map<String, Object> ingestMetadata = Collections.singletonMap("_value", "baz");
-        document = TestIngestDocument.ofSourceAndIngest(source, ingestMetadata);
+        document = new IngestDocument(source, ingestMetadata);
         expectedMap = new HashMap<>(expectedMap);
         expectedMap.put("metafield", "baz");
         expectedMap.put("_ingest", ingestMetadata);