Merged

62 commits
d9c1f40
Ingest: IngestDocument requires non-null version
stu-elastic Jun 14, 2022
9a21f12
Fix doc and unit tests
stu-elastic Jun 14, 2022
d1286b1
simulate sets version to Versions.MATCH_ANY
stu-elastic Jun 14, 2022
d051ec1
Ingest: Add validation and strong typing to sourceAndMetdata map
stu-elastic Jun 14, 2022
466287a
Merge branch 'ingest_long-version' of github.com:stu-elastic/elastics…
stu-elastic Jun 14, 2022
05db222
Fix test compile issues
stu-elastic Jun 14, 2022
02246f3
spotless
stu-elastic Jun 14, 2022
4d7883d
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 15, 2022
aece824
Clean up some tests
stu-elastic Jun 15, 2022
d7bbe07
more test cleanup
stu-elastic Jun 15, 2022
94ba063
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 15, 2022
5444453
update size for version, check for entries individually to avoid type…
stu-elastic Jun 15, 2022
567d159
spotless
stu-elastic Jun 15, 2022
d7c61a8
Fix xpack tests by using the test constructor, add ValidatingMap and …
stu-elastic Jun 15, 2022
0121dff
version added in test constructor, no need to add in test
stu-elastic Jun 15, 2022
b07a484
comment hashcode in xcontent test, produce valid random metadata
stu-elastic Jun 15, 2022
5e5a899
Ingest: move IngestDocument test ctor usage to static builder
stu-elastic Jun 16, 2022
1801596
Add javadoc and comments
stu-elastic Jun 16, 2022
4d27346
revert modules/ingest-common/build.gradle
stu-elastic Jun 16, 2022
145cd6b
map constructor and fromWire javadoc
stu-elastic Jun 16, 2022
7feb3f7
Use map in CircleProcessorTests
stu-elastic Jun 16, 2022
5b4fdf0
WIP
stu-elastic Jun 17, 2022
26582c0
Move test constructors to test framework
stu-elastic Jun 20, 2022
4f72efb
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 20, 2022
a20642b
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 20, 2022
b4dffc8
Merge branch 'ingest_document-test-constructor' of github.com:stu-ela…
stu-elastic Jun 20, 2022
ae3e171
getMetadata returns strings
stu-elastic Jun 20, 2022
6462fbe
New constructors
stu-elastic Jun 20, 2022
0c37da1
Remove version must exist validation
stu-elastic Jun 21, 2022
346c772
IngestDocument.fromWire -> of & TestIngestDocument.{{from -> of}Sourc…
stu-elastic Jun 21, 2022
afdbfdd
Revert unnecessary changes from Metadata move
stu-elastic Jun 21, 2022
3edbf05
spotless
stu-elastic Jun 21, 2022
7bcb441
merge of
stu-elastic Jun 21, 2022
821abab
Put all metadata in metadata map, use correct types for if seq and if…
stu-elastic Jun 21, 2022
178b3ec
handle copying IngestSourceAndMetadata in constructor
stu-elastic Jun 21, 2022
e436eb7
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 21, 2022
00d378c
Remove override xcontent test
stu-elastic Jun 21, 2022
5b57462
Correct source and metadata order
stu-elastic Jun 21, 2022
f93512a
Fix dynamic template getter
stu-elastic Jun 21, 2022
4b409fe
Fix rename processor tests
stu-elastic Jun 21, 2022
32f31e3
IngestDocumentMatcher matches underlying maps
stu-elastic Jun 21, 2022
902c104
Cleanup IngestDocument
stu-elastic Jun 21, 2022
63ce5eb
Add getter tests
stu-elastic Jun 21, 2022
e1ff7c2
Metadata tests
stu-elastic Jun 21, 2022
1735ba4
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 22, 2022
46cd1ee
Test source and metadata separately in IngestClientIT
stu-elastic Jun 22, 2022
0be9818
IngestSourceAndMetadata covering unit tests
stu-elastic Jun 22, 2022
c1ab555
Comments
stu-elastic Jun 22, 2022
79fe344
spotless
stu-elastic Jun 22, 2022
281a6ce
Move IngestSourceAndMetadata package private
stu-elastic Jun 22, 2022
6b52e40
Continue hiding IngestSourceAndMetdata in tests by adding ability to …
stu-elastic Jun 22, 2022
9181ea6
Merge branch 'master' of github.com:elastic/elasticsearch into ingest…
stu-elastic Jun 22, 2022
e39ab10
use correct helper in getString test
stu-elastic Jun 22, 2022
1ff3adb
WriteableIngest presize, typo in primate, more tests, type and value …
stu-elastic Jun 23, 2022
e8ecf2a
Validators are BiConsumers, fix TestIG javadoc, rm extra setup call
stu-elastic Jun 24, 2022
70becc6
remove return from validator javadoc
stu-elastic Jun 24, 2022
30b89e0
clean up consumer validators
stu-elastic Jun 24, 2022
1460ae9
Remove some IngestDocument and IngestSourceAndMetadata constructors
stu-elastic Jun 24, 2022
13f6898
Move getTimestamp from IngestDocument to IngestSourceAndMetadata
stu-elastic Jun 24, 2022
0849a2f
Merge branch 'master' of github.com:stu-elastic/elasticsearch into in…
stu-elastic Jun 27, 2022
2bad2d0
getIngestSourceAndMetadata pkg private, expect ingestMetadata to be n…
stu-elastic Jun 27, 2022
9de0da6
Merge branch 'master' of github.com:stu-elastic/elasticsearch into in…
stu-elastic Jun 27, 2022
@@ -157,7 +157,7 @@ public void testTargetField() throws Exception {
String fieldName;
boolean ignoreMissing;
do {
-ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
+ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
fieldValue = RandomDocumentPicks.randomString(random());
fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue));
ignoreMissing = randomBoolean();
@@ -11,7 +11,6 @@
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.ingest.IngestDocument;
-import org.elasticsearch.ingest.IngestDocument.Metadata;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
@@ -121,39 +120,6 @@ public void testConvertScalarToList() throws Exception {
}
}

-public void testAppendMetadataExceptVersion() throws Exception {
Review thread on this removed test:

Member: why is this test removed?

Contributor Author: It's not valid to append to any metadata field (and thus create an array list) based on my tests in 8.2. This was correctly failing validation.

Member: Was this test just abusing metadata?

Contributor Author: Yes, the only metadata that doesn't fail is _type and I think that's because it's missing validation.

(An illustrative sketch of this behavior follows this file's diff.)

-// here any metadata field value becomes a list, which won't make sense in most of the cases,
-// but support for append is streamlined like for set so we test it
-Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.ID, Metadata.ROUTING);
-List<String> values = new ArrayList<>();
-Processor appendProcessor;
-if (randomBoolean()) {
-String value = randomAlphaOfLengthBetween(1, 10);
-values.add(value);
-appendProcessor = createAppendProcessor(randomMetadata.getFieldName(), value, true);
-} else {
-int valuesSize = randomIntBetween(0, 10);
-for (int i = 0; i < valuesSize; i++) {
-values.add(randomAlphaOfLengthBetween(1, 10));
-}
-appendProcessor = createAppendProcessor(randomMetadata.getFieldName(), values, true);
-}
-
-IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
-Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetadata.getFieldName());
-appendProcessor.execute(ingestDocument);
-List<?> list = ingestDocument.getFieldValue(randomMetadata.getFieldName(), List.class);
-if (initialValue == null) {
-assertThat(list, equalTo(values));
-} else {
-assertThat(list.size(), equalTo(values.size() + 1));
-assertThat(list.get(0), equalTo(initialValue));
-for (int i = 1; i < list.size(); i++) {
-assertThat(list.get(i), equalTo(values.get(i - 1)));
-}
-}
-}
-
public void testAppendingDuplicateValueToScalarDoesNotModifyDocument() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String originalValue = randomAlphaOfLengthBetween(1, 10);
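For context on the review thread above, here is a minimal illustrative sketch (not part of this PR's diff) of the behavior being discussed. It is written as it might appear inside the append-processor test class and reuses its createAppendProcessor helper; the exact exception type thrown by the metadata validation is an assumption, not taken from this change.

public void testAppendToMetadataFieldRejected() throws Exception {
    // Appending to a metadata field such as _index would turn its value into a List,
    // which the typed, validated metadata map is expected to reject.
    IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
    Processor appendProcessor = createAppendProcessor(IngestDocument.Metadata.INDEX.getFieldName(), randomAlphaOfLengthBetween(1, 10), true);
    // IllegalArgumentException is assumed here for illustration; the real validator may throw a different type.
    expectThrows(IllegalArgumentException.class, () -> appendProcessor.execute(ingestDocument));
}

Compare this with the removed testAppendMetadataExceptVersion above, which asserted the opposite: that the append succeeded and turned the metadata value into a list.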
@@ -137,18 +137,14 @@ public void testRenameExistingFieldNullValue() throws Exception {
}

public void testRenameAtomicOperationSetFails() throws Exception {
-Map<String, Object> source = new HashMap<String, Object>() {
-@Override
-public Object put(String key, Object value) {
-if (key.equals("new_field")) {
-throw new UnsupportedOperationException();
-}
-return super.put(key, value);
-}
-};
-source.put("list", Collections.singletonList("item"));
+Map<String, Object> metadata = new HashMap<>();
+metadata.put("list", Collections.singletonList("item"));

-IngestDocument ingestDocument = TestIngestDocument.ofSourceAndMetadata(source);
+IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("new_field", (k, v) -> {
+if (v != null) {
+throw new UnsupportedOperationException();
+}
+}, "list", (k, v) -> {}));
Processor processor = createRenameProcessor("list", "new_field", false);
try {
processor.execute(ingestDocument);
@@ -161,18 +157,14 @@ public Object put(String key, Object value) {
}

public void testRenameAtomicOperationRemoveFails() throws Exception {
-Map<String, Object> source = new HashMap<String, Object>() {
-@Override
-public Object remove(Object key) {
-if (key.equals("list")) {
-throw new UnsupportedOperationException();
-}
-return super.remove(key);
-}
-};
-source.put("list", Collections.singletonList("item"));
+Map<String, Object> metadata = new HashMap<>();
+metadata.put("list", Collections.singletonList("item"));

-IngestDocument ingestDocument = TestIngestDocument.ofSourceAndMetadata(source);
+IngestDocument ingestDocument = TestIngestDocument.ofMetadataWithValidator(metadata, Map.of("list", (k, v) -> {
+if (v == null) {
+throw new UnsupportedOperationException();
+}
+}));
Processor processor = createRenameProcessor("list", "new_field", false);
try {
processor.execute(ingestDocument);
@@ -9,7 +9,6 @@
package org.elasticsearch.ingest.common;

import org.elasticsearch.ingest.IngestDocument;
-import org.elasticsearch.ingest.IngestDocument.Metadata;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestIngestDocument;
@@ -28,6 +27,7 @@
import java.util.stream.Collectors;
import java.util.stream.IntStream;

+import static org.elasticsearch.ingest.IngestDocument.Metadata;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.equalTo;

@@ -36,6 +36,9 @@ public class SetProcessorTests extends ESTestCase {
public void testSetExistingFields() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+while (Metadata.isMetadata(fieldName)) {
+fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+}
Object fieldValue = RandomDocumentPicks.randomFieldValue(random());
Processor processor = createSetProcessor(fieldName, fieldValue, null, true, false);
processor.execute(ingestDocument);
@@ -15,8 +15,8 @@
import java.util.HashMap;
import java.util.Map;

-import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.hasEntry;

public class UriPartsProcessorTests extends ESTestCase {

@@ -191,7 +191,9 @@ public void testRemoveIfSuccessfulDoesNotRemoveTargetField() throws Exception {

Map<String, Object> expectedSourceAndMetadata = new HashMap<>();
expectedSourceAndMetadata.put(field, Map.of("scheme", "http", "domain", "www.google.com", "path", ""));
-assertThat(output.getSourceAndMetadata().entrySet(), containsInAnyOrder(expectedSourceAndMetadata.entrySet().toArray()));
+for (Map.Entry<String, Object> entry : expectedSourceAndMetadata.entrySet()) {
+assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue()));
+}
}

public void testInvalidUri() {
@@ -234,7 +236,9 @@ private void testUriParsing(boolean keepOriginal, boolean removeIfSuccessful, St
}
expectedSourceAndMetadata.put("url", values);

-assertThat(output.getSourceAndMetadata().entrySet(), containsInAnyOrder(expectedSourceAndMetadata.entrySet().toArray()));
+for (Map.Entry<String, Object> entry : expectedSourceAndMetadata.entrySet()) {
+assertThat(output.getSourceAndMetadata(), hasEntry(entry.getKey(), entry.getValue()));
+}
}

}
@@ -114,7 +114,8 @@ public void testSimulate() throws Exception {
source.put("fail", false);
source.put("processed", true);
IngestDocument ingestDocument = new IngestDocument("index", "id", Versions.MATCH_ANY, null, null, source);
-assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata()));
+assertThat(simulateDocumentBaseResult.getIngestDocument().getSource(), equalTo(ingestDocument.getSource()));
+assertThat(simulateDocumentBaseResult.getIngestDocument().getMetadata(), equalTo(ingestDocument.getMetadata()));
assertThat(simulateDocumentBaseResult.getFailure(), nullValue());

// cleanup
@@ -11,6 +11,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.util.Maps;
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.ingest.IngestDocument;
@@ -23,7 +24,6 @@

import java.io.IOException;
import java.time.ZonedDateTime;
-import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

@@ -42,7 +42,7 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment {
"ingest_document",
true,
a -> {
-HashMap<String, Object> sourceAndMetadata = new HashMap<>();
+Map<String, Object> sourceAndMetadata = Maps.newHashMapWithExpectedSize(5);
sourceAndMetadata.put(Metadata.INDEX.getFieldName(), a[0]);
sourceAndMetadata.put(Metadata.ID.getFieldName(), a[1]);
if (a[2] != null) {
@@ -55,7 +55,8 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment {
sourceAndMetadata.put(Metadata.VERSION_TYPE.getFieldName(), a[4]);
}
sourceAndMetadata.putAll((Map<String, Object>) a[5]);
-return new WriteableIngestDocument(IngestDocument.of(sourceAndMetadata, (Map<String, Object>) a[6]));
+Map<String, Object> ingestMetadata = (Map<String, Object>) a[6];
+return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata));
}
);
static {
@@ -89,7 +90,7 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment {
WriteableIngestDocument(StreamInput in) throws IOException {
Map<String, Object> sourceAndMetadata = in.readMap();
Map<String, Object> ingestMetadata = in.readMap();
-this.ingestDocument = IngestDocument.of(sourceAndMetadata, ingestMetadata);
+this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata);
}

@Override
@@ -105,18 +106,16 @@ IngestDocument getIngestDocument() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(DOC_FIELD);
-Map<IngestDocument.Metadata, Object> metadataMap = ingestDocument.getMetadata();
-for (Map.Entry<IngestDocument.Metadata, Object> metadata : metadataMap.entrySet()) {
+Map<String, Object> metadataMap = ingestDocument.getMetadata();
+for (Map.Entry<String, Object> metadata : metadataMap.entrySet()) {
if (metadata.getValue() != null) {
-builder.field(metadata.getKey().getFieldName(), metadata.getValue().toString());
+builder.field(metadata.getKey(), metadata.getValue().toString());
}
}
if (builder.getRestApiVersion() == RestApiVersion.V_7) {
builder.field(MapperService.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME);
}
-Map<String, Object> source = IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata());
-metadataMap.keySet().forEach(mD -> source.remove(mD.getFieldName()));
-builder.field(SOURCE_FIELD, source);
+builder.field(SOURCE_FIELD, ingestDocument.getSource());
builder.field(INGEST_FIELD, ingestDocument.getIngestMetadata());
builder.endObject();
return builder;