Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
50 commits
Select commit Hold shift + click to select a range
04d376a
datefieldmapper checks for index version
pgomulka Feb 19, 2020
f02fa66
normalize
pgomulka Feb 20, 2020
601e691
encoding
pgomulka Feb 20, 2020
1b23e9d
revert encoding
pgomulka Feb 20, 2020
424c670
type
pgomulka Feb 20, 2020
85701cd
support date range
pgomulka Feb 20, 2020
a280ff5
todos
pgomulka Feb 20, 2020
4291101
todos and debugging
pgomulka Feb 20, 2020
95ccbc4
range test that fails - not finished
pgomulka Feb 21, 2020
146ce90
6.87 range test
pgomulka Feb 21, 2020
beed11d
test with more ingestion and replicas
pgomulka Feb 21, 2020
dc631dd
test fix
pgomulka Feb 21, 2020
71fdcef
imports
pgomulka Feb 21, 2020
2d44cf7
imports
pgomulka Feb 24, 2020
bd7c389
Merge branch '7.x' into joda/enable_joda_indices
pgomulka Feb 24, 2020
e56350e
import fix
pgomulka Feb 24, 2020
e5727a9
extend basing joda test
pgomulka Feb 24, 2020
3a65b1e
warning fix
pgomulka Feb 24, 2020
380bda0
typo fix
pgomulka Feb 24, 2020
6044009
test data fix
pgomulka Feb 25, 2020
cb289f9
remove assertions
pgomulka Feb 25, 2020
dbfb7ba
missing quote
pgomulka Feb 25, 2020
b65bec7
more changes
pgomulka Feb 27, 2020
4a789c0
rename and empty line
pgomulka Feb 28, 2020
12e6e21
java test
pgomulka Mar 1, 2020
a018f55
testing joda backed fields
pgomulka Mar 1, 2020
4ddddf2
warnings
pgomulka Mar 1, 2020
e6e9ce9
skipping 7
pgomulka Mar 1, 2020
c501967
tests passing
pgomulka Mar 2, 2020
d79ee33
import
pgomulka Mar 2, 2020
d262231
codestyle
pgomulka Mar 2, 2020
077dfeb
cleanup
pgomulka Mar 2, 2020
1cfebf2
todo cleanup
pgomulka Mar 2, 2020
567ef7a
typoos fix
pgomulka Mar 2, 2020
815648e
code review follow up
pgomulka Mar 3, 2020
ae3ab0a
fix upgrade from version
pgomulka Mar 3, 2020
5d6ad6f
removal of warnings
pgomulka Mar 4, 2020
1b16ced
javadoc
pgomulka Mar 4, 2020
091151a
imports
pgomulka Mar 4, 2020
04285f1
force refresh
pgomulka Mar 4, 2020
da574a6
Merge branch '7.x' into joda/enable_joda_indices
elasticmachine Mar 4, 2020
810505f
flush after post doc
pgomulka Mar 4, 2020
f1ea0e8
Merge branch 'joda/enable_joda_indices' of github.com:pgomulka/elasti…
pgomulka Mar 4, 2020
f3c6c6a
import
pgomulka Mar 4, 2020
d3f8739
assert on body
pgomulka Mar 5, 2020
675b3d1
sout remove
pgomulka Mar 9, 2020
729d310
refresh instead of flush
pgomulka Mar 9, 2020
ecf57f5
code review follow up
pgomulka Mar 10, 2020
5ed29d2
Merge branch '7.x' into joda/enable_joda_indices
elasticmachine Mar 10, 2020
bda9c47
remove comment
pgomulka Mar 11, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions qa/rolling-upgrade/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
doFirst {
project.delete("${buildDir}/cluster/shared/repo/${baseName}")
}
systemProperty 'tests.upgrade_from_version', bwcVersion.toString()
systemProperty 'tests.rest.suite', 'old_cluster'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
Expand All @@ -71,7 +72,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
testClusters."${baseName}".nextNodeToNextVersion()
}
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.upgrade_from_version', project.version.replace("-SNAPSHOT", "")
systemProperty 'tests.upgrade_from_version', bwcVersion.toString()
systemProperty 'tests.first_round', 'true'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
Expand All @@ -84,7 +85,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
testClusters."${baseName}".nextNodeToNextVersion()
}
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.upgrade_from_version', project.version.replace("-SNAPSHOT", "")
systemProperty 'tests.upgrade_from_version', bwcVersion.toString()
systemProperty 'tests.first_round', 'false'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
Expand All @@ -97,6 +98,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
}
useCluster testClusters."${baseName}"
systemProperty 'tests.rest.suite', 'upgraded_cluster'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString()

nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,269 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.upgrades;

import org.apache.http.HttpStatus;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.DocValueFormat;
import org.junit.BeforeClass;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;

import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM;

/**
* This is test is meant to verify that when upgrading from 6.x version to 7.7 or newer it is able to parse date fields with joda pattern.
*
* The test is indexing documents and searches with use of joda or java pattern.
* In order to make sure that serialization logic is used a search call is executed 3 times (using all nodes).
* It cannot be guaranteed that serialization logic will always be used as it might happen that
* all shards are allocated on the same node and client is connecting to it.
* Because of this warnings assertions have to be ignored.
*
* A special flag used when serializing {@link DocValueFormat.DateTime#writeTo DocValueFormat.DateTime::writeTo}
* is used to indicate that an index was created in 6.x and has a joda pattern. The same flag is read when
* {@link DocValueFormat.DateTime#DateTime(StreamInput)} deserializing.
* When upgrading from 7.0-7.6 to 7.7 there is no way to tell if a pattern was created in 6.x as this flag cannot be added.
* Hence a skip assume section in init()
*
* @see org.elasticsearch.search.DocValueFormat.DateTime
*/
public class JodaCompatibilityIT extends AbstractRollingTestCase {

@BeforeClass
public static void init(){
assumeTrue("upgrading from 7.0-7.6 will fail parsing joda formats",
UPGRADE_FROM_VERSION.before(Version.V_7_0_0));
}

public void testJodaBackedDocValueAndDateFields() throws Exception {
switch (CLUSTER_TYPE) {
case OLD:
Request createTestIndex = indexWithDateField("joda_time", "YYYY-MM-dd'T'HH:mm:ssZZ");
createTestIndex.setOptions(ignoreWarnings());

Response resp = client().performRequest(createTestIndex);
assertEquals(HttpStatus.SC_OK, resp.getStatusLine().getStatusCode());

postNewDoc("joda_time", 1);

break;
case MIXED:
int minute = Booleans.parseBoolean(System.getProperty("tests.first_round")) ? 2 : 3;
postNewDoc("joda_time", minute);

Request search = dateRangeSearch("joda_time");
search.setOptions(ignoreWarnings());

performOnAllNodes(search, r -> assertEquals(HttpStatus.SC_OK, r.getStatusLine().getStatusCode()));
break;
case UPGRADED:
postNewDoc("joda_time", 4);

search = searchWithAgg("joda_time");
search.setOptions(ignoreWarnings());
//making sure all nodes were used for search
performOnAllNodes(search, r -> assertResponseHasAllDocuments(r));
break;
}
}

public void testJavaBackedDocValueAndDateFields() throws Exception {
switch (CLUSTER_TYPE) {
case OLD:
Request createTestIndex = indexWithDateField("java_time", "8yyyy-MM-dd'T'HH:mm:ssXXX");
Response resp = client().performRequest(createTestIndex);
assertEquals(HttpStatus.SC_OK, resp.getStatusLine().getStatusCode());

postNewDoc("java_time", 1);

break;
case MIXED:
int minute = Booleans.parseBoolean(System.getProperty("tests.first_round")) ? 2 : 3;
postNewDoc("java_time", minute);

Request search = dateRangeSearch("java_time");
Response searchResp = client().performRequest(search);
assertEquals(HttpStatus.SC_OK, searchResp.getStatusLine().getStatusCode());
break;
case UPGRADED:
postNewDoc("java_time", 4);

search = searchWithAgg("java_time");
//making sure all nodes were used for search
performOnAllNodes(search, r -> assertResponseHasAllDocuments(r));

break;
}
}

private RequestOptions ignoreWarnings() {
RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
options.setWarningsHandler(WarningsHandler.PERMISSIVE);
return options.build();
}

private void performOnAllNodes(Request search, Consumer<Response> consumer) throws IOException {
List<Node> nodes = client().getNodes();
for (Node node : nodes) {
client().setNodes(Collections.singletonList(node));
Response response = client().performRequest(search);
consumer.accept(response);
assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
}
client().setNodes(nodes);
}

private void assertResponseHasAllDocuments(Response searchResp) {
assertEquals(HttpStatus.SC_OK, searchResp.getStatusLine().getStatusCode());
try {
assertEquals(removeWhiteSpace("{" +
" \"_shards\": {" +
" \"total\": 3," +
" \"successful\": 3" +
" },"+
" \"hits\": {" +
" \"total\": 4," +
" \"hits\": [" +
" {" +
" \"_source\": {" +
" \"datetime\": \"2020-01-01T00:00:01+01:00\"" +
" }" +
" }," +
" {" +
" \"_source\": {" +
" \"datetime\": \"2020-01-01T00:00:02+01:00\"" +
" }" +
" }," +
" {" +
" \"_source\": {" +
" \"datetime\": \"2020-01-01T00:00:03+01:00\"" +
" }" +
" }," +
" {" +
" \"_source\": {" +
" \"datetime\": \"2020-01-01T00:00:04+01:00\"" +
" }" +
" }" +
" ]" +
" }" +
"}"),
EntityUtils.toString(searchResp.getEntity(), StandardCharsets.UTF_8));
} catch (IOException e) {
throw new AssertionError("Exception during response parising", e);
}
}

private String removeWhiteSpace(String input) {
return input.replaceAll("[\\n\\r\\t\\ ]", "");
}

private Request dateRangeSearch(String endpoint) {
Request search = new Request("GET", endpoint+"/_search");
search.addParameter(TOTAL_HITS_AS_INT_PARAM, "true");
search.addParameter("filter_path", "hits.total,hits.hits._source.datetime,_shards.total,_shards.successful");
search.setJsonEntity("" +
"{\n" +
" \"track_total_hits\": true,\n" +
" \"sort\": \"datetime\",\n" +
" \"query\": {\n" +
" \"range\": {\n" +
" \"datetime\": {\n" +
" \"gte\": \"2020-01-01T00:00:00+01:00\",\n" +
" \"lte\": \"2020-01-02T00:00:00+01:00\"\n" +
" }\n" +
" }\n" +
" }\n" +
"}\n"
);
return search;
}

private Request searchWithAgg(String endpoint) throws IOException {
Request search = new Request("GET", endpoint+"/_search");
search.addParameter(TOTAL_HITS_AS_INT_PARAM, "true");
search.addParameter("filter_path", "hits.total,hits.hits._source.datetime,_shards.total,_shards.successful");

search.setJsonEntity("{\n" +
" \"track_total_hits\": true,\n" +
" \"sort\": \"datetime\",\n" +
" \"query\": {\n" +
" \"range\": {\n" +
" \"datetime\": {\n" +
" \"gte\": \"2020-01-01T00:00:00+01:00\",\n" +
" \"lte\": \"2020-01-02T00:00:00+01:00\"\n" +
" }\n" +
" }\n" +
" },\n" +
" \"aggs\" : {\n" +
" \"docs_per_year\" : {\n" +
" \"date_histogram\" : {\n" +
" \"field\" : \"date\",\n" +
" \"calendar_interval\" : \"year\"\n" +
" }\n" +
" }\n" +
" }\n" +
"}\n"
);
return search;
}
private Request indexWithDateField(String indexName, String format) {
Request createTestIndex = new Request("PUT", indexName);
createTestIndex.addParameter("include_type_name", "false");
createTestIndex.setJsonEntity("{\n" +
" \"settings\": {\n" +
" \"index.number_of_shards\": 3\n" +
" },\n" +
" \"mappings\": {\n" +
" \"properties\": {\n" +
" \"datetime\": {\n" +
" \"type\": \"date\",\n" +
" \"format\": \"" + format + "\"\n" +
" }\n" +
" }\n" +
" }\n" +
"}"
);
return createTestIndex;
}

private void postNewDoc(String endpoint, int minute) throws IOException {
Request putDoc = new Request("POST", endpoint+"/_doc");
putDoc.addParameter("refresh", "true");
putDoc.addParameter("wait_for_active_shards", "all");
putDoc.setJsonEntity("{\n" +
" \"datetime\": \"2020-01-01T00:00:0" + minute + "+01:00\"\n" +
"}"
);
Response resp = client().performRequest(putDoc);
assertEquals(HttpStatus.SC_CREATED, resp.getStatusLine().getStatusCode());
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
---
"Insert more docs to joda index":
- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "joda_for_range"}}'
- '{"time_frame": {"gte": "2019-01-01T00:00+01:00", "lte" : "2019-03-01T00:00+01:00"}}'

- do:
search:
rest_total_hits_as_int: true
index: joda_for_range
body:
query:
range:
time_frame:
gte: "2019-02-01T00:00+01:00"
lte: "2019-02-01T00:00+01:00"

---
"Insert more docs to java index":
- do:
bulk:
refresh: true
body:
- '{"index": {"_index": "java_for_range"}}'
- '{"time_frame": {"gte": "2019-01-01T00:00+01:00", "lte" : "2019-03-01T00:00+01:00"}}'

- do:
search:
rest_total_hits_as_int: true
index: java_for_range
body:
query:
range:
time_frame:
gte: "2019-02-01T00:00+01:00"
lte: "2019-02-01T00:00+01:00"
Loading