From 270c612e120fac543addb3ccb953f710914c2b4c Mon Sep 17 00:00:00 2001 From: javanna Date: Tue, 6 Jun 2017 15:22:45 +0200 Subject: [PATCH] Introduce high level client The high level client supports the following apis: ping, info, index, bulk, get, delete, update, search, search scroll and clear scroll. Also, the BulkProcessor has been updated so that it can be used with the high level client as well. --- .../resources/forbidden/http-signatures.txt | 45 + client/rest-high-level/build.gradle | 51 + .../org/elasticsearch/client/Request.java | 548 +++++++++++ .../client/RestHighLevelClient.java | 582 +++++++++++ .../java/org/elasticsearch/client/CrudIT.java | 704 ++++++++++++++ .../client/ESRestHighLevelClientTestCase.java | 75 ++ .../elasticsearch/client/PingAndInfoIT.java | 51 + .../elasticsearch/client/RequestTests.java | 906 ++++++++++++++++++ .../client/RestHighLevelClientExtTests.java | 138 +++ .../client/RestHighLevelClientTests.java | 666 +++++++++++++ .../org/elasticsearch/client/SearchIT.java | 465 +++++++++ .../documentation/DeleteDocumentationIT.java | 112 +++ .../QueryDSLDocumentationTests.java | 453 +++++++++ .../RestClientSingleHostIntegTests.java | 31 +- .../client/RestClientSingleHostTests.java | 32 +- .../client/RestClientTestCase.java | 99 +- .../client/RestClientTestUtil.java | 23 + .../action/bulk/BulkProcessor.java | 85 +- .../action/bulk/BulkRequestHandler.java | 81 +- .../org/elasticsearch/action/bulk/Retry.java | 100 +- .../action/bulk/BulkProcessorIT.java | 6 +- .../action/bulk/BulkProcessorRetryIT.java | 1 - .../elasticsearch/action/bulk/RetryTests.java | 12 +- docs/java-rest/high-level/apis.asciidoc | 10 + .../high-level/document/delete.asciidoc | 67 ++ .../high-level/document/index.asciidoc | 5 + docs/java-rest/high-level/index.asciidoc | 14 + docs/java-rest/high-level/usage.asciidoc | 75 ++ docs/java-rest/index.asciidoc | 6 +- docs/java-rest/license.asciidoc | 16 + .../{ => low-level}/configuration.asciidoc | 12 +- 
docs/java-rest/low-level/index.asciidoc | 27 + .../{ => low-level}/sniffer.asciidoc | 14 +- docs/java-rest/{ => low-level}/usage.asciidoc | 38 +- docs/java-rest/overview.asciidoc | 45 +- .../AbstractAsyncBulkByScrollAction.java | 18 +- .../index/reindex/TransportReindexAction.java | 10 +- .../reindex/TransportUpdateByQueryAction.java | 10 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../index/reindex/ReindexMetadataTests.java | 3 +- .../index/reindex/ReindexScriptTests.java | 3 +- .../index/reindex/RetryTests.java | 4 +- .../reindex/UpdateByQueryMetadataTests.java | 4 +- .../reindex/UpdateByQueryWithScriptTests.java | 3 +- settings.gradle | 1 + 45 files changed, 5327 insertions(+), 326 deletions(-) create mode 100644 buildSrc/src/main/resources/forbidden/http-signatures.txt create mode 100644 client/rest-high-level/build.gradle create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java create mode 100644 
client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java create mode 100644 docs/java-rest/high-level/apis.asciidoc create mode 100644 docs/java-rest/high-level/document/delete.asciidoc create mode 100644 docs/java-rest/high-level/document/index.asciidoc create mode 100644 docs/java-rest/high-level/index.asciidoc create mode 100644 docs/java-rest/high-level/usage.asciidoc create mode 100644 docs/java-rest/license.asciidoc rename docs/java-rest/{ => low-level}/configuration.asciidoc (98%) create mode 100644 docs/java-rest/low-level/index.asciidoc rename docs/java-rest/{ => low-level}/sniffer.asciidoc (97%) rename docs/java-rest/{ => low-level}/usage.asciidoc (95%) diff --git a/buildSrc/src/main/resources/forbidden/http-signatures.txt b/buildSrc/src/main/resources/forbidden/http-signatures.txt new file mode 100644 index 0000000000000..dcf20bbb09387 --- /dev/null +++ b/buildSrc/src/main/resources/forbidden/http-signatures.txt @@ -0,0 +1,45 @@ +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on +# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +# either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +@defaultMessage Explicitly specify the ContentType of HTTP entities when creating +org.apache.http.entity.StringEntity#(java.lang.String) +org.apache.http.entity.StringEntity#(java.lang.String,java.lang.String) +org.apache.http.entity.StringEntity#(java.lang.String,java.nio.charset.Charset) +org.apache.http.entity.ByteArrayEntity#(byte[]) +org.apache.http.entity.ByteArrayEntity#(byte[],int,int) +org.apache.http.entity.FileEntity#(java.io.File) +org.apache.http.entity.InputStreamEntity#(java.io.InputStream) +org.apache.http.entity.InputStreamEntity#(java.io.InputStream,long) +org.apache.http.nio.entity.NByteArrayEntity#(byte[]) +org.apache.http.nio.entity.NByteArrayEntity#(byte[],int,int) +org.apache.http.nio.entity.NFileEntity#(java.io.File) +org.apache.http.nio.entity.NStringEntity#(java.lang.String) +org.apache.http.nio.entity.NStringEntity#(java.lang.String,java.lang.String) + +@defaultMessage Use non-deprecated constructors +org.apache.http.nio.entity.NFileEntity#(java.io.File,java.lang.String) +org.apache.http.nio.entity.NFileEntity#(java.io.File,java.lang.String,boolean) +org.apache.http.entity.FileEntity#(java.io.File,java.lang.String) +org.apache.http.entity.StringEntity#(java.lang.String,java.lang.String,java.lang.String) + +@defaultMessage BasicEntity is easy to mess up and forget to set content type +org.apache.http.entity.BasicHttpEntity#() + +@defaultMessage EntityTemplate is easy to mess up and forget to set content type +org.apache.http.entity.EntityTemplate#(org.apache.http.entity.ContentProducer) + +@defaultMessage SerializableEntity uses java serialization and makes it easy to forget to set content type +org.apache.http.entity.SerializableEntity#(java.io.Serializable) diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle new file mode 100644 index 0000000000000..9203b8978fd05 --- /dev/null +++ b/client/rest-high-level/build.gradle @@ -0,0 +1,51 @@ +import org.elasticsearch.gradle.precommit.PrecommitTasks + +/* + 
* Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.rest-test' + +group = 'org.elasticsearch.client' + +dependencies { + compile "org.elasticsearch:elasticsearch:${version}" + compile "org.elasticsearch.client:rest:${version}" + compile "org.elasticsearch.plugin:parent-join-client:${version}" + compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" + + testCompile "org.elasticsearch.client:test:${version}" + testCompile "org.elasticsearch.test:framework:${version}" + testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" + testCompile "junit:junit:${versions.junit}" + testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" +} + +dependencyLicenses { + // Don't check licenses for dependency that are part of the elasticsearch project + // But any other dependency should have its license/notice/sha1 + dependencies = project.configurations.runtime.fileCollection { + it.group.startsWith('org.elasticsearch') == false + } +} + +forbiddenApisMain { + // core does not depend on the httpclient for compile so we add the signatures here. 
We don't add them for test as they are already + // specified + signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] +} \ No newline at end of file diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java new file mode 100644 index 0000000000000..9e881cf7b9add --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -0,0 +1,548 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.HttpEntity; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Collections; +import 
java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.StringJoiner; + +final class Request { + + static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON; + + final String method; + final String endpoint; + final Map params; + final HttpEntity entity; + + Request(String method, String endpoint, Map params, HttpEntity entity) { + this.method = method; + this.endpoint = endpoint; + this.params = params; + this.entity = entity; + } + + @Override + public String toString() { + return "Request{" + + "method='" + method + '\'' + + ", endpoint='" + endpoint + '\'' + + ", params=" + params + + ", hasBody=" + (entity != null) + + '}'; + } + + static Request delete(DeleteRequest deleteRequest) { + String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id()); + + Params parameters = Params.builder(); + parameters.withRouting(deleteRequest.routing()); + parameters.withParent(deleteRequest.parent()); + parameters.withTimeout(deleteRequest.timeout()); + parameters.withVersion(deleteRequest.version()); + parameters.withVersionType(deleteRequest.versionType()); + parameters.withRefreshPolicy(deleteRequest.getRefreshPolicy()); + parameters.withWaitForActiveShards(deleteRequest.waitForActiveShards()); + + return new Request(HttpDelete.METHOD_NAME, endpoint, parameters.getParams(), null); + } + + static Request info() { + return new Request(HttpGet.METHOD_NAME, "/", Collections.emptyMap(), null); + } + + static Request bulk(BulkRequest bulkRequest) throws IOException { + Params parameters = Params.builder(); + parameters.withTimeout(bulkRequest.timeout()); + parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy()); + + // Bulk API only supports newline delimited JSON or Smile. Before executing + // the bulk, we need to check that all requests have the same content-type + // and this content-type is supported by the Bulk API. 
+ XContentType bulkContentType = null; + for (int i = 0; i < bulkRequest.numberOfActions(); i++) { + DocWriteRequest request = bulkRequest.requests().get(i); + + DocWriteRequest.OpType opType = request.opType(); + if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { + bulkContentType = enforceSameContentType((IndexRequest) request, bulkContentType); + + } else if (opType == DocWriteRequest.OpType.UPDATE) { + UpdateRequest updateRequest = (UpdateRequest) request; + if (updateRequest.doc() != null) { + bulkContentType = enforceSameContentType(updateRequest.doc(), bulkContentType); + } + if (updateRequest.upsertRequest() != null) { + bulkContentType = enforceSameContentType(updateRequest.upsertRequest(), bulkContentType); + } + } + } + + if (bulkContentType == null) { + bulkContentType = XContentType.JSON; + } + + byte separator = bulkContentType.xContent().streamSeparator(); + ContentType requestContentType = ContentType.create(bulkContentType.mediaType()); + + ByteArrayOutputStream content = new ByteArrayOutputStream(); + for (DocWriteRequest request : bulkRequest.requests()) { + DocWriteRequest.OpType opType = request.opType(); + + try (XContentBuilder metadata = XContentBuilder.builder(bulkContentType.xContent())) { + metadata.startObject(); + { + metadata.startObject(opType.getLowercase()); + if (Strings.hasLength(request.index())) { + metadata.field("_index", request.index()); + } + if (Strings.hasLength(request.type())) { + metadata.field("_type", request.type()); + } + if (Strings.hasLength(request.id())) { + metadata.field("_id", request.id()); + } + if (Strings.hasLength(request.routing())) { + metadata.field("_routing", request.routing()); + } + if (Strings.hasLength(request.parent())) { + metadata.field("_parent", request.parent()); + } + if (request.version() != Versions.MATCH_ANY) { + metadata.field("_version", request.version()); + } + + VersionType versionType = request.versionType(); + if (versionType != 
VersionType.INTERNAL) { + if (versionType == VersionType.EXTERNAL) { + metadata.field("_version_type", "external"); + } else if (versionType == VersionType.EXTERNAL_GTE) { + metadata.field("_version_type", "external_gte"); + } else if (versionType == VersionType.FORCE) { + metadata.field("_version_type", "force"); + } + } + + if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { + IndexRequest indexRequest = (IndexRequest) request; + if (Strings.hasLength(indexRequest.getPipeline())) { + metadata.field("pipeline", indexRequest.getPipeline()); + } + } else if (opType == DocWriteRequest.OpType.UPDATE) { + UpdateRequest updateRequest = (UpdateRequest) request; + if (updateRequest.retryOnConflict() > 0) { + metadata.field("_retry_on_conflict", updateRequest.retryOnConflict()); + } + if (updateRequest.fetchSource() != null) { + metadata.field("_source", updateRequest.fetchSource()); + } + } + metadata.endObject(); + } + metadata.endObject(); + + BytesRef metadataSource = metadata.bytes().toBytesRef(); + content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length); + content.write(separator); + } + + BytesRef source = null; + if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { + IndexRequest indexRequest = (IndexRequest) request; + BytesReference indexSource = indexRequest.source(); + XContentType indexXContentType = indexRequest.getContentType(); + + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, indexSource, indexXContentType)) { + try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) { + builder.copyCurrentStructure(parser); + source = builder.bytes().toBytesRef(); + } + } + } else if (opType == DocWriteRequest.OpType.UPDATE) { + source = XContentHelper.toXContent((UpdateRequest) request, bulkContentType, false).toBytesRef(); + } + + if (source != null) { + content.write(source.bytes, source.offset, 
source.length); + content.write(separator); + } + } + + HttpEntity entity = new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType); + return new Request(HttpPost.METHOD_NAME, "/_bulk", parameters.getParams(), entity); + } + + static Request exists(GetRequest getRequest) { + Request request = get(getRequest); + return new Request(HttpHead.METHOD_NAME, request.endpoint, request.params, null); + } + + static Request get(GetRequest getRequest) { + String endpoint = endpoint(getRequest.index(), getRequest.type(), getRequest.id()); + + Params parameters = Params.builder(); + parameters.withPreference(getRequest.preference()); + parameters.withRouting(getRequest.routing()); + parameters.withParent(getRequest.parent()); + parameters.withRefresh(getRequest.refresh()); + parameters.withRealtime(getRequest.realtime()); + parameters.withStoredFields(getRequest.storedFields()); + parameters.withVersion(getRequest.version()); + parameters.withVersionType(getRequest.versionType()); + parameters.withFetchSourceContext(getRequest.fetchSourceContext()); + + return new Request(HttpGet.METHOD_NAME, endpoint, parameters.getParams(), null); + } + + static Request index(IndexRequest indexRequest) { + String method = Strings.hasLength(indexRequest.id()) ? HttpPut.METHOD_NAME : HttpPost.METHOD_NAME; + + boolean isCreate = (indexRequest.opType() == DocWriteRequest.OpType.CREATE); + String endpoint = endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id(), isCreate ? 
"_create" : null); + + Params parameters = Params.builder(); + parameters.withRouting(indexRequest.routing()); + parameters.withParent(indexRequest.parent()); + parameters.withTimeout(indexRequest.timeout()); + parameters.withVersion(indexRequest.version()); + parameters.withVersionType(indexRequest.versionType()); + parameters.withPipeline(indexRequest.getPipeline()); + parameters.withRefreshPolicy(indexRequest.getRefreshPolicy()); + parameters.withWaitForActiveShards(indexRequest.waitForActiveShards()); + + BytesRef source = indexRequest.source().toBytesRef(); + ContentType contentType = ContentType.create(indexRequest.getContentType().mediaType()); + HttpEntity entity = new ByteArrayEntity(source.bytes, source.offset, source.length, contentType); + + return new Request(method, endpoint, parameters.getParams(), entity); + } + + static Request ping() { + return new Request(HttpHead.METHOD_NAME, "/", Collections.emptyMap(), null); + } + + static Request update(UpdateRequest updateRequest) throws IOException { + String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update"); + + Params parameters = Params.builder(); + parameters.withRouting(updateRequest.routing()); + parameters.withParent(updateRequest.parent()); + parameters.withTimeout(updateRequest.timeout()); + parameters.withRefreshPolicy(updateRequest.getRefreshPolicy()); + parameters.withWaitForActiveShards(updateRequest.waitForActiveShards()); + parameters.withDocAsUpsert(updateRequest.docAsUpsert()); + parameters.withFetchSourceContext(updateRequest.fetchSource()); + parameters.withRetryOnConflict(updateRequest.retryOnConflict()); + parameters.withVersion(updateRequest.version()); + parameters.withVersionType(updateRequest.versionType()); + + // The Java API allows update requests with different content types + // set for the partial document and the upsert document. 
This client + // only accepts update requests that have the same content types set + // for both doc and upsert. + XContentType xContentType = null; + if (updateRequest.doc() != null) { + xContentType = updateRequest.doc().getContentType(); + } + if (updateRequest.upsertRequest() != null) { + XContentType upsertContentType = updateRequest.upsertRequest().getContentType(); + if ((xContentType != null) && (xContentType != upsertContentType)) { + throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType + "]" + + " and upsert [" + upsertContentType + "] documents"); + } else { + xContentType = upsertContentType; + } + } + if (xContentType == null) { + xContentType = Requests.INDEX_CONTENT_TYPE; + } + + HttpEntity entity = createEntity(updateRequest, xContentType); + return new Request(HttpPost.METHOD_NAME, endpoint, parameters.getParams(), entity); + } + + static Request search(SearchRequest searchRequest) throws IOException { + String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search"); + Params params = Params.builder(); + params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); + params.withRouting(searchRequest.routing()); + params.withPreference(searchRequest.preference()); + params.withIndicesOptions(searchRequest.indicesOptions()); + params.putParam("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT)); + if (searchRequest.requestCache() != null) { + params.putParam("request_cache", Boolean.toString(searchRequest.requestCache())); + } + params.putParam("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize())); + if (searchRequest.scroll() != null) { + params.putParam("scroll", searchRequest.scroll().keepAlive()); + } + HttpEntity entity = null; + if (searchRequest.source() != null) { + entity = createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE); + } + return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), 
entity); + } + + static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException { + HttpEntity entity = createEntity(searchScrollRequest, REQUEST_BODY_CONTENT_TYPE); + return new Request("GET", "/_search/scroll", Collections.emptyMap(), entity); + } + + static Request clearScroll(ClearScrollRequest clearScrollRequest) throws IOException { + HttpEntity entity = createEntity(clearScrollRequest, REQUEST_BODY_CONTENT_TYPE); + return new Request("DELETE", "/_search/scroll", Collections.emptyMap(), entity); + } + + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { + BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); + return new ByteArrayEntity(source.bytes, source.offset, source.length, ContentType.create(xContentType.mediaType())); + } + + static String endpoint(String[] indices, String[] types, String endpoint) { + return endpoint(String.join(",", indices), String.join(",", types), endpoint); + } + + /** + * Utility method to build request's endpoint. + */ + static String endpoint(String... parts) { + StringJoiner joiner = new StringJoiner("/", "/", ""); + for (String part : parts) { + if (Strings.hasLength(part)) { + joiner.add(part); + } + } + return joiner.toString(); + } + + /** + * Utility class to build request's parameters map and centralize all parameter names. 
+ */ + static class Params { + private final Map params = new HashMap<>(); + + private Params() { + } + + Params putParam(String key, String value) { + if (Strings.hasLength(value)) { + if (params.putIfAbsent(key, value) != null) { + throw new IllegalArgumentException("Request parameter [" + key + "] is already registered"); + } + } + return this; + } + + Params putParam(String key, TimeValue value) { + if (value != null) { + return putParam(key, value.getStringRep()); + } + return this; + } + + Params withDocAsUpsert(boolean docAsUpsert) { + if (docAsUpsert) { + return putParam("doc_as_upsert", Boolean.TRUE.toString()); + } + return this; + } + + Params withFetchSourceContext(FetchSourceContext fetchSourceContext) { + if (fetchSourceContext != null) { + if (fetchSourceContext.fetchSource() == false) { + putParam("_source", Boolean.FALSE.toString()); + } + if (fetchSourceContext.includes() != null && fetchSourceContext.includes().length > 0) { + putParam("_source_include", String.join(",", fetchSourceContext.includes())); + } + if (fetchSourceContext.excludes() != null && fetchSourceContext.excludes().length > 0) { + putParam("_source_exclude", String.join(",", fetchSourceContext.excludes())); + } + } + return this; + } + + Params withParent(String parent) { + return putParam("parent", parent); + } + + Params withPipeline(String pipeline) { + return putParam("pipeline", pipeline); + } + + Params withPreference(String preference) { + return putParam("preference", preference); + } + + Params withRealtime(boolean realtime) { + if (realtime == false) { + return putParam("realtime", Boolean.FALSE.toString()); + } + return this; + } + + Params withRefresh(boolean refresh) { + if (refresh) { + return withRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + } + return this; + } + + Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { + if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) { + return putParam("refresh", refreshPolicy.getValue()); + } + 
return this; + } + + Params withRetryOnConflict(int retryOnConflict) { + if (retryOnConflict > 0) { + return putParam("retry_on_conflict", String.valueOf(retryOnConflict)); + } + return this; + } + + Params withRouting(String routing) { + return putParam("routing", routing); + } + + Params withStoredFields(String[] storedFields) { + if (storedFields != null && storedFields.length > 0) { + return putParam("stored_fields", String.join(",", storedFields)); + } + return this; + } + + Params withTimeout(TimeValue timeout) { + return putParam("timeout", timeout); + } + + Params withVersion(long version) { + if (version != Versions.MATCH_ANY) { + return putParam("version", Long.toString(version)); + } + return this; + } + + Params withVersionType(VersionType versionType) { + if (versionType != VersionType.INTERNAL) { + return putParam("version_type", versionType.name().toLowerCase(Locale.ROOT)); + } + return this; + } + + Params withWaitForActiveShards(ActiveShardCount activeShardCount) { + if (activeShardCount != null && activeShardCount != ActiveShardCount.DEFAULT) { + return putParam("wait_for_active_shards", activeShardCount.toString().toLowerCase(Locale.ROOT)); + } + return this; + } + + Params withIndicesOptions(IndicesOptions indicesOptions) { + putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable())); + putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices())); + String expandWildcards; + if (indicesOptions.expandWildcardsOpen() == false && indicesOptions.expandWildcardsClosed() == false) { + expandWildcards = "none"; + } else { + StringJoiner joiner = new StringJoiner(","); + if (indicesOptions.expandWildcardsOpen()) { + joiner.add("open"); + } + if (indicesOptions.expandWildcardsClosed()) { + joiner.add("closed"); + } + expandWildcards = joiner.toString(); + } + putParam("expand_wildcards", expandWildcards); + return this; + } + + Map getParams() { + return Collections.unmodifiableMap(params); + } + + static 
Params builder() { + return new Params(); + } + } + + /** + * Ensure that the {@link IndexRequest}'s content type is supported by the Bulk API and that it conforms + * to the current {@link BulkRequest}'s content type (if it's known at the time of this method get called). + * + * @return the {@link IndexRequest}'s content type + */ + static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable XContentType xContentType) { + XContentType requestContentType = indexRequest.getContentType(); + if (requestContentType != XContentType.JSON && requestContentType != XContentType.SMILE) { + throw new IllegalArgumentException("Unsupported content-type found for request with content-type [" + requestContentType + + "], only JSON and SMILE are supported"); + } + if (xContentType == null) { + return requestContentType; + } + if (requestContentType != xContentType) { + throw new IllegalArgumentException("Mismatching content-type found for request with content-type [" + requestContentType + + "], previous requests have content-type [" + xContentType + "]"); + } + return xContentType; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java new file mode 100644 index 0000000000000..fa9980977f4f1 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -0,0 +1,582 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.main.MainRequest; +import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ContextParser; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import 
org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder; +import org.elasticsearch.join.aggregations.ParsedChildren; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.adjacency.ParsedAdjacencyMatrix; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.ParsedFilter; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.ParsedFilters; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram; +import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.ParsedMissing; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.ParsedNested; +import org.elasticsearch.search.aggregations.bucket.nested.ParsedReverseNested; +import 
org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.ParsedRange; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.ParsedDateRange; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.ParsedGeoDistance; +import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; +import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler; +import org.elasticsearch.search.aggregations.bucket.significant.ParsedSignificantLongTerms; +import org.elasticsearch.search.aggregations.bucket.significant.ParsedSignificantStringTerms; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; +import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.matrix.stats.ParsedMatrixStats; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; +import 
org.elasticsearch.search.aggregations.metrics.cardinality.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.geobounds.ParsedGeoBounds; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.ParsedGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.max.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.min.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.scripted.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; +import 
org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; +import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.ParsedBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ParsedExtendedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.completion.CompletionSuggestion; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; +import org.elasticsearch.search.suggest.term.TermSuggestion; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import 
java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; +import static java.util.stream.Collectors.toList; + +/** + * High level REST client that wraps an instance of the low level {@link RestClient} and allows to build requests and read responses. + * The provided {@link RestClient} is externally built and closed. + * Can be sub-classed to expose additional client methods that make use of endpoints added to Elasticsearch through plugins, or to + * add support for custom response sections, again added to Elasticsearch through plugins. + */ +public class RestHighLevelClient { + + private final RestClient client; + private final NamedXContentRegistry registry; + + /** + * Creates a {@link RestHighLevelClient} given the low level {@link RestClient} that it should use to perform requests. + */ + public RestHighLevelClient(RestClient restClient) { + this(restClient, Collections.emptyList()); + } + + /** + * Creates a {@link RestHighLevelClient} given the low level {@link RestClient} that it should use to perform requests and + * a list of entries that allow to parse custom response sections added to Elasticsearch through plugins. + */ + protected RestHighLevelClient(RestClient restClient, List namedXContentEntries) { + this.client = Objects.requireNonNull(restClient); + this.registry = new NamedXContentRegistry(Stream.of(getDefaultNamedXContents().stream(), namedXContentEntries.stream()) + .flatMap(Function.identity()).collect(toList())); + } + + /** + * Executes a bulk request using the Bulk API + * + * See Bulk API on elastic.co + */ + public BulkResponse bulk(BulkRequest bulkRequest, Header... 
headers) throws IOException { + return performRequestAndParseEntity(bulkRequest, Request::bulk, BulkResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously executes a bulk request using the Bulk API + * + * See Bulk API on elastic.co + */ + public void bulkAsync(BulkRequest bulkRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(bulkRequest, Request::bulk, BulkResponse::fromXContent, listener, emptySet(), headers); + } + + /** + * Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise + */ + public boolean ping(Header... headers) throws IOException { + return performRequest(new MainRequest(), (request) -> Request.ping(), RestHighLevelClient::convertExistsResponse, + emptySet(), headers); + } + + /** + * Get the cluster info otherwise provided when sending an HTTP request to port 9200 + */ + public MainResponse info(Header... headers) throws IOException { + return performRequestAndParseEntity(new MainRequest(), (request) -> Request.info(), MainResponse::fromXContent, emptySet(), + headers); + } + + /** + * Retrieves a document by id using the Get API + * + * See Get API on elastic.co + */ + public GetResponse get(GetRequest getRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(getRequest, Request::get, GetResponse::fromXContent, singleton(404), headers); + } + + /** + * Asynchronously retrieves a document by id using the Get API + * + * See Get API on elastic.co + */ + public void getAsync(GetRequest getRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(getRequest, Request::get, GetResponse::fromXContent, listener, singleton(404), headers); + } + + /** + * Checks for the existence of a document. Returns true if it exists, false otherwise + * + * See Get API on elastic.co + */ + public boolean exists(GetRequest getRequest, Header... 
headers) throws IOException { + return performRequest(getRequest, Request::exists, RestHighLevelClient::convertExistsResponse, emptySet(), headers); + } + + /** + * Asynchronously checks for the existence of a document. Returns true if it exists, false otherwise + * + * See Get API on elastic.co + */ + public void existsAsync(GetRequest getRequest, ActionListener listener, Header... headers) { + performRequestAsync(getRequest, Request::exists, RestHighLevelClient::convertExistsResponse, listener, emptySet(), headers); + } + + /** + * Index a document using the Index API + * + * See Index API on elastic.co + */ + public IndexResponse index(IndexRequest indexRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(indexRequest, Request::index, IndexResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously index a document using the Index API + * + * See Index API on elastic.co + */ + public void indexAsync(IndexRequest indexRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(indexRequest, Request::index, IndexResponse::fromXContent, listener, emptySet(), headers); + } + + /** + * Updates a document using the Update API + *

+ * See Update API on elastic.co + */ + public UpdateResponse update(UpdateRequest updateRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(updateRequest, Request::update, UpdateResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously updates a document using the Update API + *

+ * See Update API on elastic.co + */ + public void updateAsync(UpdateRequest updateRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(updateRequest, Request::update, UpdateResponse::fromXContent, listener, emptySet(), headers); + } + + /** + * Deletes a document by id using the Delete api + * + * See Delete API on elastic.co + */ + public DeleteResponse delete(DeleteRequest deleteRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(deleteRequest, Request::delete, DeleteResponse::fromXContent, Collections.singleton(404), + headers); + } + + /** + * Asynchronously deletes a document by id using the Delete api + * + * See Delete API on elastic.co + */ + public void deleteAsync(DeleteRequest deleteRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(deleteRequest, Request::delete, DeleteResponse::fromXContent, listener, + Collections.singleton(404), headers); + } + + /** + * Executes a search using the Search api + * + * See Search API on elastic.co + */ + public SearchResponse search(SearchRequest searchRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(searchRequest, Request::search, SearchResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously executes a search using the Search api + * + * See Search API on elastic.co + */ + public void searchAsync(SearchRequest searchRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(searchRequest, Request::search, SearchResponse::fromXContent, listener, emptySet(), headers); + } + + /** + * Executes a search using the Search Scroll api + * + * See Search Scroll + * API on elastic.co + */ + public SearchResponse searchScroll(SearchScrollRequest searchScrollRequest, Header... 
headers) throws IOException { + return performRequestAndParseEntity(searchScrollRequest, Request::searchScroll, SearchResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously executes a search using the Search Scroll api + * + * See Search Scroll + * API on elastic.co + */ + public void searchScrollAsync(SearchScrollRequest searchScrollRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(searchScrollRequest, Request::searchScroll, SearchResponse::fromXContent, + listener, emptySet(), headers); + } + + /** + * Clears one or more scroll ids using the Clear Scroll api + * + * See + * Clear Scroll API on elastic.co + */ + public ClearScrollResponse clearScroll(ClearScrollRequest clearScrollRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(clearScrollRequest, Request::clearScroll, ClearScrollResponse::fromXContent, + emptySet(), headers); + } + + /** + * Asynchronously clears one or more scroll ids using the Clear Scroll api + * + * See + * Clear Scroll API on elastic.co + */ + public void clearScrollAsync(ClearScrollRequest clearScrollRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(clearScrollRequest, Request::clearScroll, ClearScrollResponse::fromXContent, + listener, emptySet(), headers); + } + + private Resp performRequestAndParseEntity(Req request, + CheckedFunction requestConverter, + CheckedFunction entityParser, + Set ignores, Header... headers) throws IOException { + return performRequest(request, requestConverter, (response) -> parseEntity(response.getEntity(), entityParser), ignores, headers); + } + + Resp performRequest(Req request, + CheckedFunction requestConverter, + CheckedFunction responseConverter, + Set ignores, Header... 
headers) throws IOException { + ActionRequestValidationException validationException = request.validate(); + if (validationException != null) { + throw validationException; + } + Request req = requestConverter.apply(request); + Response response; + try { + response = client.performRequest(req.method, req.endpoint, req.params, req.entity, headers); + } catch (ResponseException e) { + if (ignores.contains(e.getResponse().getStatusLine().getStatusCode())) { + try { + return responseConverter.apply(e.getResponse()); + } catch (Exception innerException) { + throw parseResponseException(e); + } + } + throw parseResponseException(e); + } + + try { + return responseConverter.apply(response); + } catch(Exception e) { + throw new IOException("Unable to parse response body for " + response, e); + } + } + + private void performRequestAsyncAndParseEntity(Req request, + CheckedFunction requestConverter, + CheckedFunction entityParser, + ActionListener listener, Set ignores, Header... headers) { + performRequestAsync(request, requestConverter, (response) -> parseEntity(response.getEntity(), entityParser), + listener, ignores, headers); + } + + void performRequestAsync(Req request, + CheckedFunction requestConverter, + CheckedFunction responseConverter, + ActionListener listener, Set ignores, Header... 
headers) { + ActionRequestValidationException validationException = request.validate(); + if (validationException != null) { + listener.onFailure(validationException); + return; + } + Request req; + try { + req = requestConverter.apply(request); + } catch (Exception e) { + listener.onFailure(e); + return; + } + + ResponseListener responseListener = wrapResponseListener(responseConverter, listener, ignores); + client.performRequestAsync(req.method, req.endpoint, req.params, req.entity, responseListener, headers); + } + + ResponseListener wrapResponseListener(CheckedFunction responseConverter, + ActionListener actionListener, Set ignores) { + return new ResponseListener() { + @Override + public void onSuccess(Response response) { + try { + actionListener.onResponse(responseConverter.apply(response)); + } catch(Exception e) { + IOException ioe = new IOException("Unable to parse response body for " + response, e); + onFailure(ioe); + } + } + + @Override + public void onFailure(Exception exception) { + if (exception instanceof ResponseException) { + ResponseException responseException = (ResponseException) exception; + Response response = responseException.getResponse(); + if (ignores.contains(response.getStatusLine().getStatusCode())) { + try { + actionListener.onResponse(responseConverter.apply(response)); + } catch (Exception innerException) { + //the exception is ignored as we now try to parse the response as an error. + //this covers cases like get where 404 can either be a valid document not found response, + //or an error for which parsing is completely different. We try to consider the 404 response as a valid one + //first. If parsing of the response breaks, we fall back to parsing it as an error. 
+ actionListener.onFailure(parseResponseException(responseException)); + } + } else { + actionListener.onFailure(parseResponseException(responseException)); + } + } else { + actionListener.onFailure(exception); + } + } + }; + } + + /** + * Converts a {@link ResponseException} obtained from the low level REST client into an {@link ElasticsearchException}. + * If a response body was returned, tries to parse it as an error returned from Elasticsearch. + * If no response body was returned or anything goes wrong while parsing the error, returns a new {@link ElasticsearchStatusException} + * that wraps the original {@link ResponseException}. The potential exception obtained while parsing is added to the returned + * exception as a suppressed exception. This method is guaranteed to not throw any exception eventually thrown while parsing. + */ + ElasticsearchStatusException parseResponseException(ResponseException responseException) { + Response response = responseException.getResponse(); + HttpEntity entity = response.getEntity(); + ElasticsearchStatusException elasticsearchException; + if (entity == null) { + elasticsearchException = new ElasticsearchStatusException( + responseException.getMessage(), RestStatus.fromCode(response.getStatusLine().getStatusCode()), responseException); + } else { + try { + elasticsearchException = parseEntity(entity, BytesRestResponse::errorFromXContent); + elasticsearchException.addSuppressed(responseException); + } catch (Exception e) { + RestStatus restStatus = RestStatus.fromCode(response.getStatusLine().getStatusCode()); + elasticsearchException = new ElasticsearchStatusException("Unable to parse response body", restStatus, responseException); + elasticsearchException.addSuppressed(e); + } + } + return elasticsearchException; + } + + Resp parseEntity( + HttpEntity entity, CheckedFunction entityParser) throws IOException { + if (entity == null) { + throw new IllegalStateException("Response body expected but not returned"); + } + if 
(entity.getContentType() == null) { + throw new IllegalStateException("Elasticsearch didn't return the [Content-Type] header, unable to parse response body"); + } + XContentType xContentType = XContentType.fromMediaTypeOrFormat(entity.getContentType().getValue()); + if (xContentType == null) { + throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue()); + } + try (XContentParser parser = xContentType.xContent().createParser(registry, entity.getContent())) { + return entityParser.apply(parser); + } + } + + static boolean convertExistsResponse(Response response) { + return response.getStatusLine().getStatusCode() == 200; + } + + static List getDefaultNamedXContents() { + Map> map = new HashMap<>(); + map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); + map.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c)); + map.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c)); + map.put(InternalTDigestPercentiles.NAME, (p, c) -> ParsedTDigestPercentiles.fromXContent(p, (String) c)); + map.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c)); + map.put(PercentilesBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)); + map.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)); + map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)); + map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)); + map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)); + map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)); + map.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c)); + 
map.put(DerivativePipelineAggregationBuilder.NAME, (p, c) -> ParsedDerivative.fromXContent(p, (String) c)); + map.put(InternalBucketMetricValue.NAME, (p, c) -> ParsedBucketMetricValue.fromXContent(p, (String) c)); + map.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c)); + map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c)); + map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)); + map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME, + (p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c)); + map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c)); + map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c)); + map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c)); + map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)); + map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); + map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); + map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); + map.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c)); + map.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c)); + map.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c)); + map.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c)); + map.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c)); + map.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c)); + map.put(GeoGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) 
c)); + map.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c)); + map.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c)); + map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c)); + map.put(FiltersAggregationBuilder.NAME, (p, c) -> ParsedFilters.fromXContent(p, (String) c)); + map.put(AdjacencyMatrixAggregationBuilder.NAME, (p, c) -> ParsedAdjacencyMatrix.fromXContent(p, (String) c)); + map.put(SignificantLongTerms.NAME, (p, c) -> ParsedSignificantLongTerms.fromXContent(p, (String) c)); + map.put(SignificantStringTerms.NAME, (p, c) -> ParsedSignificantStringTerms.fromXContent(p, (String) c)); + map.put(ScriptedMetricAggregationBuilder.NAME, (p, c) -> ParsedScriptedMetric.fromXContent(p, (String) c)); + map.put(ChildrenAggregationBuilder.NAME, (p, c) -> ParsedChildren.fromXContent(p, (String) c)); + map.put(MatrixStatsAggregationBuilder.NAME, (p, c) -> ParsedMatrixStats.fromXContent(p, (String) c)); + List entries = map.entrySet().stream() + .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestion.NAME), + (parser, context) -> TermSuggestion.fromXContent(parser, (String)context))); + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestion.NAME), + (parser, context) -> PhraseSuggestion.fromXContent(parser, (String)context))); + entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestion.NAME), + (parser, context) -> CompletionSuggestion.fromXContent(parser, (String)context))); + return entries; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java new 
file mode 100644 index 0000000000000..a28686b27aa0d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -0,0 +1,704 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkProcessor; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.common.Strings; +import 
org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +import java.util.concurrent.atomic.AtomicReference; + +import static java.util.Collections.singletonMap; + +public class CrudIT extends ESRestHighLevelClientTestCase { + + public void testDelete() throws IOException { + { + // Testing deletion + String docId = "id"; + highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar"))); + DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId); + if (randomBoolean()) { + deleteRequest.version(1L); + } + DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); + assertEquals("index", deleteResponse.getIndex()); + assertEquals("type", deleteResponse.getType()); + assertEquals(docId, deleteResponse.getId()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); + } + { + // Testing non existing document + String docId = "does_not_exist"; + DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId); + DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); + assertEquals("index", deleteResponse.getIndex()); + assertEquals("type", deleteResponse.getType()); + assertEquals(docId, 
deleteResponse.getId()); + assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); + } + { + // Testing version conflict + String docId = "version_conflict"; + highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar"))); + DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).version(2); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync)); + assertEquals(RestStatus.CONFLICT, exception.status()); + assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][" + docId + "]: " + + "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage()); + assertEquals("index", exception.getMetadata("es.index").get(0)); + } + { + // Testing version type + String docId = "version_type"; + highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")) + .versionType(VersionType.EXTERNAL).version(12)); + DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).versionType(VersionType.EXTERNAL).version(13); + DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); + assertEquals("index", deleteResponse.getIndex()); + assertEquals("type", deleteResponse.getType()); + assertEquals(docId, deleteResponse.getId()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); + } + { + // Testing version type with a wrong version + String docId = "wrong_version"; + highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")) + .versionType(VersionType.EXTERNAL).version(12)); + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { + DeleteRequest deleteRequest = new 
DeleteRequest("index", "type", docId).versionType(VersionType.EXTERNAL).version(10); + execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); + }); + assertEquals(RestStatus.CONFLICT, exception.status()); + assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][" + + docId + "]: version conflict, current version [12] is higher or equal to the one provided [10]]", exception.getMessage()); + assertEquals("index", exception.getMetadata("es.index").get(0)); + } + { + // Testing routing + String docId = "routing"; + highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")).routing("foo")); + DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).routing("foo"); + DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync); + assertEquals("index", deleteResponse.getIndex()); + assertEquals("type", deleteResponse.getType()); + assertEquals(docId, deleteResponse.getId()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); + } + } + + public void testExists() throws IOException { + { + GetRequest getRequest = new GetRequest("index", "type", "id"); + assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); + } + String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}"; + StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); + Response response = client().performRequest("PUT", "/index/type/id", Collections.singletonMap("refresh", "wait_for"), stringEntity); + assertEquals(201, response.getStatusLine().getStatusCode()); + { + GetRequest getRequest = new GetRequest("index", "type", "id"); + assertTrue(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); + } + { + GetRequest getRequest = new GetRequest("index", "type", "does_not_exist"); + 
assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); + } + { + GetRequest getRequest = new GetRequest("index", "type", "does_not_exist").version(1); + assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); + } + } + + public void testGet() throws IOException { + { + GetRequest getRequest = new GetRequest("index", "type", "id"); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", exception.getMessage()); + assertEquals("index", exception.getMetadata("es.index").get(0)); + } + + String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}"; + StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); + Response response = client().performRequest("PUT", "/index/type/id", Collections.singletonMap("refresh", "wait_for"), stringEntity); + assertEquals(201, response.getStatusLine().getStatusCode()); + { + GetRequest getRequest = new GetRequest("index", "type", "id").version(2); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)); + assertEquals(RestStatus.CONFLICT, exception.status()); + assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, " + "reason=[type][id]: " + + "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage()); + assertEquals("index", exception.getMetadata("es.index").get(0)); + } + { + GetRequest getRequest = new GetRequest("index", "type", "id"); + if (randomBoolean()) { + getRequest.version(1L); + } + GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); + 
assertEquals("index", getResponse.getIndex()); + assertEquals("type", getResponse.getType()); + assertEquals("id", getResponse.getId()); + assertTrue(getResponse.isExists()); + assertFalse(getResponse.isSourceEmpty()); + assertEquals(1L, getResponse.getVersion()); + assertEquals(document, getResponse.getSourceAsString()); + } + { + GetRequest getRequest = new GetRequest("index", "type", "does_not_exist"); + GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); + assertEquals("index", getResponse.getIndex()); + assertEquals("type", getResponse.getType()); + assertEquals("does_not_exist", getResponse.getId()); + assertFalse(getResponse.isExists()); + assertEquals(-1, getResponse.getVersion()); + assertTrue(getResponse.isSourceEmpty()); + assertNull(getResponse.getSourceAsString()); + } + { + GetRequest getRequest = new GetRequest("index", "type", "id"); + getRequest.fetchSourceContext(new FetchSourceContext(false, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)); + GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); + assertEquals("index", getResponse.getIndex()); + assertEquals("type", getResponse.getType()); + assertEquals("id", getResponse.getId()); + assertTrue(getResponse.isExists()); + assertTrue(getResponse.isSourceEmpty()); + assertEquals(1L, getResponse.getVersion()); + assertNull(getResponse.getSourceAsString()); + } + { + GetRequest getRequest = new GetRequest("index", "type", "id"); + if (randomBoolean()) { + getRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, Strings.EMPTY_ARRAY)); + } else { + getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[]{"field2"})); + } + GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync); + assertEquals("index", getResponse.getIndex()); + assertEquals("type", getResponse.getType()); + assertEquals("id", 
getResponse.getId()); + assertTrue(getResponse.isExists()); + assertFalse(getResponse.isSourceEmpty()); + assertEquals(1L, getResponse.getVersion()); + Map sourceAsMap = getResponse.getSourceAsMap(); + assertEquals(1, sourceAsMap.size()); + assertEquals("value1", sourceAsMap.get("field1")); + } + } + + public void testIndex() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + { + IndexRequest indexRequest = new IndexRequest("index", "type"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("test", "test").endObject()); + + IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + assertEquals(RestStatus.CREATED, indexResponse.status()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); + assertEquals("index", indexResponse.getIndex()); + assertEquals("type", indexResponse.getType()); + assertTrue(Strings.hasLength(indexResponse.getId())); + assertEquals(1L, indexResponse.getVersion()); + assertNotNull(indexResponse.getShardId()); + assertEquals(-1, indexResponse.getShardId().getId()); + assertEquals("index", indexResponse.getShardId().getIndexName()); + assertEquals("index", indexResponse.getShardId().getIndex().getName()); + assertEquals("_na_", indexResponse.getShardId().getIndex().getUUID()); + assertNotNull(indexResponse.getShardInfo()); + assertEquals(0, indexResponse.getShardInfo().getFailed()); + assertTrue(indexResponse.getShardInfo().getSuccessful() > 0); + assertTrue(indexResponse.getShardInfo().getTotal() > 0); + } + { + IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 1).endObject()); + + IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + assertEquals(RestStatus.CREATED, indexResponse.status()); + 
assertEquals("index", indexResponse.getIndex()); + assertEquals("type", indexResponse.getType()); + assertEquals("id", indexResponse.getId()); + assertEquals(1L, indexResponse.getVersion()); + + indexRequest = new IndexRequest("index", "type", "id"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 2).endObject()); + + indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + assertEquals(RestStatus.OK, indexResponse.status()); + assertEquals("index", indexResponse.getIndex()); + assertEquals("type", indexResponse.getType()); + assertEquals("id", indexResponse.getId()); + assertEquals(2L, indexResponse.getVersion()); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { + IndexRequest wrongRequest = new IndexRequest("index", "type", "id"); + wrongRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); + wrongRequest.version(5L); + + execute(wrongRequest, highLevelClient()::index, highLevelClient()::indexAsync); + }); + assertEquals(RestStatus.CONFLICT, exception.status()); + assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: " + + "version conflict, current version [2] is different than the one provided [5]]", exception.getMessage()); + assertEquals("index", exception.getMetadata("es.index").get(0)); + } + { + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { + IndexRequest indexRequest = new IndexRequest("index", "type", "missing_parent"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); + indexRequest.parent("missing"); + + execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + }); + + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + assertEquals("Elasticsearch 
exception [type=illegal_argument_exception, " + + "reason=can't specify parent if no parent field has been configured]", exception.getMessage()); + } + { + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { + IndexRequest indexRequest = new IndexRequest("index", "type", "missing_pipeline"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); + indexRequest.setPipeline("missing"); + + execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + }); + + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + assertEquals("Elasticsearch exception [type=illegal_argument_exception, " + + "reason=pipeline with id [missing] does not exist]", exception.getMessage()); + } + { + IndexRequest indexRequest = new IndexRequest("index", "type", "external_version_type"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); + indexRequest.version(12L); + indexRequest.versionType(VersionType.EXTERNAL); + + IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + assertEquals(RestStatus.CREATED, indexResponse.status()); + assertEquals("index", indexResponse.getIndex()); + assertEquals("type", indexResponse.getType()); + assertEquals("external_version_type", indexResponse.getId()); + assertEquals(12L, indexResponse.getVersion()); + } + { + final IndexRequest indexRequest = new IndexRequest("index", "type", "with_create_op_type"); + indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject()); + indexRequest.opType(DocWriteRequest.OpType.CREATE); + + IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + assertEquals(RestStatus.CREATED, indexResponse.status()); + assertEquals("index", indexResponse.getIndex()); 
+ assertEquals("type", indexResponse.getType()); + assertEquals("with_create_op_type", indexResponse.getId()); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { + execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); + }); + + assertEquals(RestStatus.CONFLICT, exception.status()); + assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][with_create_op_type]: " + + "version conflict, document already exists (current version [1])]", exception.getMessage()); + } + } + + public void testUpdate() throws IOException { + { + UpdateRequest updateRequest = new UpdateRequest("index", "type", "does_not_exist"); + updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values())); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> + execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][does_not_exist]: document missing]", + exception.getMessage()); + } + { + IndexRequest indexRequest = new IndexRequest("index", "type", "id"); + indexRequest.source(singletonMap("field", "value")); + IndexResponse indexResponse = highLevelClient().index(indexRequest); + assertEquals(RestStatus.CREATED, indexResponse.status()); + + UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); + updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values())); + + UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.OK, updateResponse.status()); + assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion()); + + UpdateRequest updateRequestConflict = new UpdateRequest("index", "type", "id"); + 
updateRequestConflict.doc(singletonMap("field", "with_version_conflict"), randomFrom(XContentType.values())); + updateRequestConflict.version(indexResponse.getVersion()); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> + execute(updateRequestConflict, highLevelClient()::update, highLevelClient()::updateAsync)); + assertEquals(RestStatus.CONFLICT, exception.status()); + assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: version conflict, " + + "current version [2] is different than the one provided [1]]", exception.getMessage()); + } + { + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> { + UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); + updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values())); + if (randomBoolean()) { + updateRequest.parent("missing"); + } else { + updateRequest.routing("missing"); + } + execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + }); + + assertEquals(RestStatus.NOT_FOUND, exception.status()); + assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][id]: document missing]", + exception.getMessage()); + } + { + IndexRequest indexRequest = new IndexRequest("index", "type", "with_script"); + indexRequest.source(singletonMap("counter", 12)); + IndexResponse indexResponse = highLevelClient().index(indexRequest); + assertEquals(RestStatus.CREATED, indexResponse.status()); + + UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_script"); + Script script = new Script(ScriptType.INLINE, "painless", "ctx._source.counter += params.count", singletonMap("count", 8)); + updateRequest.script(script); + updateRequest.fetchSource(true); + + UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + 
assertEquals(RestStatus.OK, updateResponse.status()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); + assertEquals(2L, updateResponse.getVersion()); + assertEquals(20, updateResponse.getGetResult().sourceAsMap().get("counter")); + + } + { + IndexRequest indexRequest = new IndexRequest("index", "type", "with_doc"); + indexRequest.source("field_1", "one", "field_3", "three"); + indexRequest.version(12L); + indexRequest.versionType(VersionType.EXTERNAL); + IndexResponse indexResponse = highLevelClient().index(indexRequest); + assertEquals(RestStatus.CREATED, indexResponse.status()); + assertEquals(12L, indexResponse.getVersion()); + + UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_doc"); + updateRequest.doc(singletonMap("field_2", "two"), randomFrom(XContentType.values())); + updateRequest.fetchSource("field_*", "field_3"); + + UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.OK, updateResponse.status()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); + assertEquals(13L, updateResponse.getVersion()); + GetResult getResult = updateResponse.getGetResult(); + assertEquals(13L, updateResponse.getVersion()); + Map sourceAsMap = getResult.sourceAsMap(); + assertEquals("one", sourceAsMap.get("field_1")); + assertEquals("two", sourceAsMap.get("field_2")); + assertFalse(sourceAsMap.containsKey("field_3")); + } + { + IndexRequest indexRequest = new IndexRequest("index", "type", "noop"); + indexRequest.source("field", "value"); + IndexResponse indexResponse = highLevelClient().index(indexRequest); + assertEquals(RestStatus.CREATED, indexResponse.status()); + assertEquals(1L, indexResponse.getVersion()); + + UpdateRequest updateRequest = new UpdateRequest("index", "type", "noop"); + updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values())); + + UpdateResponse updateResponse = 
execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.OK, updateResponse.status()); + assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult()); + assertEquals(1L, updateResponse.getVersion()); + + updateRequest.detectNoop(false); + + updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.OK, updateResponse.status()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); + assertEquals(2L, updateResponse.getVersion()); + } + { + UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_upsert"); + updateRequest.upsert(singletonMap("doc_status", "created")); + updateRequest.doc(singletonMap("doc_status", "updated")); + updateRequest.fetchSource(true); + + UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.CREATED, updateResponse.status()); + assertEquals("index", updateResponse.getIndex()); + assertEquals("type", updateResponse.getType()); + assertEquals("with_upsert", updateResponse.getId()); + GetResult getResult = updateResponse.getGetResult(); + assertEquals(1L, updateResponse.getVersion()); + assertEquals("created", getResult.sourceAsMap().get("doc_status")); + } + { + UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_doc_as_upsert"); + updateRequest.doc(singletonMap("field", "initialized")); + updateRequest.fetchSource(true); + updateRequest.docAsUpsert(true); + + UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.CREATED, updateResponse.status()); + assertEquals("index", updateResponse.getIndex()); + assertEquals("type", updateResponse.getType()); + assertEquals("with_doc_as_upsert", updateResponse.getId()); + GetResult getResult = updateResponse.getGetResult(); + assertEquals(1L, 
updateResponse.getVersion()); + assertEquals("initialized", getResult.sourceAsMap().get("field")); + } + { + UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_scripted_upsert"); + updateRequest.fetchSource(true); + updateRequest.script(new Script(ScriptType.INLINE, "painless", "ctx._source.level = params.test", singletonMap("test", "C"))); + updateRequest.scriptedUpsert(true); + updateRequest.upsert(singletonMap("level", "A")); + + UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + assertEquals(RestStatus.CREATED, updateResponse.status()); + assertEquals("index", updateResponse.getIndex()); + assertEquals("type", updateResponse.getType()); + assertEquals("with_scripted_upsert", updateResponse.getId()); + + GetResult getResult = updateResponse.getGetResult(); + assertEquals(1L, updateResponse.getVersion()); + assertEquals("C", getResult.sourceAsMap().get("level")); + } + { + IllegalStateException exception = expectThrows(IllegalStateException.class, () -> { + UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); + updateRequest.doc(new IndexRequest().source(Collections.singletonMap("field", "doc"), XContentType.JSON)); + updateRequest.upsert(new IndexRequest().source(Collections.singletonMap("field", "upsert"), XContentType.YAML)); + execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync); + }); + assertEquals("Update request cannot have different content types for doc [JSON] and upsert [YAML] documents", + exception.getMessage()); + } + } + + public void testBulk() throws IOException { + int nbItems = randomIntBetween(10, 100); + boolean[] errors = new boolean[nbItems]; + + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + + BulkRequest bulkRequest = new BulkRequest(); + for (int i = 0; i < nbItems; i++) { + String id = String.valueOf(i); + boolean erroneous = randomBoolean(); + errors[i] = erroneous; + + 
DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); + if (opType == DocWriteRequest.OpType.DELETE) { + if (erroneous == false) { + assertEquals(RestStatus.CREATED, + highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); + } + DeleteRequest deleteRequest = new DeleteRequest("index", "test", id); + bulkRequest.add(deleteRequest); + + } else { + BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes(); + if (opType == DocWriteRequest.OpType.INDEX) { + IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType); + if (erroneous) { + indexRequest.version(12L); + } + bulkRequest.add(indexRequest); + + } else if (opType == DocWriteRequest.OpType.CREATE) { + IndexRequest createRequest = new IndexRequest("index", "test", id).source(source, xContentType).create(true); + if (erroneous) { + assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest).status()); + } + bulkRequest.add(createRequest); + + } else if (opType == DocWriteRequest.OpType.UPDATE) { + UpdateRequest updateRequest = new UpdateRequest("index", "test", id) + .doc(new IndexRequest().source(source, xContentType)); + if (erroneous == false) { + assertEquals(RestStatus.CREATED, + highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); + } + bulkRequest.add(updateRequest); + } + } + } + + BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync); + assertEquals(RestStatus.OK, bulkResponse.status()); + assertTrue(bulkResponse.getTook().getMillis() > 0); + assertEquals(nbItems, bulkResponse.getItems().length); + + validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest); + } + + public void testBulkProcessorIntegration() throws IOException, InterruptedException { + int nbItems = randomIntBetween(10, 100); + boolean[] errors = new boolean[nbItems]; + + 
XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + + AtomicReference responseRef = new AtomicReference<>(); + AtomicReference requestRef = new AtomicReference<>(); + AtomicReference error = new AtomicReference<>(); + + BulkProcessor.Listener listener = new BulkProcessor.Listener() { + @Override + public void beforeBulk(long executionId, BulkRequest request) { + + } + + @Override + public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + responseRef.set(response); + requestRef.set(request); + } + + @Override + public void afterBulk(long executionId, BulkRequest request, Throwable failure) { + error.set(failure); + } + }; + + ThreadPool threadPool = new ThreadPool(Settings.builder().put("node.name", getClass().getName()).build()); + try(BulkProcessor processor = new BulkProcessor.Builder(highLevelClient()::bulkAsync, listener, threadPool) + .setConcurrentRequests(0) + .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.GB)) + .setBulkActions(nbItems + 1) + .build()) { + for (int i = 0; i < nbItems; i++) { + String id = String.valueOf(i); + boolean erroneous = randomBoolean(); + errors[i] = erroneous; + + DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); + if (opType == DocWriteRequest.OpType.DELETE) { + if (erroneous == false) { + assertEquals(RestStatus.CREATED, + highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); + } + DeleteRequest deleteRequest = new DeleteRequest("index", "test", id); + processor.add(deleteRequest); + + } else { + if (opType == DocWriteRequest.OpType.INDEX) { + IndexRequest indexRequest = new IndexRequest("index", "test", id).source(xContentType, "id", i); + if (erroneous) { + indexRequest.version(12L); + } + processor.add(indexRequest); + + } else if (opType == DocWriteRequest.OpType.CREATE) { + IndexRequest createRequest = new IndexRequest("index", "test", id).source(xContentType, "id", i).create(true); + if (erroneous) 
{ + assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest).status()); + } + processor.add(createRequest); + + } else if (opType == DocWriteRequest.OpType.UPDATE) { + UpdateRequest updateRequest = new UpdateRequest("index", "test", id) + .doc(new IndexRequest().source(xContentType, "id", i)); + if (erroneous == false) { + assertEquals(RestStatus.CREATED, + highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status()); + } + processor.add(updateRequest); + } + } + } + assertNull(responseRef.get()); + assertNull(requestRef.get()); + } + + + BulkResponse bulkResponse = responseRef.get(); + BulkRequest bulkRequest = requestRef.get(); + + assertEquals(RestStatus.OK, bulkResponse.status()); + assertTrue(bulkResponse.getTookInMillis() > 0); + assertEquals(nbItems, bulkResponse.getItems().length); + assertNull(error.get()); + + validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest); + + terminate(threadPool); + } + + private void validateBulkResponses(int nbItems, boolean[] errors, BulkResponse bulkResponse, BulkRequest bulkRequest) { + for (int i = 0; i < nbItems; i++) { + BulkItemResponse bulkItemResponse = bulkResponse.getItems()[i]; + + assertEquals(i, bulkItemResponse.getItemId()); + assertEquals("index", bulkItemResponse.getIndex()); + assertEquals("test", bulkItemResponse.getType()); + assertEquals(String.valueOf(i), bulkItemResponse.getId()); + + DocWriteRequest.OpType requestOpType = bulkRequest.requests().get(i).opType(); + if (requestOpType == DocWriteRequest.OpType.INDEX || requestOpType == DocWriteRequest.OpType.CREATE) { + assertEquals(errors[i], bulkItemResponse.isFailed()); + assertEquals(errors[i] ? RestStatus.CONFLICT : RestStatus.CREATED, bulkItemResponse.status()); + } else if (requestOpType == DocWriteRequest.OpType.UPDATE) { + assertEquals(errors[i], bulkItemResponse.isFailed()); + assertEquals(errors[i] ? 
RestStatus.NOT_FOUND : RestStatus.OK, bulkItemResponse.status()); + } else if (requestOpType == DocWriteRequest.OpType.DELETE) { + assertFalse(bulkItemResponse.isFailed()); + assertEquals(errors[i] ? RestStatus.NOT_FOUND : RestStatus.OK, bulkItemResponse.status()); + } + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java new file mode 100644 index 0000000000000..cdd8317830909 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.Header; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.AfterClass; +import org.junit.Before; + +import java.io.IOException; + +public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase { + + private static RestHighLevelClient restHighLevelClient; + + @Before + public void initHighLevelClient() throws IOException { + super.initClient(); + if (restHighLevelClient == null) { + restHighLevelClient = new RestHighLevelClient(client()); + } + } + + @AfterClass + public static void cleanupClient() { + restHighLevelClient = null; + } + + protected static RestHighLevelClient highLevelClient() { + return restHighLevelClient; + } + + /** + * Executes the provided request using either the sync method or its async variant, both provided as functions + */ + protected static Resp execute(Req request, SyncMethod syncMethod, + AsyncMethod asyncMethod, Header... headers) throws IOException { + if (randomBoolean()) { + return syncMethod.execute(request, headers); + } else { + PlainActionFuture future = PlainActionFuture.newFuture(); + asyncMethod.execute(request, future, headers); + return future.actionGet(); + } + } + + @FunctionalInterface + protected interface SyncMethod { + Response execute(Request request, Header... headers) throws IOException; + } + + @FunctionalInterface + protected interface AsyncMethod { + void execute(Request request, ActionListener listener, Header... 
headers); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java new file mode 100644 index 0000000000000..b22ded52655df --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.elasticsearch.action.main.MainResponse; + +import java.io.IOException; +import java.util.Map; + +public class PingAndInfoIT extends ESRestHighLevelClientTestCase { + + public void testPing() throws IOException { + assertTrue(highLevelClient().ping()); + } + + @SuppressWarnings("unchecked") + public void testInfo() throws IOException { + MainResponse info = highLevelClient().info(); + // compare with what the low level client outputs + Map infoAsMap = entityAsMap(adminClient().performRequest("GET", "/")); + assertEquals(infoAsMap.get("cluster_name"), info.getClusterName().value()); + assertEquals(infoAsMap.get("cluster_uuid"), info.getClusterUuid()); + + // only check node name existence, might be a different one from what was hit by low level client in multi-node cluster + assertNotNull(info.getNodeName()); + Map versionMap = (Map) infoAsMap.get("version"); + assertEquals(versionMap.get("build_hash"), info.getBuild().shortHash()); + assertEquals(versionMap.get("build_date"), info.getBuild().date()); + assertEquals(versionMap.get("build_snapshot"), info.getBuild().isSnapshot()); + assertEquals(versionMap.get("number"), info.getVersion().toString()); + assertEquals(versionMap.get("lucene_version"), info.getVersion().luceneVersion.toString()); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java new file mode 100644 index 0000000000000..f18e348adce5e --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -0,0 +1,906 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; +import org.elasticsearch.action.support.replication.ReplicationRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import 
org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.rescore.QueryRescorerBuilder; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.RandomObjects; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.StringJoiner; +import java.util.function.Consumer; +import java.util.function.Function; + +import static java.util.Collections.singletonMap; +import static org.elasticsearch.client.Request.enforceSameContentType; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class RequestTests extends ESTestCase { + + public void testPing() { + Request request = Request.ping(); + assertEquals("/", request.endpoint); + assertEquals(0, request.params.size()); + assertNull(request.entity); + assertEquals("HEAD", request.method); + } + + public void testInfo() { + Request request = Request.info(); + assertEquals("/", request.endpoint); + assertEquals(0, request.params.size()); + assertNull(request.entity); + assertEquals("GET", request.method); + } + + public 
void testGet() { + getAndExistsTest(Request::get, "GET"); + } + + public void testDelete() throws IOException { + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + DeleteRequest deleteRequest = new DeleteRequest(index, type, id); + + Map expectedParams = new HashMap<>(); + + setRandomTimeout(deleteRequest, expectedParams); + setRandomRefreshPolicy(deleteRequest, expectedParams); + setRandomVersion(deleteRequest, expectedParams); + setRandomVersionType(deleteRequest, expectedParams); + + if (frequently()) { + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + deleteRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + String parent = randomAlphaOfLengthBetween(3, 10); + deleteRequest.parent(parent); + expectedParams.put("parent", parent); + } + } + + Request request = Request.delete(deleteRequest); + assertEquals("/" + index + "/" + type + "/" + id, request.endpoint); + assertEquals(expectedParams, request.params); + assertEquals("DELETE", request.method); + assertNull(request.entity); + } + + public void testExists() { + getAndExistsTest(Request::exists, "HEAD"); + } + + private static void getAndExistsTest(Function requestConverter, String method) { + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + GetRequest getRequest = new GetRequest(index, type, id); + + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + if (randomBoolean()) { + String preference = randomAlphaOfLengthBetween(3, 10); + getRequest.preference(preference); + expectedParams.put("preference", preference); + } + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + getRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + boolean realtime = 
randomBoolean(); + getRequest.realtime(realtime); + if (realtime == false) { + expectedParams.put("realtime", "false"); + } + } + if (randomBoolean()) { + boolean refresh = randomBoolean(); + getRequest.refresh(refresh); + if (refresh) { + expectedParams.put("refresh", "true"); + } + } + if (randomBoolean()) { + long version = randomLong(); + getRequest.version(version); + if (version != Versions.MATCH_ANY) { + expectedParams.put("version", Long.toString(version)); + } + } + if (randomBoolean()) { + VersionType versionType = randomFrom(VersionType.values()); + getRequest.versionType(versionType); + if (versionType != VersionType.INTERNAL) { + expectedParams.put("version_type", versionType.name().toLowerCase(Locale.ROOT)); + } + } + if (randomBoolean()) { + int numStoredFields = randomIntBetween(1, 10); + String[] storedFields = new String[numStoredFields]; + StringBuilder storedFieldsParam = new StringBuilder(); + for (int i = 0; i < numStoredFields; i++) { + String storedField = randomAlphaOfLengthBetween(3, 10); + storedFields[i] = storedField; + storedFieldsParam.append(storedField); + if (i < numStoredFields - 1) { + storedFieldsParam.append(","); + } + } + getRequest.storedFields(storedFields); + expectedParams.put("stored_fields", storedFieldsParam.toString()); + } + if (randomBoolean()) { + randomizeFetchSourceContextParams(getRequest::fetchSourceContext, expectedParams); + } + } + Request request = requestConverter.apply(getRequest); + assertEquals("/" + index + "/" + type + "/" + id, request.endpoint); + assertEquals(expectedParams, request.params); + assertNull(request.entity); + assertEquals(method, request.method); + } + + public void testIndex() throws IOException { + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + IndexRequest indexRequest = new IndexRequest(index, type); + + String id = randomBoolean() ? 
randomAlphaOfLengthBetween(3, 10) : null; + indexRequest.id(id); + + Map expectedParams = new HashMap<>(); + + String method = "POST"; + if (id != null) { + method = "PUT"; + if (randomBoolean()) { + indexRequest.opType(DocWriteRequest.OpType.CREATE); + } + } + + setRandomTimeout(indexRequest, expectedParams); + setRandomRefreshPolicy(indexRequest, expectedParams); + + // There is some logic around _create endpoint and version/version type + if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) { + indexRequest.version(randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED)); + expectedParams.put("version", Long.toString(Versions.MATCH_DELETED)); + } else { + setRandomVersion(indexRequest, expectedParams); + setRandomVersionType(indexRequest, expectedParams); + } + + if (frequently()) { + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + indexRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + String parent = randomAlphaOfLengthBetween(3, 10); + indexRequest.parent(parent); + expectedParams.put("parent", parent); + } + if (randomBoolean()) { + String pipeline = randomAlphaOfLengthBetween(3, 10); + indexRequest.setPipeline(pipeline); + expectedParams.put("pipeline", pipeline); + } + } + + XContentType xContentType = randomFrom(XContentType.values()); + int nbFields = randomIntBetween(0, 10); + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + for (int i = 0; i < nbFields; i++) { + builder.field("field_" + i, i); + } + builder.endObject(); + indexRequest.source(builder); + } + + Request request = Request.index(indexRequest); + if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) { + assertEquals("/" + index + "/" + type + "/" + id + "/_create", request.endpoint); + } else if (id != null) { + assertEquals("/" + index + "/" + type + "/" + id, request.endpoint); + } else { + assertEquals("/" + index + "/" + type, 
request.endpoint); + } + assertEquals(expectedParams, request.params); + assertEquals(method, request.method); + + HttpEntity entity = request.entity; + assertTrue(entity instanceof ByteArrayEntity); + assertEquals(indexRequest.getContentType().mediaType(), entity.getContentType().getValue()); + try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) { + assertEquals(nbFields, parser.map().size()); + } + } + + public void testUpdate() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + + Map expectedParams = new HashMap<>(); + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + + UpdateRequest updateRequest = new UpdateRequest(index, type, id); + updateRequest.detectNoop(randomBoolean()); + + if (randomBoolean()) { + BytesReference source = RandomObjects.randomSource(random(), xContentType); + updateRequest.doc(new IndexRequest().source(source, xContentType)); + + boolean docAsUpsert = randomBoolean(); + updateRequest.docAsUpsert(docAsUpsert); + if (docAsUpsert) { + expectedParams.put("doc_as_upsert", "true"); + } + } else { + updateRequest.script(mockScript("_value + 1")); + updateRequest.scriptedUpsert(randomBoolean()); + } + if (randomBoolean()) { + BytesReference source = RandomObjects.randomSource(random(), xContentType); + updateRequest.upsert(new IndexRequest().source(source, xContentType)); + } + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + updateRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + String parent = randomAlphaOfLengthBetween(3, 10); + updateRequest.parent(parent); + expectedParams.put("parent", parent); + } + if (randomBoolean()) { + String timeout = randomTimeValue(); + updateRequest.timeout(timeout); + expectedParams.put("timeout", timeout); + } else { + expectedParams.put("timeout", 
ReplicationRequest.DEFAULT_TIMEOUT.getStringRep()); + } + if (randomBoolean()) { + WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); + updateRequest.setRefreshPolicy(refreshPolicy); + if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) { + expectedParams.put("refresh", refreshPolicy.getValue()); + } + } + if (randomBoolean()) { + int waitForActiveShards = randomIntBetween(0, 10); + updateRequest.waitForActiveShards(waitForActiveShards); + expectedParams.put("wait_for_active_shards", String.valueOf(waitForActiveShards)); + } + if (randomBoolean()) { + long version = randomLong(); + updateRequest.version(version); + if (version != Versions.MATCH_ANY) { + expectedParams.put("version", Long.toString(version)); + } + } + if (randomBoolean()) { + VersionType versionType = randomFrom(VersionType.values()); + updateRequest.versionType(versionType); + if (versionType != VersionType.INTERNAL) { + expectedParams.put("version_type", versionType.name().toLowerCase(Locale.ROOT)); + } + } + if (randomBoolean()) { + int retryOnConflict = randomIntBetween(0, 5); + updateRequest.retryOnConflict(retryOnConflict); + if (retryOnConflict > 0) { + expectedParams.put("retry_on_conflict", String.valueOf(retryOnConflict)); + } + } + if (randomBoolean()) { + randomizeFetchSourceContextParams(updateRequest::fetchSource, expectedParams); + } + + Request request = Request.update(updateRequest); + assertEquals("/" + index + "/" + type + "/" + id + "/_update", request.endpoint); + assertEquals(expectedParams, request.params); + assertEquals("POST", request.method); + + HttpEntity entity = request.entity; + assertTrue(entity instanceof ByteArrayEntity); + + UpdateRequest parsedUpdateRequest = new UpdateRequest(); + + XContentType entityContentType = XContentType.fromMediaTypeOrFormat(entity.getContentType().getValue()); + try (XContentParser parser = createParser(entityContentType.xContent(), entity.getContent())) { + 
parsedUpdateRequest.fromXContent(parser); + } + + assertEquals(updateRequest.scriptedUpsert(), parsedUpdateRequest.scriptedUpsert()); + assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert()); + assertEquals(updateRequest.detectNoop(), parsedUpdateRequest.detectNoop()); + assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource()); + assertEquals(updateRequest.script(), parsedUpdateRequest.script()); + if (updateRequest.doc() != null) { + assertToXContentEquivalent(updateRequest.doc().source(), parsedUpdateRequest.doc().source(), xContentType); + } else { + assertNull(parsedUpdateRequest.doc()); + } + if (updateRequest.upsertRequest() != null) { + assertToXContentEquivalent(updateRequest.upsertRequest().source(), parsedUpdateRequest.upsertRequest().source(), xContentType); + } else { + assertNull(parsedUpdateRequest.upsertRequest()); + } + } + + public void testUpdateWithDifferentContentTypes() throws IOException { + IllegalStateException exception = expectThrows(IllegalStateException.class, () -> { + UpdateRequest updateRequest = new UpdateRequest(); + updateRequest.doc(new IndexRequest().source(singletonMap("field", "doc"), XContentType.JSON)); + updateRequest.upsert(new IndexRequest().source(singletonMap("field", "upsert"), XContentType.YAML)); + Request.update(updateRequest); + }); + assertEquals("Update request cannot have different content types for doc [JSON] and upsert [YAML] documents", + exception.getMessage()); + } + + public void testBulk() throws IOException { + Map expectedParams = new HashMap<>(); + + BulkRequest bulkRequest = new BulkRequest(); + if (randomBoolean()) { + String timeout = randomTimeValue(); + bulkRequest.timeout(timeout); + expectedParams.put("timeout", timeout); + } else { + expectedParams.put("timeout", BulkShardRequest.DEFAULT_TIMEOUT.getStringRep()); + } + + if (randomBoolean()) { + WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); + 
bulkRequest.setRefreshPolicy(refreshPolicy); + if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) { + expectedParams.put("refresh", refreshPolicy.getValue()); + } + } + + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + + int nbItems = randomIntBetween(10, 100); + for (int i = 0; i < nbItems; i++) { + String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); + + BytesReference source = RandomObjects.randomSource(random(), xContentType); + DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); + + DocWriteRequest docWriteRequest = null; + if (opType == DocWriteRequest.OpType.INDEX) { + IndexRequest indexRequest = new IndexRequest(index, type, id).source(source, xContentType); + docWriteRequest = indexRequest; + if (randomBoolean()) { + indexRequest.setPipeline(randomAlphaOfLength(5)); + } + if (randomBoolean()) { + indexRequest.parent(randomAlphaOfLength(5)); + } + } else if (opType == DocWriteRequest.OpType.CREATE) { + IndexRequest createRequest = new IndexRequest(index, type, id).source(source, xContentType).create(true); + docWriteRequest = createRequest; + if (randomBoolean()) { + createRequest.parent(randomAlphaOfLength(5)); + } + } else if (opType == DocWriteRequest.OpType.UPDATE) { + final UpdateRequest updateRequest = new UpdateRequest(index, type, id).doc(new IndexRequest().source(source, xContentType)); + docWriteRequest = updateRequest; + if (randomBoolean()) { + updateRequest.retryOnConflict(randomIntBetween(1, 5)); + } + if (randomBoolean()) { + randomizeFetchSourceContextParams(updateRequest::fetchSource, new HashMap<>()); + } + if (randomBoolean()) { + updateRequest.parent(randomAlphaOfLength(5)); + } + } else if (opType == DocWriteRequest.OpType.DELETE) { + docWriteRequest = new DeleteRequest(index, type, id); + } + + if (randomBoolean()) { + docWriteRequest.routing(randomAlphaOfLength(10)); + } + if (randomBoolean()) { + 
docWriteRequest.version(randomNonNegativeLong()); + } + if (randomBoolean()) { + docWriteRequest.versionType(randomFrom(VersionType.values())); + } + bulkRequest.add(docWriteRequest); + } + + Request request = Request.bulk(bulkRequest); + assertEquals("/_bulk", request.endpoint); + assertEquals(expectedParams, request.params); + assertEquals("POST", request.method); + assertEquals(xContentType.mediaType(), request.entity.getContentType().getValue()); + byte[] content = new byte[(int) request.entity.getContentLength()]; + try (InputStream inputStream = request.entity.getContent()) { + Streams.readFully(inputStream, content); + } + + BulkRequest parsedBulkRequest = new BulkRequest(); + parsedBulkRequest.add(content, 0, content.length, xContentType); + assertEquals(bulkRequest.numberOfActions(), parsedBulkRequest.numberOfActions()); + + for (int i = 0; i < bulkRequest.numberOfActions(); i++) { + DocWriteRequest originalRequest = bulkRequest.requests().get(i); + DocWriteRequest parsedRequest = parsedBulkRequest.requests().get(i); + + assertEquals(originalRequest.opType(), parsedRequest.opType()); + assertEquals(originalRequest.index(), parsedRequest.index()); + assertEquals(originalRequest.type(), parsedRequest.type()); + assertEquals(originalRequest.id(), parsedRequest.id()); + assertEquals(originalRequest.routing(), parsedRequest.routing()); + assertEquals(originalRequest.parent(), parsedRequest.parent()); + assertEquals(originalRequest.version(), parsedRequest.version()); + assertEquals(originalRequest.versionType(), parsedRequest.versionType()); + + DocWriteRequest.OpType opType = originalRequest.opType(); + if (opType == DocWriteRequest.OpType.INDEX) { + IndexRequest indexRequest = (IndexRequest) originalRequest; + IndexRequest parsedIndexRequest = (IndexRequest) parsedRequest; + + assertEquals(indexRequest.getPipeline(), parsedIndexRequest.getPipeline()); + assertToXContentEquivalent(indexRequest.source(), parsedIndexRequest.source(), xContentType); + } else if 
(opType == DocWriteRequest.OpType.UPDATE) { + UpdateRequest updateRequest = (UpdateRequest) originalRequest; + UpdateRequest parsedUpdateRequest = (UpdateRequest) parsedRequest; + + assertEquals(updateRequest.retryOnConflict(), parsedUpdateRequest.retryOnConflict()); + assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource()); + if (updateRequest.doc() != null) { + assertToXContentEquivalent(updateRequest.doc().source(), parsedUpdateRequest.doc().source(), xContentType); + } else { + assertNull(parsedUpdateRequest.doc()); + } + } + } + } + + public void testBulkWithDifferentContentTypes() throws IOException { + { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new DeleteRequest("index", "type", "0")); + bulkRequest.add(new UpdateRequest("index", "type", "1").script(mockScript("test"))); + bulkRequest.add(new DeleteRequest("index", "type", "2")); + + Request request = Request.bulk(bulkRequest); + assertEquals(XContentType.JSON.mediaType(), request.entity.getContentType().getValue()); + } + { + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new DeleteRequest("index", "type", "0")); + bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), xContentType)); + bulkRequest.add(new DeleteRequest("index", "type", "2")); + + Request request = Request.bulk(bulkRequest); + assertEquals(xContentType.mediaType(), request.entity.getContentType().getValue()); + } + { + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + UpdateRequest updateRequest = new UpdateRequest("index", "type", "0"); + if (randomBoolean()) { + updateRequest.doc(new IndexRequest().source(singletonMap("field", "value"), xContentType)); + } else { + updateRequest.upsert(new IndexRequest().source(singletonMap("field", "value"), xContentType)); + } + + Request request = Request.bulk(new 
BulkRequest().add(updateRequest)); + assertEquals(xContentType.mediaType(), request.entity.getContentType().getValue()); + } + { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.SMILE)); + bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Request.bulk(bulkRequest)); + assertEquals("Mismatching content-type found for request with content-type [JSON], " + + "previous requests have content-type [SMILE]", exception.getMessage()); + } + { + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest("index", "type", "0") + .source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new IndexRequest("index", "type", "1") + .source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new UpdateRequest("index", "type", "2") + .doc(new IndexRequest().source(singletonMap("field", "value"), XContentType.JSON)) + .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE)) + ); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Request.bulk(bulkRequest)); + assertEquals("Mismatching content-type found for request with content-type [SMILE], " + + "previous requests have content-type [JSON]", exception.getMessage()); + } + { + XContentType xContentType = randomFrom(XContentType.CBOR, XContentType.YAML); + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new DeleteRequest("index", "type", "0")); + bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new DeleteRequest("index", "type", "2")); + bulkRequest.add(new DeleteRequest("index", "type", "3")); + bulkRequest.add(new IndexRequest("index", "type", 
"4").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), xContentType)); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Request.bulk(bulkRequest)); + assertEquals("Unsupported content-type found for request with content-type [" + xContentType + + "], only JSON and SMILE are supported", exception.getMessage()); + } + } + + public void testSearch() throws Exception { + SearchRequest searchRequest = new SearchRequest(); + int numIndices = randomIntBetween(0, 5); + String[] indices = new String[numIndices]; + for (int i = 0; i < numIndices; i++) { + indices[i] = "index-" + randomAlphaOfLengthBetween(2, 5); + } + searchRequest.indices(indices); + int numTypes = randomIntBetween(0, 5); + String[] types = new String[numTypes]; + for (int i = 0; i < numTypes; i++) { + types[i] = "type-" + randomAlphaOfLengthBetween(2, 5); + } + searchRequest.types(types); + + Map expectedParams = new HashMap<>(); + expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true"); + if (randomBoolean()) { + searchRequest.routing(randomAlphaOfLengthBetween(3, 10)); + expectedParams.put("routing", searchRequest.routing()); + } + if (randomBoolean()) { + searchRequest.preference(randomAlphaOfLengthBetween(3, 10)); + expectedParams.put("preference", searchRequest.preference()); + } + if (randomBoolean()) { + searchRequest.searchType(randomFrom(SearchType.values())); + } + expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT)); + if (randomBoolean()) { + searchRequest.requestCache(randomBoolean()); + expectedParams.put("request_cache", Boolean.toString(searchRequest.requestCache())); + } + if (randomBoolean()) { + searchRequest.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE)); + } + expectedParams.put("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize())); + if (randomBoolean()) { 
+ searchRequest.scroll(randomTimeValue()); + expectedParams.put("scroll", searchRequest.scroll().keepAlive().getStringRep()); + } + + if (randomBoolean()) { + searchRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); + } + expectedParams.put("ignore_unavailable", Boolean.toString(searchRequest.indicesOptions().ignoreUnavailable())); + expectedParams.put("allow_no_indices", Boolean.toString(searchRequest.indicesOptions().allowNoIndices())); + if (searchRequest.indicesOptions().expandWildcardsOpen() && searchRequest.indicesOptions().expandWildcardsClosed()) { + expectedParams.put("expand_wildcards", "open,closed"); + } else if (searchRequest.indicesOptions().expandWildcardsOpen()) { + expectedParams.put("expand_wildcards", "open"); + } else if (searchRequest.indicesOptions().expandWildcardsClosed()) { + expectedParams.put("expand_wildcards", "closed"); + } else { + expectedParams.put("expand_wildcards", "none"); + } + + SearchSourceBuilder searchSourceBuilder = null; + if (frequently()) { + searchSourceBuilder = new SearchSourceBuilder(); + if (randomBoolean()) { + searchSourceBuilder.size(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + searchSourceBuilder.from(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + searchSourceBuilder.minScore(randomFloat()); + } + if (randomBoolean()) { + searchSourceBuilder.explain(randomBoolean()); + } + if (randomBoolean()) { + searchSourceBuilder.profile(randomBoolean()); + } + if (randomBoolean()) { + searchSourceBuilder.highlighter(new HighlightBuilder().field(randomAlphaOfLengthBetween(3, 10))); + } + if (randomBoolean()) { + searchSourceBuilder.query(new TermQueryBuilder(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10))); + } + if (randomBoolean()) { + searchSourceBuilder.aggregation(new TermsAggregationBuilder(randomAlphaOfLengthBetween(3, 10), ValueType.STRING) + 
.field(randomAlphaOfLengthBetween(3, 10))); + } + if (randomBoolean()) { + searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(randomAlphaOfLengthBetween(3, 10), + new CompletionSuggestionBuilder(randomAlphaOfLengthBetween(3, 10)))); + } + if (randomBoolean()) { + searchSourceBuilder.addRescorer(new QueryRescorerBuilder( + new TermQueryBuilder(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)))); + } + if (randomBoolean()) { + searchSourceBuilder.collapse(new CollapseBuilder(randomAlphaOfLengthBetween(3, 10))); + } + searchRequest.source(searchSourceBuilder); + } + + Request request = Request.search(searchRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + String type = String.join(",", types); + if (Strings.hasLength(type)) { + endpoint.add(type); + } + endpoint.add("_search"); + assertEquals(endpoint.toString(), request.endpoint); + assertEquals(expectedParams, request.params); + if (searchSourceBuilder == null) { + assertNull(request.entity); + } else { + assertToXContentBody(searchSourceBuilder, request.entity); + } + } + + public void testSearchScroll() throws IOException { + SearchScrollRequest searchScrollRequest = new SearchScrollRequest(); + searchScrollRequest.scrollId(randomAlphaOfLengthBetween(5, 10)); + if (randomBoolean()) { + searchScrollRequest.scroll(randomPositiveTimeValue()); + } + Request request = Request.searchScroll(searchScrollRequest); + assertEquals("GET", request.method); + assertEquals("/_search/scroll", request.endpoint); + assertEquals(0, request.params.size()); + assertToXContentBody(searchScrollRequest, request.entity); + assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaType(), request.entity.getContentType().getValue()); + } + + public void testClearScroll() throws IOException { + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + int numScrolls = 
randomIntBetween(1, 10); + for (int i = 0; i < numScrolls; i++) { + clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10)); + } + Request request = Request.clearScroll(clearScrollRequest); + assertEquals("DELETE", request.method); + assertEquals("/_search/scroll", request.endpoint); + assertEquals(0, request.params.size()); + assertToXContentBody(clearScrollRequest, request.entity); + assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaType(), request.entity.getContentType().getValue()); + } + + private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException { + BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, Request.REQUEST_BODY_CONTENT_TYPE, false); + assertEquals(XContentType.JSON.mediaType(), actualEntity.getContentType().getValue()); + assertEquals(expectedBytes, new BytesArray(EntityUtils.toByteArray(actualEntity))); + } + + public void testParams() { + final int nbParams = randomIntBetween(0, 10); + Request.Params params = Request.Params.builder(); + Map expectedParams = new HashMap<>(); + for (int i = 0; i < nbParams; i++) { + String paramName = "p_" + i; + String paramValue = randomAlphaOfLength(5); + params.putParam(paramName, paramValue); + expectedParams.put(paramName, paramValue); + } + + Map requestParams = params.getParams(); + assertEquals(nbParams, requestParams.size()); + assertEquals(expectedParams, requestParams); + } + + public void testParamsNoDuplicates() { + Request.Params params = Request.Params.builder(); + params.putParam("test", "1"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> params.putParam("test", "2")); + assertEquals("Request parameter [test] is already registered", e.getMessage()); + + Map requestParams = params.getParams(); + assertEquals(1L, requestParams.size()); + assertEquals("1", requestParams.values().iterator().next()); + } + + public void testEndpoint() { + assertEquals("/", Request.endpoint()); + 
assertEquals("/", Request.endpoint(Strings.EMPTY_ARRAY)); + assertEquals("/", Request.endpoint("")); + assertEquals("/a/b", Request.endpoint("a", "b")); + assertEquals("/a/b/_create", Request.endpoint("a", "b", "_create")); + assertEquals("/a/b/c/_create", Request.endpoint("a", "b", "c", "_create")); + assertEquals("/a/_create", Request.endpoint("a", null, null, "_create")); + } + + public void testEnforceSameContentType() { + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + IndexRequest indexRequest = new IndexRequest().source(singletonMap("field", "value"), xContentType); + assertEquals(xContentType, enforceSameContentType(indexRequest, null)); + assertEquals(xContentType, enforceSameContentType(indexRequest, xContentType)); + + XContentType bulkContentType = randomBoolean() ? xContentType : null; + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> + enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR), bulkContentType)); + assertEquals("Unsupported content-type found for request with content-type [CBOR], only JSON and SMILE are supported", + exception.getMessage()); + + exception = expectThrows(IllegalArgumentException.class, () -> + enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML), bulkContentType)); + assertEquals("Unsupported content-type found for request with content-type [YAML], only JSON and SMILE are supported", + exception.getMessage()); + + XContentType requestContentType = xContentType == XContentType.JSON ? 
XContentType.SMILE : XContentType.JSON; + + exception = expectThrows(IllegalArgumentException.class, () -> + enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType)); + assertEquals("Mismatching content-type found for request with content-type [" + requestContentType + "], " + + "previous requests have content-type [" + xContentType + "]", exception.getMessage()); + } + + /** + * Randomize the {@link FetchSourceContext} request parameters. + */ + private static void randomizeFetchSourceContextParams(Consumer consumer, Map expectedParams) { + if (randomBoolean()) { + if (randomBoolean()) { + boolean fetchSource = randomBoolean(); + consumer.accept(new FetchSourceContext(fetchSource)); + if (fetchSource == false) { + expectedParams.put("_source", "false"); + } + } else { + int numIncludes = randomIntBetween(0, 5); + String[] includes = new String[numIncludes]; + StringBuilder includesParam = new StringBuilder(); + for (int i = 0; i < numIncludes; i++) { + String include = randomAlphaOfLengthBetween(3, 10); + includes[i] = include; + includesParam.append(include); + if (i < numIncludes - 1) { + includesParam.append(","); + } + } + if (numIncludes > 0) { + expectedParams.put("_source_include", includesParam.toString()); + } + int numExcludes = randomIntBetween(0, 5); + String[] excludes = new String[numExcludes]; + StringBuilder excludesParam = new StringBuilder(); + for (int i = 0; i < numExcludes; i++) { + String exclude = randomAlphaOfLengthBetween(3, 10); + excludes[i] = exclude; + excludesParam.append(exclude); + if (i < numExcludes - 1) { + excludesParam.append(","); + } + } + if (numExcludes > 0) { + expectedParams.put("_source_exclude", excludesParam.toString()); + } + consumer.accept(new FetchSourceContext(true, includes, excludes)); + } + } + } + + private static void setRandomTimeout(ReplicationRequest request, Map expectedParams) { + if (randomBoolean()) { + String timeout = randomTimeValue(); + 
request.timeout(timeout); + expectedParams.put("timeout", timeout); + } else { + expectedParams.put("timeout", ReplicationRequest.DEFAULT_TIMEOUT.getStringRep()); + } + } + + private static void setRandomRefreshPolicy(ReplicatedWriteRequest request, Map expectedParams) { + if (randomBoolean()) { + WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); + request.setRefreshPolicy(refreshPolicy); + if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) { + expectedParams.put("refresh", refreshPolicy.getValue()); + } + } + } + + private static void setRandomVersion(DocWriteRequest request, Map expectedParams) { + if (randomBoolean()) { + long version = randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED, Versions.NOT_FOUND, randomNonNegativeLong()); + request.version(version); + if (version != Versions.MATCH_ANY) { + expectedParams.put("version", Long.toString(version)); + } + } + } + + private static void setRandomVersionType(DocWriteRequest request, Map expectedParams) { + if (randomBoolean()) { + VersionType versionType = randomFrom(VersionType.values()); + request.versionType(versionType); + if (versionType != VersionType.INTERNAL) { + expectedParams.put("version_type", versionType.name().toLowerCase(Locale.ROOT)); + } + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java new file mode 100644 index 0000000000000..cb32f9ae9dd93 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.mockito.Mockito.mock; + +/** + * This test works against a {@link RestHighLevelClient} subclass that simulates how custom response sections returned by + * Elasticsearch plugins can be parsed using the high level client.
+ */ +public class RestHighLevelClientExtTests extends ESTestCase { + + private RestHighLevelClient restHighLevelClient; + + @Before + public void initClient() throws IOException { + RestClient restClient = mock(RestClient.class); + restHighLevelClient = new RestHighLevelClientExt(restClient); + } + + public void testParseEntityCustomResponseSection() throws IOException { + { + HttpEntity jsonEntity = new StringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON); + BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent); + assertThat(customSection, instanceOf(CustomResponseSection1.class)); + CustomResponseSection1 customResponseSection1 = (CustomResponseSection1) customSection; + assertEquals("value", customResponseSection1.value); + } + { + HttpEntity jsonEntity = new StringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON); + BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent); + assertThat(customSection, instanceOf(CustomResponseSection2.class)); + CustomResponseSection2 customResponseSection2 = (CustomResponseSection2) customSection; + assertArrayEquals(new String[]{"item1", "item2"}, customResponseSection2.values); + } + } + + private static class RestHighLevelClientExt extends RestHighLevelClient { + + private RestHighLevelClientExt(RestClient restClient) { + super(restClient, getNamedXContentsExt()); + } + + private static List getNamedXContentsExt() { + List entries = new ArrayList<>(); + entries.add(new NamedXContentRegistry.Entry(BaseCustomResponseSection.class, new ParseField("custom1"), + CustomResponseSection1::fromXContent)); + entries.add(new NamedXContentRegistry.Entry(BaseCustomResponseSection.class, new ParseField("custom2"), + CustomResponseSection2::fromXContent)); + return entries; + } + } + + private abstract static class 
BaseCustomResponseSection { + + static BaseCustomResponseSection fromXContent(XContentParser parser) throws IOException { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + BaseCustomResponseSection custom = parser.namedObject(BaseCustomResponseSection.class, parser.currentName(), null); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return custom; + } + } + + private static class CustomResponseSection1 extends BaseCustomResponseSection { + + private final String value; + + private CustomResponseSection1(String value) { + this.value = value; + } + + static CustomResponseSection1 fromXContent(XContentParser parser) throws IOException { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("field", parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + CustomResponseSection1 responseSection1 = new CustomResponseSection1(parser.text()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return responseSection1; + } + } + + private static class CustomResponseSection2 extends BaseCustomResponseSection { + + private final String[] values; + + private CustomResponseSection2(String[] values) { + this.values = values; + } + + static CustomResponseSection2 fromXContent(XContentParser parser) throws IOException { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("array", parser.currentName()); + assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); + List values = new ArrayList<>(); + while(parser.nextToken().isValue()) { + values.add(parser.text()); + } + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + CustomResponseSection2 responseSection2 = new 
CustomResponseSection2(values.toArray(new String[values.size()])); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return responseSection2; + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java new file mode 100644 index 0000000000000..7fc0733a7f0c7 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -0,0 +1,666 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import com.fasterxml.jackson.core.JsonParseException; +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.RequestLine; +import org.apache.http.StatusLine; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicRequestLine; +import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.Build; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.main.MainRequest; +import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.cbor.CborXContent; +import org.elasticsearch.common.xcontent.smile.SmileXContent; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHits; +import 
org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; +import org.mockito.ArgumentMatcher; +import org.mockito.internal.matchers.ArrayEquals; +import org.mockito.internal.matchers.VarargMatcher; + +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.client.RestClientTestUtil.randomHeaders; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.mockito.Matchers.anyMapOf; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.anyVararg; +import static org.mockito.Matchers.argThat; +import static org.mockito.Matchers.eq; +import static org.mockito.Matchers.isNotNull; +import static org.mockito.Matchers.isNull; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RestHighLevelClientTests extends ESTestCase { + + private static final ProtocolVersion HTTP_PROTOCOL = new ProtocolVersion("http", 1, 1); + private static final RequestLine REQUEST_LINE = new BasicRequestLine("GET", "/", HTTP_PROTOCOL); + + private RestClient restClient; + private RestHighLevelClient restHighLevelClient; + + @Before + public void initClient() { + restClient = mock(RestClient.class); + restHighLevelClient = new RestHighLevelClient(restClient); + } + + public void testPingSuccessful() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + Response response = mock(Response.class); + 
when(response.getStatusLine()).thenReturn(newStatusLine(RestStatus.OK)); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenReturn(response); + assertTrue(restHighLevelClient.ping(headers)); + verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()), + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + public void testPing404NotFound() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + Response response = mock(Response.class); + when(response.getStatusLine()).thenReturn(newStatusLine(RestStatus.NOT_FOUND)); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenReturn(response); + assertFalse(restHighLevelClient.ping(headers)); + verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()), + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + public void testPingSocketTimeout() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(new SocketTimeoutException()); + expectThrows(SocketTimeoutException.class, () -> restHighLevelClient.ping(headers)); + verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()), + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + public void testInfo() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid", + Build.CURRENT, true); + mockResponse(testInfo); + MainResponse receivedInfo = restHighLevelClient.info(headers); + assertEquals(testInfo, receivedInfo); + verify(restClient).performRequest(eq("GET"), eq("/"), 
eq(Collections.emptyMap()), + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + public void testSearchScroll() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + SearchResponse mockSearchResponse = new SearchResponse(new SearchResponseSections(SearchHits.empty(), InternalAggregations.EMPTY, + null, false, false, null, 1), randomAlphaOfLengthBetween(5, 10), 5, 5, 100, new ShardSearchFailure[0]); + mockResponse(mockSearchResponse); + SearchResponse searchResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(randomAlphaOfLengthBetween(5, 10)), + headers); + assertEquals(mockSearchResponse.getScrollId(), searchResponse.getScrollId()); + assertEquals(0, searchResponse.getHits().totalHits); + assertEquals(5, searchResponse.getTotalShards()); + assertEquals(5, searchResponse.getSuccessfulShards()); + assertEquals(100, searchResponse.getTook().getMillis()); + verify(restClient).performRequest(eq("GET"), eq("/_search/scroll"), eq(Collections.emptyMap()), + isNotNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + public void testClearScroll() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + ClearScrollResponse mockClearScrollResponse = new ClearScrollResponse(randomBoolean(), randomIntBetween(0, Integer.MAX_VALUE)); + mockResponse(mockClearScrollResponse); + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10)); + ClearScrollResponse clearScrollResponse = restHighLevelClient.clearScroll(clearScrollRequest, headers); + assertEquals(mockClearScrollResponse.isSucceeded(), clearScrollResponse.isSucceeded()); + assertEquals(mockClearScrollResponse.getNumFreed(), clearScrollResponse.getNumFreed()); + verify(restClient).performRequest(eq("DELETE"), eq("/_search/scroll"), eq(Collections.emptyMap()), + isNotNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + 
+ private void mockResponse(ToXContent toXContent) throws IOException { + Response response = mock(Response.class); + ContentType contentType = ContentType.parse(Request.REQUEST_BODY_CONTENT_TYPE.mediaType()); + String requestBody = toXContent(toXContent, Request.REQUEST_BODY_CONTENT_TYPE, false).utf8ToString(); + when(response.getEntity()).thenReturn(new NStringEntity(requestBody, contentType)); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenReturn(response); + } + + public void testRequestValidation() { + ActionRequestValidationException validationException = new ActionRequestValidationException(); + validationException.addValidationError("validation error"); + ActionRequest request = new ActionRequest() { + @Override + public ActionRequestValidationException validate() { + return validationException; + } + }; + + { + ActionRequestValidationException actualException = expectThrows(ActionRequestValidationException.class, + () -> restHighLevelClient.performRequest(request, null, null, null)); + assertSame(validationException, actualException); + } + { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + restHighLevelClient.performRequestAsync(request, null, null, trackingActionListener, null); + assertSame(validationException, trackingActionListener.exception.get()); + } + } + + public void testParseEntity() throws IOException { + { + IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(null, null)); + assertEquals("Response body expected but not returned", ise.getMessage()); + } + { + IllegalStateException ise = expectThrows(IllegalStateException.class, + () -> restHighLevelClient.parseEntity(new StringEntity("", (ContentType) null), null)); + assertEquals("Elasticsearch didn't return the [Content-Type] header, unable to parse response body", ise.getMessage()); + } + { + StringEntity entity = new 
StringEntity("", ContentType.APPLICATION_SVG_XML); + IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(entity, null)); + assertEquals("Unsupported Content-Type: " + entity.getContentType().getValue(), ise.getMessage()); + } + { + CheckedFunction entityParser = parser -> { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertTrue(parser.nextToken().isValue()); + String value = parser.text(); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + return value; + }; + HttpEntity jsonEntity = new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON); + assertEquals("value", restHighLevelClient.parseEntity(jsonEntity, entityParser)); + HttpEntity yamlEntity = new StringEntity("---\nfield: value\n", ContentType.create("application/yaml")); + assertEquals("value", restHighLevelClient.parseEntity(yamlEntity, entityParser)); + HttpEntity smileEntity = createBinaryEntity(SmileXContent.contentBuilder(), ContentType.create("application/smile")); + assertEquals("value", restHighLevelClient.parseEntity(smileEntity, entityParser)); + HttpEntity cborEntity = createBinaryEntity(CborXContent.contentBuilder(), ContentType.create("application/cbor")); + assertEquals("value", restHighLevelClient.parseEntity(cborEntity, entityParser)); + } + } + + private static HttpEntity createBinaryEntity(XContentBuilder xContentBuilder, ContentType contentType) throws IOException { + try (XContentBuilder builder = xContentBuilder) { + builder.startObject(); + builder.field("field", "value"); + builder.endObject(); + return new ByteArrayEntity(builder.bytes().toBytesRef().bytes, contentType); + } + } + + public void testConvertExistsResponse() { + RestStatus restStatus = randomBoolean() ? 
RestStatus.OK : randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + boolean result = RestHighLevelClient.convertExistsResponse(response); + assertEquals(restStatus == RestStatus.OK, result); + } + + public void testParseResponseException() throws IOException { + { + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException); + assertEquals(responseException.getMessage(), elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + } + { + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", + ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException); + assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getSuppressed()[0]); + } + { + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse 
httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException); + assertEquals("Unable to parse response body", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IOException.class)); + } + { + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException); + assertEquals("Unable to parse response body", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IllegalStateException.class)); + } + } + + public void testPerformRequestOnSuccess() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + Response mockResponse = new 
Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenReturn(mockResponse); + { + Integer result = restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> response.getStatusLine().getStatusCode(), Collections.emptySet()); + assertEquals(restStatus.getStatus(), result.intValue()); + } + { + IOException ioe = expectThrows(IOException.class, () -> restHighLevelClient.performRequest(mainRequest, + requestConverter, response -> {throw new IllegalStateException();}, Collections.emptySet())); + assertEquals("Unable to parse response body for Response{requestLine=GET / http/1.1, host=http://localhost:9200, " + + "response=http/1.1 " + restStatus.getStatus() + " " + restStatus.name() + "}", ioe.getMessage()); + } + } + + public void testPerformRequestOnResponseExceptionWithoutEntity() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, + () -> restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> response.getStatusLine().getStatusCode(), Collections.emptySet())); + assertEquals(responseException.getMessage(), elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); 
+ assertSame(responseException, elasticsearchException.getCause()); + } + + public void testPerformRequestOnResponseExceptionWithEntity() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", + ContentType.APPLICATION_JSON)); + Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, + () -> restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> response.getStatusLine().getStatusCode(), Collections.emptySet())); + assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getSuppressed()[0]); + } + + public void testPerformRequestOnResponseExceptionWithBrokenEntity() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON)); + Response mockResponse = new Response(REQUEST_LINE, new 
HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, + () -> restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> response.getStatusLine().getStatusCode(), Collections.emptySet())); + assertEquals("Unable to parse response body", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertThat(elasticsearchException.getSuppressed()[0], instanceOf(JsonParseException.class)); + } + + public void testPerformRequestOnResponseExceptionWithBrokenEntity2() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); + Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, + () -> restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> response.getStatusLine().getStatusCode(), Collections.emptySet())); + assertEquals("Unable to parse response body", 
elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IllegalStateException.class)); + } + + public void testPerformRequestOnResponseExceptionWithIgnores() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); + Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + //although we got an exception, we turn it into a successful response because the status code was provided among ignores + assertEquals(Integer.valueOf(404), restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> response.getStatusLine().getStatusCode(), Collections.singleton(404))); + } + + public void testPerformRequestOnResponseExceptionWithIgnoresErrorNoBody() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); + Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + ElasticsearchException elasticsearchException = 
expectThrows(ElasticsearchException.class, + () -> restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> {throw new IllegalStateException();}, Collections.singleton(404))); + assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertEquals(responseException.getMessage(), elasticsearchException.getMessage()); + } + + public void testPerformRequestOnResponseExceptionWithIgnoresErrorValidBody() throws IOException { + MainRequest mainRequest = new MainRequest(); + CheckedFunction requestConverter = request -> + new Request("GET", "/", Collections.emptyMap(), null); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); + httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}", + ContentType.APPLICATION_JSON)); + Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(mockResponse); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenThrow(responseException); + ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, + () -> restHighLevelClient.performRequest(mainRequest, requestConverter, + response -> {throw new IllegalStateException();}, Collections.singleton(404))); + assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getSuppressed()[0]); + assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage()); + } + + public void testWrapResponseListenerOnSuccess() { + { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> 
response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + responseListener.onSuccess(new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse)); + assertNull(trackingActionListener.exception.get()); + assertEquals(restStatus.getStatus(), trackingActionListener.statusCode.get()); + } + { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> {throw new IllegalStateException();}, trackingActionListener, Collections.emptySet()); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + responseListener.onSuccess(new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse)); + assertThat(trackingActionListener.exception.get(), instanceOf(IOException.class)); + IOException ioe = (IOException) trackingActionListener.exception.get(); + assertEquals("Unable to parse response body for Response{requestLine=GET / http/1.1, host=http://localhost:9200, " + + "response=http/1.1 " + restStatus.getStatus() + " " + restStatus.name() + "}", ioe.getMessage()); + assertThat(ioe.getCause(), instanceOf(IllegalStateException.class)); + } + } + + public void testWrapResponseListenerOnException() { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); + IllegalStateException exception = new IllegalStateException(); + responseListener.onFailure(exception); + assertSame(exception, trackingActionListener.exception.get()); + } + + public void 
testWrapResponseListenerOnResponseExceptionWithoutEntity() throws IOException { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + responseListener.onFailure(responseException); + assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class)); + ElasticsearchException elasticsearchException = (ElasticsearchException) trackingActionListener.exception.get(); + assertEquals(responseException.getMessage(), elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + } + + public void testWrapResponseListenerOnResponseExceptionWithEntity() throws IOException { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", + ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + 
responseListener.onFailure(responseException); + assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class)); + ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get(); + assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getSuppressed()[0]); + } + + public void testWrapResponseListenerOnResponseExceptionWithBrokenEntity() throws IOException { + { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + responseListener.onFailure(responseException); + assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class)); + ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get(); + assertEquals("Unable to parse response body", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertThat(elasticsearchException.getSuppressed()[0], instanceOf(JsonParseException.class)); + } + { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = 
restHighLevelClient.wrapResponseListener( + response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); + RestStatus restStatus = randomFrom(RestStatus.values()); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); + httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + responseListener.onFailure(responseException); + assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class)); + ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get(); + assertEquals("Unable to parse response body", elasticsearchException.getMessage()); + assertEquals(restStatus, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IllegalStateException.class)); + } + } + + public void testWrapResponseListenerOnResponseExceptionWithIgnores() throws IOException { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.singleton(404)); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + responseListener.onFailure(responseException); + //although we got an exception, we turn it into a successful response because the status code was provided among ignores + 
assertNull(trackingActionListener.exception.get()); + assertEquals(404, trackingActionListener.statusCode.get()); + } + + public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorNoBody() throws IOException { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + //response parsing throws exception while handling ignores. same as when GetResponse#fromXContent throws error when trying + //to parse a 404 response which contains an error rather than a valid document not found response. + ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> { throw new IllegalStateException(); }, trackingActionListener, Collections.singleton(404)); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + responseListener.onFailure(responseException); + assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class)); + ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get(); + assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getCause()); + assertEquals(responseException.getMessage(), elasticsearchException.getMessage()); + } + + public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorValidBody() throws IOException { + TrackingActionListener trackingActionListener = new TrackingActionListener(); + //response parsing throws exception while handling ignores. same as when GetResponse#fromXContent throws error when trying + //to parse a 404 response which contains an error rather than a valid document not found response. 
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener( + response -> { throw new IllegalStateException(); }, trackingActionListener, Collections.singleton(404)); + HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); + httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}", + ContentType.APPLICATION_JSON)); + Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); + ResponseException responseException = new ResponseException(response); + responseListener.onFailure(responseException); + assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class)); + ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get(); + assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status()); + assertSame(responseException, elasticsearchException.getSuppressed()[0]); + assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage()); + } + + public void testNamedXContents() { + List namedXContents = RestHighLevelClient.getDefaultNamedXContents(); + assertEquals(45, namedXContents.size()); + Map, Integer> categories = new HashMap<>(); + for (NamedXContentRegistry.Entry namedXContent : namedXContents) { + Integer counter = categories.putIfAbsent(namedXContent.categoryClass, 1); + if (counter != null) { + categories.put(namedXContent.categoryClass, counter + 1); + } + } + assertEquals(2, categories.size()); + assertEquals(Integer.valueOf(42), categories.get(Aggregation.class)); + assertEquals(Integer.valueOf(3), categories.get(Suggest.Suggestion.class)); + } + + private static class TrackingActionListener implements ActionListener { + private final AtomicInteger statusCode = new AtomicInteger(-1); + private final AtomicReference exception = new AtomicReference<>(); + + @Override + public void onResponse(Integer 
statusCode) { + assertTrue(this.statusCode.compareAndSet(-1, statusCode)); + } + + @Override + public void onFailure(Exception e) { + assertTrue(exception.compareAndSet(null, e)); + } + } + + private static class HeadersVarargMatcher extends ArgumentMatcher implements VarargMatcher { + private Header[] expectedHeaders; + + HeadersVarargMatcher(Header... expectedHeaders) { + this.expectedHeaders = expectedHeaders; + } + + @Override + public boolean matches(Object varargArgument) { + if (varargArgument instanceof Header[]) { + Header[] actualHeaders = (Header[]) varargArgument; + return new ArrayEquals(expectedHeaders).matches(actualHeaders); + } + return false; + } + } + + private static StatusLine newStatusLine(RestStatus restStatus) { + return new BasicStatusLine(HTTP_PROTOCOL, restStatus.getStatus(), restStatus.name()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java new file mode 100644 index 0000000000000..3d27386246c90 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -0,0 +1,465 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.join.aggregations.Children; +import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.bucket.range.Range; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats; +import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; + +import static 
org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThan; + +public class SearchIT extends ESRestHighLevelClientTestCase { + + @Before + public void indexDocuments() throws IOException { + StringEntity doc1 = new StringEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index/type/1", Collections.emptyMap(), doc1); + StringEntity doc2 = new StringEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index/type/2", Collections.emptyMap(), doc2); + StringEntity doc3 = new StringEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index/type/3", Collections.emptyMap(), doc3); + StringEntity doc4 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index/type/4", Collections.emptyMap(), doc4); + StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index/type/5", Collections.emptyMap(), doc5); + client().performRequest("POST", "/index/_refresh"); + } + + public void testSearchNoQuery() throws IOException { + SearchRequest searchRequest = new SearchRequest(); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getAggregations()); + assertNull(searchResponse.getSuggest()); 
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertEquals(5, searchResponse.getHits().totalHits); + assertEquals(5, searchResponse.getHits().getHits().length); + for (SearchHit searchHit : searchResponse.getHits().getHits()) { + assertEquals("index", searchHit.getIndex()); + assertEquals("type", searchHit.getType()); + assertThat(Integer.valueOf(searchHit.getId()), both(greaterThan(0)).and(lessThan(6))); + assertEquals(1.0f, searchHit.getScore(), 0); + assertEquals(-1L, searchHit.getVersion()); + assertNotNull(searchHit.getSourceAsMap()); + assertEquals(3, searchHit.getSourceAsMap().size()); + assertTrue(searchHit.getSourceAsMap().containsKey("type")); + assertTrue(searchHit.getSourceAsMap().containsKey("num")); + assertTrue(searchHit.getSourceAsMap().containsKey("num2")); + } + } + + public void testSearchMatchQuery() throws IOException { + SearchRequest searchRequest = new SearchRequest(); + searchRequest.source(new SearchSourceBuilder().query(new MatchQueryBuilder("num", 10))); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getAggregations()); + assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertEquals(1, searchResponse.getHits().totalHits); + assertEquals(1, searchResponse.getHits().getHits().length); + assertThat(searchResponse.getHits().getMaxScore(), greaterThan(0f)); + SearchHit searchHit = searchResponse.getHits().getHits()[0]; + assertEquals("index", searchHit.getIndex()); + assertEquals("type", searchHit.getType()); + assertEquals("1", searchHit.getId()); + assertThat(searchHit.getScore(), greaterThan(0f)); + assertEquals(-1L, searchHit.getVersion()); + assertNotNull(searchHit.getSourceAsMap()); + assertEquals(3, searchHit.getSourceAsMap().size()); + assertEquals("type1", searchHit.getSourceAsMap().get("type")); + 
assertEquals(50, searchHit.getSourceAsMap().get("num2")); + } + + public void testSearchWithTermsAgg() throws IOException { + SearchRequest searchRequest = new SearchRequest(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1", ValueType.STRING).field("type.keyword")); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + Terms termsAgg = searchResponse.getAggregations().get("agg1"); + assertEquals("agg1", termsAgg.getName()); + assertEquals(2, termsAgg.getBuckets().size()); + Terms.Bucket type1 = termsAgg.getBucketByKey("type1"); + assertEquals(3, type1.getDocCount()); + assertEquals(0, type1.getAggregations().asList().size()); + Terms.Bucket type2 = termsAgg.getBucketByKey("type2"); + assertEquals(2, type2.getDocCount()); + assertEquals(0, type2.getAggregations().asList().size()); + } + + public void testSearchWithRangeAgg() throws IOException { + { + SearchRequest searchRequest = new SearchRequest(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new RangeAggregationBuilder("agg1").field("num")); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, + () -> execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync)); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + SearchRequest searchRequest = new SearchRequest(); + SearchSourceBuilder 
searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new RangeAggregationBuilder("agg1").field("num") + .addRange("first", 0, 30).addRange("second", 31, 200)); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); + assertEquals(5, searchResponse.getHits().totalHits); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + Range rangeAgg = searchResponse.getAggregations().get("agg1"); + assertEquals("agg1", rangeAgg.getName()); + assertEquals(2, rangeAgg.getBuckets().size()); + { + Range.Bucket bucket = rangeAgg.getBuckets().get(0); + assertEquals("first", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + { + Range.Bucket bucket = rangeAgg.getBuckets().get(1); + assertEquals("second", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + } + + public void testSearchWithTermsAndRangeAgg() throws IOException { + SearchRequest searchRequest = new SearchRequest(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + TermsAggregationBuilder agg = new TermsAggregationBuilder("agg1", ValueType.STRING).field("type.keyword"); + agg.subAggregation(new RangeAggregationBuilder("subagg").field("num") + .addRange("first", 0, 30).addRange("second", 31, 200)); + searchSourceBuilder.aggregation(agg); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + 
assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + Terms termsAgg = searchResponse.getAggregations().get("agg1"); + assertEquals("agg1", termsAgg.getName()); + assertEquals(2, termsAgg.getBuckets().size()); + Terms.Bucket type1 = termsAgg.getBucketByKey("type1"); + assertEquals(3, type1.getDocCount()); + assertEquals(1, type1.getAggregations().asList().size()); + { + Range rangeAgg = type1.getAggregations().get("subagg"); + assertEquals(2, rangeAgg.getBuckets().size()); + { + Range.Bucket bucket = rangeAgg.getBuckets().get(0); + assertEquals("first", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + { + Range.Bucket bucket = rangeAgg.getBuckets().get(1); + assertEquals("second", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + } + Terms.Bucket type2 = termsAgg.getBucketByKey("type2"); + assertEquals(2, type2.getDocCount()); + assertEquals(1, type2.getAggregations().asList().size()); + { + Range rangeAgg = type2.getAggregations().get("subagg"); + assertEquals(2, rangeAgg.getBuckets().size()); + { + Range.Bucket bucket = rangeAgg.getBuckets().get(0); + assertEquals("first", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + } + { + Range.Bucket bucket = rangeAgg.getBuckets().get(1); + assertEquals("second", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + } + } + + public void testSearchWithMatrixStats() throws IOException { + SearchRequest searchRequest = new SearchRequest(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new MatrixStatsAggregationBuilder("agg1").fields(Arrays.asList("num", "num2"))); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, 
highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); + assertEquals(5, searchResponse.getHits().totalHits); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(1, searchResponse.getAggregations().asList().size()); + MatrixStats matrixStats = searchResponse.getAggregations().get("agg1"); + assertEquals(5, matrixStats.getFieldCount("num")); + assertEquals(56d, matrixStats.getMean("num"), 0d); + assertEquals(1830d, matrixStats.getVariance("num"), 0d); + assertEquals(0.09340198804973057, matrixStats.getSkewness("num"), 0d); + assertEquals(1.2741646510794589, matrixStats.getKurtosis("num"), 0d); + assertEquals(5, matrixStats.getFieldCount("num2")); + assertEquals(29d, matrixStats.getMean("num2"), 0d); + assertEquals(330d, matrixStats.getVariance("num2"), 0d); + assertEquals(-0.13568039346585542, matrixStats.getSkewness("num2"), 0d); + assertEquals(1.3517561983471074, matrixStats.getKurtosis("num2"), 0d); + assertEquals(-767.5, matrixStats.getCovariance("num", "num2"), 0d); + assertEquals(-0.9876336291667923, matrixStats.getCorrelation("num", "num2"), 0d); + } + + public void testSearchWithParentJoin() throws IOException { + StringEntity parentMapping = new StringEntity("{\n" + + " \"mappings\": {\n" + + " \"answer\" : {\n" + + " \"_parent\" : {\n" + + " \"type\" : \"question\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index.mapping.single_type\": false" + + " }\n" + + "}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/child_example", Collections.emptyMap(), parentMapping); + StringEntity questionDoc = new StringEntity("{\n" + + " \"body\": \"

I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" + + " \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n" + + " \"tags\": [\n" + + " \"windows-server-2003\",\n" + + " \"windows-server-2008\",\n" + + " \"file-transfer\"\n" + + " ]\n" + + "}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/child_example/question/1", Collections.emptyMap(), questionDoc); + StringEntity answerDoc1 = new StringEntity("{\n" + + " \"owner\": {\n" + + " \"location\": \"Norfolk, United Kingdom\",\n" + + " \"display_name\": \"Sam\",\n" + + " \"id\": 48\n" + + " },\n" + + " \"body\": \"

Unfortunately you're pretty much limited to FTP...\",\n" + + " \"creation_date\": \"2009-05-04T13:45:37.030\"\n" + + "}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "child_example/answer/1", Collections.singletonMap("parent", "1"), answerDoc1); + StringEntity answerDoc2 = new StringEntity("{\n" + + " \"owner\": {\n" + + " \"location\": \"Norfolk, United Kingdom\",\n" + + " \"display_name\": \"Troll\",\n" + + " \"id\": 49\n" + + " },\n" + + " \"body\": \"

Use Linux...\",\n" + + " \"creation_date\": \"2009-05-05T13:45:37.030\"\n" + + "}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/child_example/answer/2", Collections.singletonMap("parent", "1"), answerDoc2); + client().performRequest("POST", "/_refresh"); + + TermsAggregationBuilder leafTermAgg = new TermsAggregationBuilder("top-names", ValueType.STRING) + .field("owner.display_name.keyword").size(10); + ChildrenAggregationBuilder childrenAgg = new ChildrenAggregationBuilder("to-answers", "answer").subAggregation(leafTermAgg); + TermsAggregationBuilder termsAgg = new TermsAggregationBuilder("top-tags", ValueType.STRING).field("tags.keyword") + .size(10).subAggregation(childrenAgg); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(0).aggregation(termsAgg); + SearchRequest searchRequest = new SearchRequest("child_example"); + searchRequest.source(searchSourceBuilder); + + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); + assertEquals(3, searchResponse.getHits().totalHits); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(1, searchResponse.getAggregations().asList().size()); + Terms terms = searchResponse.getAggregations().get("top-tags"); + assertEquals(0, terms.getDocCountError()); + assertEquals(0, terms.getSumOfOtherDocCounts()); + assertEquals(3, terms.getBuckets().size()); + for (Terms.Bucket bucket : terms.getBuckets()) { + assertThat(bucket.getKeyAsString(), + either(equalTo("file-transfer")).or(equalTo("windows-server-2003")).or(equalTo("windows-server-2008"))); + assertEquals(1, bucket.getDocCount()); + 
assertEquals(1, bucket.getAggregations().asList().size()); + Children children = bucket.getAggregations().get("to-answers"); + assertEquals(2, children.getDocCount()); + assertEquals(1, children.getAggregations().asList().size()); + Terms leafTerms = children.getAggregations().get("top-names"); + assertEquals(0, leafTerms.getDocCountError()); + assertEquals(0, leafTerms.getSumOfOtherDocCounts()); + assertEquals(2, leafTerms.getBuckets().size()); + assertEquals(2, leafTerms.getBuckets().size()); + Terms.Bucket sam = leafTerms.getBucketByKey("Sam"); + assertEquals(1, sam.getDocCount()); + Terms.Bucket troll = leafTerms.getBucketByKey("Troll"); + assertEquals(1, troll.getDocCount()); + } + } + + public void testSearchWithSuggest() throws IOException { + SearchRequest searchRequest = new SearchRequest(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion("sugg1", new PhraseSuggestionBuilder("type")) + .setGlobalText("type")); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getAggregations()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertEquals(0, searchResponse.getHits().totalHits); + assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(1, searchResponse.getSuggest().size()); + + Suggest.Suggestion> sugg = searchResponse + .getSuggest().iterator().next(); + assertEquals("sugg1", sugg.getName()); + for (Suggest.Suggestion.Entry options : sugg) { + assertEquals("type", options.getText().string()); + assertEquals(0, options.getOffset()); + assertEquals(4, options.getLength()); + assertEquals(2 ,options.getOptions().size()); + for 
(Suggest.Suggestion.Entry.Option option : options) { + assertThat(option.getScore(), greaterThan(0f)); + assertThat(option.getText().string(), either(equalTo("type1")).or(equalTo("type2"))); + } + } + } + + public void testSearchScroll() throws Exception { + + for (int i = 0; i < 100; i++) { + XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); + HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON); + client().performRequest("PUT", "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity); + } + client().performRequest("POST", "/test/_refresh"); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC); + SearchRequest searchRequest = new SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + + try { + long counter = 0; + assertSearchHeader(searchResponse); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); + } + + searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), + highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertEquals(counter++, ((Number) hit.getSortValues()[0]).longValue()); + } + + searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), + highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + + 
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(30)); + for (SearchHit hit : searchResponse.getHits()) { + assertEquals(counter++, ((Number) hit.getSortValues()[0]).longValue()); + } + } finally { + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(searchResponse.getScrollId()); + ClearScrollResponse clearScrollResponse = execute(clearScrollRequest, highLevelClient()::clearScroll, + highLevelClient()::clearScrollAsync); + assertThat(clearScrollResponse.getNumFreed(), greaterThan(0)); + assertTrue(clearScrollResponse.isSucceeded()); + + SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)); + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(scrollRequest, + highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + assertThat(exception.getRootCause(), instanceOf(ElasticsearchException.class)); + ElasticsearchException rootCause = (ElasticsearchException) exception.getRootCause(); + assertThat(rootCause.getMessage(), containsString("No search context found for")); + } + } + + private static void assertSearchHeader(SearchResponse searchResponse) { + assertThat(searchResponse.getTook().nanos(), greaterThanOrEqualTo(0L)); + assertEquals(0, searchResponse.getFailedShards()); + assertThat(searchResponse.getTotalShards(), greaterThan(0)); + assertEquals(searchResponse.getTotalShards(), searchResponse.getSuccessfulShards()); + assertEquals(0, searchResponse.getShardFailures().length); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java new file mode 100644 index 
0000000000000..00c19019f47e7 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.documentation; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +/** + * This class is used to generate the Java Delete API documentation. + * You need to wrap your code between two tags like: + * // tag::example[] + * // end::example[] + * + * Where example is your tag name. 
+ * + * Then in the documentation, you can extract what is between tag and end tags with + * ["source","java",subs="attributes,callouts"] + * -------------------------------------------------- + * sys2::[perl -ne 'exit if /end::example/; print if $tag; $tag = $tag || /tag::example/' \ + * {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java] + * -------------------------------------------------- + */ +public class DeleteDocumentationIT extends ESRestHighLevelClientTestCase { + + /** + * This test documents docs/java-rest/high-level/document/delete.asciidoc + */ + public void testDelete() throws IOException { + RestHighLevelClient client = highLevelClient(); + + // tag::delete-request + DeleteRequest request = new DeleteRequest( + "index", // <1> + "type", // <2> + "id"); // <3> + // end::delete-request + + // tag::delete-request-props + request.timeout(TimeValue.timeValueSeconds(1)); // <1> + request.timeout("1s"); // <2> + request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); // <3> + request.setRefreshPolicy("wait_for"); // <4> + request.version(2); // <5> + request.versionType(VersionType.EXTERNAL); // <6> + // end::delete-request-props + + // tag::delete-execute + DeleteResponse response = client.delete(request); + // end::delete-execute + + try { + // tag::delete-notfound + if (response.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) { + throw new Exception("Can't find document to be removed"); // <1> + } + // end::delete-notfound + } catch (Exception ignored) { } + + // tag::delete-execute-async + client.deleteAsync(request, new ActionListener() { + @Override + public void onResponse(DeleteResponse deleteResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }); + // end::delete-execute-async + + // tag::delete-conflict + try { + client.delete(request); + } catch (ElasticsearchException exception) { + if 
(exception.status().equals(RestStatus.CONFLICT)) { + // <1> + } + } + // end::delete-conflict + + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java new file mode 100644 index 0000000000000..01a5eb5dfc12d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java @@ -0,0 +1,453 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.documentation; + +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.builders.CoordinatesBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilders; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.query.GeoShapeQueryBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; +import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; +import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.disMaxQuery; +import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; +import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; +import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; +import static org.elasticsearch.index.query.QueryBuilders.geoPolygonQuery; +import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static org.elasticsearch.index.query.QueryBuilders.idsQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static 
org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery; +import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; +import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; +import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; +import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; +import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanContainingQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanFirstQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanMultiTermQueryBuilder; +import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanOrQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanWithinQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.termsQuery; +import static org.elasticsearch.index.query.QueryBuilders.typeQuery; +import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction; +import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; +import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery; + +/** + * 
Examples of using the transport client that are imported into the transport client documentation. + * There are no assertions here because we're mostly concerned with making sure that the examples + * compile and don't throw weird runtime exceptions. Assertions and example data would be nice, but + * that is secondary. + */ +public class QueryDSLDocumentationTests extends ESTestCase { + public void testBool() { + // tag::bool + boolQuery() + .must(termQuery("content", "test1")) // <1> + .must(termQuery("content", "test4")) // <1> + .mustNot(termQuery("content", "test2")) // <2> + .should(termQuery("content", "test3")) // <3> + .filter(termQuery("content", "test5")); // <4> + // end::bool + } + + public void testBoosting() { + // tag::boosting + boostingQuery( + termQuery("name","kimchy"), // <1> + termQuery("name","dadoonet")) // <2> + .negativeBoost(0.2f); // <3> + // end::boosting + } + + public void testCommonTerms() { + // tag::common_terms + commonTermsQuery("name", // <1> + "kimchy"); // <2> + // end::common_terms + } + + public void testConstantScore() { + // tag::constant_score + constantScoreQuery( + termQuery("name","kimchy")) // <1> + .boost(2.0f); // <2> + // end::constant_score + } + + public void testDisMax() { + // tag::dis_max + disMaxQuery() + .add(termQuery("name", "kimchy")) // <1> + .add(termQuery("name", "elasticsearch")) // <2> + .boost(1.2f) // <3> + .tieBreaker(0.7f); // <4> + // end::dis_max + } + + public void testExists() { + // tag::exists + existsQuery("name"); // <1> + // end::exists + } + + public void testFunctionScore() { + // tag::function_score + FilterFunctionBuilder[] functions = { + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + matchQuery("name", "kimchy"), // <1> + randomFunction("ABCDEF")), // <2> + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + exponentialDecayFunction("age", 0L, 1L)) // <3> + }; + functionScoreQuery(functions); + // end::function_score + } + + public void testFuzzy() { + // tag::fuzzy + 
fuzzyQuery( + "name", // <1> + "kimchy"); // <2> + // end::fuzzy + } + + public void testGeoBoundingBox() { + // tag::geo_bounding_box + geoBoundingBoxQuery("pin.location") // <1> + .setCorners(40.73, -74.1, // <2> + 40.717, -73.99); // <3> + // end::geo_bounding_box + } + + public void testGeoDistance() { + // tag::geo_distance + geoDistanceQuery("pin.location") // <1> + .point(40, -70) // <2> + .distance(200, DistanceUnit.KILOMETERS); // <3> + // end::geo_distance + } + + public void testGeoPolygon() { + // tag::geo_polygon + List points = new ArrayList(); // <1> + points.add(new GeoPoint(40, -70)); + points.add(new GeoPoint(30, -80)); + points.add(new GeoPoint(20, -90)); + geoPolygonQuery("pin.location", points); // <2> + // end::geo_polygon + } + + public void testGeoShape() throws IOException { + { + // tag::geo_shape + GeoShapeQueryBuilder qb = geoShapeQuery( + "pin.location", // <1> + ShapeBuilders.newMultiPoint( // <2> + new CoordinatesBuilder() + .coordinate(0, 0) + .coordinate(0, 10) + .coordinate(10, 10) + .coordinate(10, 0) + .coordinate(0, 0) + .build())); + qb.relation(ShapeRelation.WITHIN); // <3> + // end::geo_shape + } + + { + // tag::indexed_geo_shape + // Using pre-indexed shapes + GeoShapeQueryBuilder qb = geoShapeQuery( + "pin.location", // <1> + "DEU", // <2> + "countries"); // <3> + qb.relation(ShapeRelation.WITHIN) // <4> + .indexedShapeIndex("shapes") // <5> + .indexedShapePath("location"); // <6> + // end::indexed_geo_shape + } + } + + public void testHasChild() { + // tag::has_child + hasChildQuery( + "blog_tag", // <1> + termQuery("tag","something"), // <2> + ScoreMode.None); // <3> + // end::has_child + } + + public void testHasParent() { + // tag::has_parent + hasParentQuery( + "blog", // <1> + termQuery("tag","something"), // <2> + false); // <3> + // end::has_parent + } + + public void testIds() { + // tag::ids + idsQuery("my_type", "type2") + .addIds("1", "4", "100"); + + idsQuery() // <1> + .addIds("1", "4", "100"); + // end::ids + 
} + + public void testMatchAll() { + // tag::match_all + matchAllQuery(); + // end::match_all + } + + public void testMatch() { + // tag::match + matchQuery( + "name", // <1> + "kimchy elasticsearch"); // <2> + // end::match + } + + public void testMoreLikeThis() { + // tag::more_like_this + String[] fields = {"name.first", "name.last"}; // <1> + String[] texts = {"text like this one"}; // <2> + + moreLikeThisQuery(fields, texts, null) + .minTermFreq(1) // <3> + .maxQueryTerms(12); // <4> + // end::more_like_this + } + + public void testMultiMatch() { + // tag::multi_match + multiMatchQuery( + "kimchy elasticsearch", // <1> + "user", "message"); // <2> + // end::multi_match + } + + public void testNested() { + // tag::nested + nestedQuery( + "obj1", // <1> + boolQuery() // <2> + .must(matchQuery("obj1.name", "blue")) + .must(rangeQuery("obj1.count").gt(5)), + ScoreMode.Avg); // <3> + // end::nested + } + + public void testPrefix() { + // tag::prefix + prefixQuery( + "brand", // <1> + "heine"); // <2> + // end::prefix + } + + public void testQueryString() { + // tag::query_string + queryStringQuery("+kimchy -elasticsearch"); + // end::query_string + } + + public void testRange() { + // tag::range + rangeQuery("price") // <1> + .from(5) // <2> + .to(10) // <3> + .includeLower(true) // <4> + .includeUpper(false); // <5> + // end::range + + // tag::range_simplified + // A simplified form using gte, gt, lt or lte + rangeQuery("age") // <1> + .gte("10") // <2> + .lt("20"); // <3> + // end::range_simplified + } + + public void testRegExp() { + // tag::regexp + regexpQuery( + "name.first", // <1> + "s.*y"); // <2> + // end::regexp + } + + public void testScript() { + // tag::script_inline + scriptQuery( + new Script("doc['num1'].value > 1") // <1> + ); + // end::script_inline + + // tag::script_file + Map parameters = new HashMap<>(); + parameters.put("param1", 5); + scriptQuery(new Script( + ScriptType.STORED, // <1> + "painless", // <2> + "myscript", // <3> + 
singletonMap("param1", 5))); // <4> + // end::script_file + } + + public void testSimpleQueryString() { + // tag::simple_query_string + simpleQueryStringQuery("+kimchy -elasticsearch"); + // end::simple_query_string + } + + public void testSpanContaining() { + // tag::span_containing + spanContainingQuery( + spanNearQuery(spanTermQuery("field1","bar"), 5) // <1> + .addClause(spanTermQuery("field1","baz")) + .inOrder(true), + spanTermQuery("field1","foo")); // <2> + // end::span_containing + } + + public void testSpanFirst() { + // tag::span_first + spanFirstQuery( + spanTermQuery("user", "kimchy"), // <1> + 3 // <2> + ); + // end::span_first + } + + public void testSpanMultiTerm() { + // tag::span_multi + spanMultiTermQueryBuilder( + prefixQuery("user", "ki")); // <1> + // end::span_multi + } + + public void testSpanNear() { + // tag::span_near + spanNearQuery( + spanTermQuery("field","value1"), // <1> + 12) // <2> + .addClause(spanTermQuery("field","value2")) // <1> + .addClause(spanTermQuery("field","value3")) // <1> + .inOrder(false); // <3> + // end::span_near + } + + public void testSpanNot() { + // tag::span_not + spanNotQuery( + spanTermQuery("field","value1"), // <1> + spanTermQuery("field","value2")); // <2> + // end::span_not + } + + public void testSpanOr() { + // tag::span_or + spanOrQuery(spanTermQuery("field","value1")) // <1> + .addClause(spanTermQuery("field","value2")) // <1> + .addClause(spanTermQuery("field","value3")); // <1> + // end::span_or + } + + public void testSpanTerm() { + // tag::span_term + spanTermQuery( + "user", // <1> + "kimchy"); // <2> + // end::span_term + } + + public void testSpanWithin() { + // tag::span_within + spanWithinQuery( + spanNearQuery(spanTermQuery("field1", "bar"), 5) // <1> + .addClause(spanTermQuery("field1", "baz")) + .inOrder(true), + spanTermQuery("field1", "foo")); // <2> + // end::span_within + } + + public void testTerm() { + // tag::term + termQuery( + "name", // <1> + "kimchy"); // <2> + // end::term + 
} + + public void testTerms() { + // tag::terms + termsQuery("tags", // <1> + "blue", "pill"); // <2> + // end::terms + } + + public void testType() { + // tag::type + typeQuery("my_type"); // <1> + // end::type + } + + public void testWildcard() { + // tag::wildcard + wildcardQuery( + "user", // <1> + "k?mch*"); // <2> + // end::wildcard + } +} diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java index 6a01e0dc182bf..9b63e3492fd80 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java @@ -43,7 +43,6 @@ import java.net.InetSocketAddress; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -55,7 +54,6 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; @@ -76,7 +74,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase { public static void startHttpServer() throws Exception { pathPrefix = randomBoolean() ? 
"/testPathPrefix/" + randomAsciiOfLengthBetween(1, 5) : ""; httpServer = createHttpServer(); - defaultHeaders = generateHeaders("Header-default", "Header-array", randomIntBetween(0, 5)); + defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default"); restClient = createRestClient(false, true); } @@ -173,17 +171,11 @@ public void testHeaders() throws IOException { if (method.equals("HEAD") == false) { standardHeaders.add("Content-length"); } - - final int numHeaders = randomIntBetween(1, 5); - final Header[] headers = generateHeaders("Header", "Header-array", numHeaders); - final Map> expectedHeaders = new HashMap<>(); - - addHeaders(expectedHeaders, defaultHeaders, headers); - + final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header"); final int statusCode = randomStatusCode(getRandom()); Response esResponse; try { - esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), headers); + esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), requestHeaders); } catch(ResponseException e) { esResponse = e.getResponse(); } @@ -191,24 +183,13 @@ public void testHeaders() throws IOException { assertEquals(method, esResponse.getRequestLine().getMethod()); assertEquals(statusCode, esResponse.getStatusLine().getStatusCode()); assertEquals(pathPrefix + "/" + statusCode, esResponse.getRequestLine().getUri()); - + assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), standardHeaders); for (final Header responseHeader : esResponse.getHeaders()) { - final String name = responseHeader.getName(); - final String value = responseHeader.getValue(); - if (name.startsWith("Header")) { - final List values = expectedHeaders.get(name); - assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values); - assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value)); - - // we've collected them 
all - if (values.isEmpty()) { - expectedHeaders.remove(name); - } - } else { + String name = responseHeader.getName(); + if (name.startsWith("Header") == false) { assertTrue("unknown header was returned " + name, standardHeaders.remove(name)); } } - assertTrue("some headers that were sent weren't returned: " + expectedHeaders, expectedHeaders.isEmpty()); assertTrue("some expected standard headers weren't returned: " + standardHeaders, standardHeaders.isEmpty()); } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index c92e17e3d982e..a74310daa0140 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -58,7 +58,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Future; @@ -72,7 +71,6 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -136,8 +134,7 @@ public Future answer(InvocationOnMock invocationOnMock) throws Thr }); - int numHeaders = randomIntBetween(0, 3); - defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders); + defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default"); httpHost = new HttpHost("localhost", 9200); failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, defaultHeaders, new HttpHost[]{httpHost}, null, failureListener); @@ -365,33 +362,17 @@ public void testNullParams() throws IOException { */ public 
void testHeaders() throws IOException { for (String method : getHttpMethods()) { - final int numHeaders = randomIntBetween(1, 5); - final Header[] headers = generateHeaders("Header", null, numHeaders); - final Map> expectedHeaders = new HashMap<>(); - - addHeaders(expectedHeaders, defaultHeaders, headers); + final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header"); final int statusCode = randomStatusCode(getRandom()); Response esResponse; try { - esResponse = restClient.performRequest(method, "/" + statusCode, headers); + esResponse = restClient.performRequest(method, "/" + statusCode, requestHeaders); } catch(ResponseException e) { esResponse = e.getResponse(); } assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode)); - for (Header responseHeader : esResponse.getHeaders()) { - final String name = responseHeader.getName(); - final String value = responseHeader.getValue(); - final List values = expectedHeaders.get(name); - assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values); - assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value)); - - // we've collected them all - if (values.isEmpty()) { - expectedHeaders.remove(name); - } - } - assertTrue("some headers that were sent weren't returned " + expectedHeaders, expectedHeaders.isEmpty()); + assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), Collections.emptySet()); } } @@ -457,10 +438,9 @@ private HttpUriRequest performRandomRequest(String method) throws Exception { } Header[] headers = new Header[0]; - final int numHeaders = randomIntBetween(1, 5); - final Set uniqueNames = new HashSet<>(numHeaders); + final Set uniqueNames = new HashSet<>(); if (randomBoolean()) { - headers = generateHeaders("Header", "Header-array", numHeaders); + headers = RestClientTestUtil.randomHeaders(getRandom(), "Header"); for (Header header : headers) { request.addHeader(header); 
uniqueNames.add(header.getName()); diff --git a/client/test/src/main/java/org/elasticsearch/client/RestClientTestCase.java b/client/test/src/main/java/org/elasticsearch/client/RestClientTestCase.java index 4296932a00208..6a2a45ef2813c 100644 --- a/client/test/src/main/java/org/elasticsearch/client/RestClientTestCase.java +++ b/client/test/src/main/java/org/elasticsearch/client/RestClientTestCase.java @@ -30,16 +30,19 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - import org.apache.http.Header; -import org.apache.http.message.BasicHeader; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + @TestMethodProviders({ JUnit3MethodProvider.class }) @@ -53,70 +56,56 @@ public abstract class RestClientTestCase extends RandomizedTest { /** - * Create the specified number of {@link Header}s. - *

- * Generated header names will be the {@code baseName} plus its index or, rarely, the {@code arrayName} if it's supplied. + * Assert that the actual headers are the expected ones given the original default and request headers. Some headers can be ignored, + * for instance in case the http client is adding its own automatically. * - * @param baseName The base name to use for all headers. - * @param arrayName The optional ({@code null}able) array name to use randomly. - * @param headers The number of headers to create. - * @return Never {@code null}. + * @param defaultHeaders the default headers set to the REST client instance + * @param requestHeaders the request headers sent with a particular request + * @param actualHeaders the actual headers as a result of the provided default and request headers + * @param ignoreHeaders header keys to be ignored as they are not part of default nor request headers, yet they + * will be part of the actual ones */ - protected static Header[] generateHeaders(final String baseName, final String arrayName, final int headers) { - final Header[] generated = new Header[headers]; - for (int i = 0; i < headers; i++) { - String headerName = baseName + i; - if (arrayName != null && rarely()) { - headerName = arrayName; + protected static void assertHeaders(final Header[] defaultHeaders, final Header[] requestHeaders, + final Header[] actualHeaders, final Set ignoreHeaders) { + final Map> expectedHeaders = new HashMap<>(); + final Set requestHeaderKeys = new HashSet<>(); + for (final Header header : requestHeaders) { + final String name = header.getName(); + addValueToListEntry(expectedHeaders, name, header.getValue()); + requestHeaderKeys.add(name); + } + for (final Header defaultHeader : defaultHeaders) { + final String name = defaultHeader.getName(); + if (requestHeaderKeys.contains(name) == false) { + addValueToListEntry(expectedHeaders, name, defaultHeader.getValue()); } - - generated[i] = new BasicHeader(headerName, 
randomAsciiOfLengthBetween(3, 10)); } - return generated; + Set actualIgnoredHeaders = new HashSet<>(); + for (Header responseHeader : actualHeaders) { + final String name = responseHeader.getName(); + if (ignoreHeaders.contains(name)) { + expectedHeaders.remove(name); + actualIgnoredHeaders.add(name); + continue; + } + final String value = responseHeader.getValue(); + final List values = expectedHeaders.get(name); + assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values); + assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value)); + if (values.isEmpty()) { + expectedHeaders.remove(name); + } + } + assertEquals("some headers meant to be ignored were not part of the actual headers", ignoreHeaders, actualIgnoredHeaders); + assertTrue("some headers that were sent weren't returned " + expectedHeaders, expectedHeaders.isEmpty()); } - /** - * Create a new {@link List} within the {@code map} if none exists for {@code name} or append to the existing list. - * - * @param map The map to manipulate. - * @param name The name to create/append the list for. - * @param value The value to add. - */ - private static void createOrAppendList(final Map> map, final String name, final String value) { + private static void addValueToListEntry(final Map> map, final String name, final String value) { List values = map.get(name); - if (values == null) { values = new ArrayList<>(); map.put(name, values); } - values.add(value); } - - /** - * Add the {@code headers} to the {@code map} so that related tests can more easily assert that they exist. - *

- * If both the {@code defaultHeaders} and {@code headers} contain the same {@link Header}, based on its - * {@linkplain Header#getName() name}, then this will only use the {@code Header}(s) from {@code headers}. - * - * @param map The map to build with name/value(s) pairs. - * @param defaultHeaders The headers to add to the map representing default headers. - * @param headers The headers to add to the map representing request-level headers. - * @see #createOrAppendList(Map, String, String) - */ - protected static void addHeaders(final Map> map, final Header[] defaultHeaders, final Header[] headers) { - final Set uniqueHeaders = new HashSet<>(); - for (final Header header : headers) { - final String name = header.getName(); - createOrAppendList(map, name, header.getValue()); - uniqueHeaders.add(name); - } - for (final Header defaultHeader : defaultHeaders) { - final String name = defaultHeader.getName(); - if (uniqueHeaders.contains(name) == false) { - createOrAppendList(map, name, defaultHeader.getValue()); - } - } - } - } diff --git a/client/test/src/main/java/org/elasticsearch/client/RestClientTestUtil.java b/client/test/src/main/java/org/elasticsearch/client/RestClientTestUtil.java index fd8046c10fae3..7cda8a71d6178 100644 --- a/client/test/src/main/java/org/elasticsearch/client/RestClientTestUtil.java +++ b/client/test/src/main/java/org/elasticsearch/client/RestClientTestUtil.java @@ -19,7 +19,11 @@ package org.elasticsearch.client; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.apache.http.Header; +import org.apache.http.message.BasicHeader; import java.util.ArrayList; import java.util.Arrays; @@ -81,4 +85,23 @@ static List getAllErrorStatusCodes() { static List getAllStatusCodes() { return ALL_STATUS_CODES; } + + /** + * Create a random number of {@link org.apache.http.Header}s. 
+ * Generated header names will either be the {@code baseName} plus its index, or exactly the provided {@code baseName} so that the + * we test also support for multiple headers with same key and different values. + */ + static Header[] randomHeaders(Random random, final String baseName) { + int numHeaders = RandomNumbers.randomIntBetween(random, 0, 5); + final Header[] headers = new Header[numHeaders]; + for (int i = 0; i < numHeaders; i++) { + String headerName = baseName; + //randomly exercise the code path that supports multiple headers with same key + if (random.nextBoolean()) { + headerName = headerName + i; + } + headers[i] = new BasicHeader(headerName, RandomStrings.randomAsciiOfLengthBetween(random, 3, 10)); + } + return headers; + } } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index cbfc843162883..9b1b803d4604c 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -28,17 +29,14 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.util.Objects; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicLong; +import java.util.function.BiConsumer; /** * A bulk processor is a thread safe bulk processing class, allowing to easily set when to "flush" a new bulk request @@ -66,7 +64,7 @@ public interface Listener { /** * Callback after a failed execution of bulk request. - * + *

* Note that in case an instance of InterruptedException is passed, which means that request processing has been * cancelled externally, the thread's interruption status has been restored prior to calling this method. */ @@ -78,10 +76,10 @@ public interface Listener { */ public static class Builder { - private final Client client; + private final BiConsumer> consumer; private final Listener listener; + private final ThreadPool threadPool; - private String name; private int concurrentRequests = 1; private int bulkActions = 1000; private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB); @@ -92,17 +90,10 @@ public static class Builder { * Creates a builder of bulk processor with the client to use and the listener that will be used * to be notified on the completion of bulk requests. */ - public Builder(Client client, Listener listener) { - this.client = client; + public Builder(BiConsumer> consumer, Listener listener, ThreadPool threadPool) { + this.consumer = consumer; this.listener = listener; - } - - /** - * Sets an optional name to identify this bulk processor. - */ - public Builder setName(String name) { - this.name = name; - return this; + this.threadPool = threadPool; } /** @@ -164,7 +155,7 @@ public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) { * Builds a new bulk processor. 
*/ public BulkProcessor build() { - return new BulkProcessor(client, backoffPolicy, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval); + return new BulkProcessor(consumer, backoffPolicy, listener, concurrentRequests, bulkActions, bulkSize, flushInterval, threadPool); } } @@ -172,15 +163,13 @@ public static Builder builder(Client client, Listener listener) { Objects.requireNonNull(client, "client"); Objects.requireNonNull(listener, "listener"); - return new Builder(client, listener); + return new Builder(client::bulk, listener, client.threadPool()); } private final int bulkActions; private final long bulkSize; - - private final ScheduledThreadPoolExecutor scheduler; - private final ScheduledFuture scheduledFuture; + private final ThreadPool.Cancellable cancellableFlushTask; private final AtomicLong executionIdGen = new AtomicLong(); @@ -189,22 +178,21 @@ public static Builder builder(Client client, Listener listener) { private volatile boolean closed = false; - BulkProcessor(Client client, BackoffPolicy backoffPolicy, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) { + BulkProcessor(BiConsumer> consumer, BackoffPolicy backoffPolicy, Listener listener, + int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval, + ThreadPool threadPool) { this.bulkActions = bulkActions; this.bulkSize = bulkSize.getBytes(); - this.bulkRequest = new BulkRequest(); - this.bulkRequestHandler = (concurrentRequests == 0) ? BulkRequestHandler.syncHandler(client, backoffPolicy, listener) : BulkRequestHandler.asyncHandler(client, backoffPolicy, listener, concurrentRequests); - if (flushInterval != null) { - this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1, EsExecutors.daemonThreadFactory(client.settings(), (name != null ? 
"[" + name + "]" : "") + "bulk_processor")); - this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); - this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); - this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(new Flush(), flushInterval.millis(), flushInterval.millis(), TimeUnit.MILLISECONDS); + if (concurrentRequests == 0) { + this.bulkRequestHandler = BulkRequestHandler.syncHandler(consumer, backoffPolicy, listener, threadPool); } else { - this.scheduler = null; - this.scheduledFuture = null; + this.bulkRequestHandler = BulkRequestHandler.asyncHandler(consumer, backoffPolicy, listener, threadPool, concurrentRequests); } + + // Start period flushing task after everything is setup + this.cancellableFlushTask = startFlushTask(flushInterval, threadPool); } /** @@ -214,20 +202,20 @@ public static Builder builder(Client client, Listener listener) { public void close() { try { awaitClose(0, TimeUnit.NANOSECONDS); - } catch(InterruptedException exc) { + } catch (InterruptedException exc) { Thread.currentThread().interrupt(); } } /** * Closes the processor. If flushing by time is enabled, then it's shutdown. Any remaining bulk actions are flushed. - * + *

* If concurrent requests are not enabled, returns {@code true} immediately. * If concurrent requests are enabled, waits for up to the specified timeout for all bulk requests to complete then returns {@code true}, * If the specified waiting time elapses before all bulk requests complete, {@code false} is returned. * * @param timeout The maximum time to wait for the bulk requests to complete - * @param unit The time unit of the {@code timeout} argument + * @param unit The time unit of the {@code timeout} argument * @return {@code true} if all bulk requests completed and {@code false} if the waiting time elapsed before all the bulk requests completed * @throws InterruptedException If the current thread is interrupted */ @@ -236,10 +224,9 @@ public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws Inter return true; } closed = true; - if (this.scheduledFuture != null) { - FutureUtils.cancel(this.scheduledFuture); - this.scheduler.shutdown(); - } + + this.cancellableFlushTask.cancel(); + if (bulkRequest.numberOfActions() > 0) { execute(); } @@ -323,12 +310,28 @@ public synchronized BulkProcessor add(BytesReference data, @Nullable String defa * Adds the data from the bytes to be processed by the bulk processor */ public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, - @Nullable String defaultPipeline, @Nullable Object payload, XContentType xContentType) throws Exception { + @Nullable String defaultPipeline, @Nullable Object payload, XContentType xContentType) throws Exception { bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true, xContentType); executeIfNeeded(); return this; } + private ThreadPool.Cancellable startFlushTask(TimeValue flushInterval, ThreadPool threadPool) { + if (flushInterval == null) { + return new ThreadPool.Cancellable() { + @Override + public void cancel() {} + + @Override + public boolean isCancelled() { + return true; + 
} + }; + } + + return threadPool.scheduleWithFixedDelay(new Flush(), flushInterval, ThreadPool.Names.GENERIC); + } + private void executeIfNeeded() { ensureOpen(); if (!isOverTheLimit()) { diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index 5d9910d9179af..e1755bfb8bf1e 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -22,23 +22,27 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.Client; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.threadpool.ThreadPool; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; /** * Abstracts the low-level details of bulk request handling */ abstract class BulkRequestHandler { protected final Logger logger; - protected final Client client; + protected final BiConsumer> consumer; + protected final ThreadPool threadPool; - protected BulkRequestHandler(Client client) { - this.client = client; - this.logger = Loggers.getLogger(getClass(), client.settings()); + protected BulkRequestHandler(BiConsumer> consumer, ThreadPool threadPool) { + this.logger = Loggers.getLogger(getClass()); + this.consumer = consumer; + this.threadPool = threadPool; } @@ -47,20 +51,25 @@ protected BulkRequestHandler(Client client) { public abstract boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException; - public static BulkRequestHandler syncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) { - return new 
SyncBulkRequestHandler(client, backoffPolicy, listener); + public static BulkRequestHandler syncHandler(BiConsumer> consumer, + BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, + ThreadPool threadPool) { + return new SyncBulkRequestHandler(consumer, backoffPolicy, listener, threadPool); } - public static BulkRequestHandler asyncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) { - return new AsyncBulkRequestHandler(client, backoffPolicy, listener, concurrentRequests); + public static BulkRequestHandler asyncHandler(BiConsumer> consumer, + BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, + ThreadPool threadPool, int concurrentRequests) { + return new AsyncBulkRequestHandler(consumer, backoffPolicy, listener, threadPool, concurrentRequests); } private static class SyncBulkRequestHandler extends BulkRequestHandler { private final BulkProcessor.Listener listener; private final BackoffPolicy backoffPolicy; - SyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) { - super(client); + SyncBulkRequestHandler(BiConsumer> consumer, BackoffPolicy backoffPolicy, + BulkProcessor.Listener listener, ThreadPool threadPool) { + super(consumer, threadPool); this.backoffPolicy = backoffPolicy; this.listener = listener; } @@ -71,9 +80,10 @@ public void execute(BulkRequest bulkRequest, long executionId) { try { listener.beforeBulk(executionId, bulkRequest); BulkResponse bulkResponse = Retry - .on(EsRejectedExecutionException.class) - .policy(backoffPolicy) - .withSyncBackoff(client, bulkRequest); + .on(EsRejectedExecutionException.class) + .policy(backoffPolicy) + .using(threadPool) + .withSyncBackoff(consumer, bulkRequest, Settings.EMPTY); afterCalled = true; listener.afterBulk(executionId, bulkRequest, bulkResponse); } catch (InterruptedException e) { @@ -103,8 +113,10 @@ private static class AsyncBulkRequestHandler extends BulkRequestHandler { private 
final Semaphore semaphore; private final int concurrentRequests; - private AsyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) { - super(client); + private AsyncBulkRequestHandler(BiConsumer> consumer, BackoffPolicy backoffPolicy, + BulkProcessor.Listener listener, ThreadPool threadPool, + int concurrentRequests) { + super(consumer, threadPool); this.backoffPolicy = backoffPolicy; assert concurrentRequests > 0; this.listener = listener; @@ -121,26 +133,27 @@ public void execute(BulkRequest bulkRequest, long executionId) { semaphore.acquire(); acquired = true; Retry.on(EsRejectedExecutionException.class) - .policy(backoffPolicy) - .withAsyncBackoff(client, bulkRequest, new ActionListener() { - @Override - public void onResponse(BulkResponse response) { - try { - listener.afterBulk(executionId, bulkRequest, response); - } finally { - semaphore.release(); - } + .policy(backoffPolicy) + .using(threadPool) + .withAsyncBackoff(consumer, bulkRequest, new ActionListener() { + @Override + public void onResponse(BulkResponse response) { + try { + listener.afterBulk(executionId, bulkRequest, response); + } finally { + semaphore.release(); } - - @Override - public void onFailure(Exception e) { - try { - listener.afterBulk(executionId, bulkRequest, e); - } finally { - semaphore.release(); - } + } + + @Override + public void onFailure(Exception e) { + try { + listener.afterBulk(executionId, bulkRequest, e); + } finally { + semaphore.release(); } - }); + } + }, Settings.EMPTY); bulkRequestSetupSuccessful = true; } catch (InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/Retry.java b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java index c746894b78e95..e1ba1a6bee112 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/Retry.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java @@ -20,19 +20,25 @@ 
import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.Client; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; import java.util.function.Predicate; /** @@ -42,11 +48,16 @@ public class Retry { private final Class retryOnThrowable; private BackoffPolicy backoffPolicy; + private ThreadPool threadPool; public static Retry on(Class retryOnThrowable) { return new Retry(retryOnThrowable); } + Retry(Class retryOnThrowable) { + this.retryOnThrowable = retryOnThrowable; + } + /** * @param backoffPolicy The backoff policy that defines how long and how often to wait for retries. */ @@ -55,42 +66,48 @@ public Retry policy(BackoffPolicy backoffPolicy) { return this; } - Retry(Class retryOnThrowable) { - this.retryOnThrowable = retryOnThrowable; + /** + * @param threadPool The threadPool that will be used to schedule retries. + */ + public Retry using(ThreadPool threadPool) { + this.threadPool = threadPool; + return this; } /** - * Invokes #bulk(BulkRequest, ActionListener) on the provided client. Backs off on the provided exception and delegates results to the - * provided listener. 
- * - * @param client Client invoking the bulk request. + * Invokes #apply(BulkRequest, ActionListener). Backs off on the provided exception and delegates results to the + * provided listener. Retries will be attempted using the provided schedule function + * @param consumer The consumer to which apply the request and listener * @param bulkRequest The bulk request that should be executed. - * @param listener A listener that is invoked when the bulk request finishes or completes with an exception. The listener is not + * @param listener A listener that is invoked when the bulk request finishes or completes with an exception. The listener is not + * @param settings settings */ - public void withAsyncBackoff(Client client, BulkRequest bulkRequest, ActionListener listener) { - AsyncRetryHandler r = new AsyncRetryHandler(retryOnThrowable, backoffPolicy, client, listener); + public void withAsyncBackoff(BiConsumer> consumer, BulkRequest bulkRequest, ActionListener listener, Settings settings) { + RetryHandler r = new RetryHandler(retryOnThrowable, backoffPolicy, consumer, listener, settings, threadPool); r.execute(bulkRequest); - } /** - * Invokes #bulk(BulkRequest) on the provided client. Backs off on the provided exception. + * Invokes #apply(BulkRequest, ActionListener). Backs off on the provided exception. Retries will be attempted using + * the provided schedule function. * - * @param client Client invoking the bulk request. + * @param consumer The consumer to which apply the request and listener * @param bulkRequest The bulk request that should be executed. + * @param settings settings * @return the bulk response as returned by the client. * @throws Exception Any exception thrown by the callable. 
*/ - public BulkResponse withSyncBackoff(Client client, BulkRequest bulkRequest) throws Exception { - return SyncRetryHandler - .create(retryOnThrowable, backoffPolicy, client) - .executeBlocking(bulkRequest) - .actionGet(); + public BulkResponse withSyncBackoff(BiConsumer> consumer, BulkRequest bulkRequest, Settings settings) throws Exception { + PlainActionFuture actionFuture = PlainActionFuture.newFuture(); + RetryHandler r = new RetryHandler(retryOnThrowable, backoffPolicy, consumer, actionFuture, settings, threadPool); + r.execute(bulkRequest); + return actionFuture.actionGet(); } - static class AbstractRetryHandler implements ActionListener { + static class RetryHandler implements ActionListener { private final Logger logger; - private final Client client; + private final ThreadPool threadPool; + private final BiConsumer> consumer; private final ActionListener listener; private final Iterator backoff; private final Class retryOnThrowable; @@ -102,12 +119,15 @@ static class AbstractRetryHandler implements ActionListener { private volatile BulkRequest currentBulkRequest; private volatile ScheduledFuture scheduledRequestFuture; - AbstractRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener listener) { + RetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, + BiConsumer> consumer, ActionListener listener, + Settings settings, ThreadPool threadPool) { this.retryOnThrowable = retryOnThrowable; this.backoff = backoffPolicy.iterator(); - this.client = client; + this.consumer = consumer; this.listener = listener; - this.logger = Loggers.getLogger(getClass(), client.settings()); + this.logger = Loggers.getLogger(getClass(), settings); + this.threadPool = threadPool; // in contrast to System.currentTimeMillis(), nanoTime() uses a monotonic clock under the hood this.startTimestampNanos = System.nanoTime(); } @@ -142,9 +162,8 @@ private void retry(BulkRequest bulkRequestForRetry) { assert backoff.hasNext(); TimeValue 
next = backoff.next(); logger.trace("Retry of bulk request scheduled in {} ms.", next.millis()); - Runnable retry = () -> this.execute(bulkRequestForRetry); - retry = client.threadPool().getThreadContext().preserveContext(retry); - scheduledRequestFuture = client.threadPool().schedule(next, ThreadPool.Names.SAME, retry); + Runnable command = threadPool.getThreadContext().preserveContext(() -> this.execute(bulkRequestForRetry)); + scheduledRequestFuture = threadPool.schedule(next, ThreadPool.Names.SAME, command); } private BulkRequest createBulkRequestForRetry(BulkResponse bulkItemResponses) { @@ -208,32 +227,7 @@ private BulkResponse getAccumulatedResponse() { public void execute(BulkRequest bulkRequest) { this.currentBulkRequest = bulkRequest; - client.bulk(bulkRequest, this); - } - } - - static class AsyncRetryHandler extends AbstractRetryHandler { - AsyncRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener listener) { - super(retryOnThrowable, backoffPolicy, client, listener); - } - } - - static class SyncRetryHandler extends AbstractRetryHandler { - private final PlainActionFuture actionFuture; - - public static SyncRetryHandler create(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client) { - PlainActionFuture actionFuture = PlainActionFuture.newFuture(); - return new SyncRetryHandler(retryOnThrowable, backoffPolicy, client, actionFuture); - } - - SyncRetryHandler(Class retryOnThrowable, BackoffPolicy backoffPolicy, Client client, PlainActionFuture actionFuture) { - super(retryOnThrowable, backoffPolicy, client, actionFuture); - this.actionFuture = actionFuture; - } - - public ActionFuture executeBlocking(BulkRequest bulkRequest) { - super.execute(bulkRequest); - return actionFuture; + consumer.accept(bulkRequest, this); } } } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 
e6eb9afe704e4..44e0bbf823063 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -62,7 +62,7 @@ public void testThatBulkProcessorCountIsCorrect() throws Exception { BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); int numDocs = randomIntBetween(10, 100); - try (BulkProcessor processor = BulkProcessor.builder(client(), listener).setName("foo") + try (BulkProcessor processor = BulkProcessor.builder(client(), listener) //let's make sure that the bulk action limit trips, one single execution will index all the documents .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs) .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) @@ -86,7 +86,7 @@ public void testBulkProcessorFlush() throws Exception { int numDocs = randomIntBetween(10, 100); - try (BulkProcessor processor = BulkProcessor.builder(client(), listener).setName("foo") + try (BulkProcessor processor = BulkProcessor.builder(client(), listener) //let's make sure that this bulk won't be automatically flushed .setConcurrentRequests(randomIntBetween(0, 10)).setBulkActions(numDocs + randomIntBetween(1, 100)) .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)).build()) { @@ -203,7 +203,7 @@ public void testBulkProcessorWaitOnClose() throws Exception { BulkProcessorTestListener listener = new BulkProcessorTestListener(); int numDocs = randomIntBetween(10, 100); - BulkProcessor processor = BulkProcessor.builder(client(), listener).setName("foo") + BulkProcessor processor = BulkProcessor.builder(client(), listener) //let's make sure that the bulk action limit trips, one single execution will index all the documents .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs) .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(randomIntBetween(1, 
10), diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 87249bc8b5a13..bb2aff8026978 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -59,7 +59,6 @@ protected Settings nodeSettings(int nodeOrdinal) { .build(); } - public void testBulkRejectionLoadWithoutBackoff() throws Throwable { boolean rejectedExecutionExpected = true; executeBulkRejectionLoad(BackoffPolicy.noBackoff(), rejectedExecutionExpected); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index a2cd7e9820a12..51d3709ba1b2d 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -87,7 +87,8 @@ public void testSyncRetryBacksOff() throws Exception { BulkResponse response = Retry .on(EsRejectedExecutionException.class) .policy(backoff) - .withSyncBackoff(bulkClient, bulkRequest); + .using(bulkClient.threadPool()) + .withSyncBackoff(bulkClient::bulk, bulkRequest, bulkClient.settings()); assertFalse(response.hasFailures()); assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); @@ -100,7 +101,8 @@ public void testSyncRetryFailsAfterBackoff() throws Exception { BulkResponse response = Retry .on(EsRejectedExecutionException.class) .policy(backoff) - .withSyncBackoff(bulkClient, bulkRequest); + .using(bulkClient.threadPool()) + .withSyncBackoff(bulkClient::bulk, bulkRequest, bulkClient.settings()); assertTrue(response.hasFailures()); assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions())); @@ -113,7 +115,8 @@ public void testAsyncRetryBacksOff() throws Exception { BulkRequest bulkRequest = createBulkRequest(); 
Retry.on(EsRejectedExecutionException.class) .policy(backoff) - .withAsyncBackoff(bulkClient, bulkRequest, listener); + .using(bulkClient.threadPool()) + .withAsyncBackoff(bulkClient::bulk, bulkRequest, listener, bulkClient.settings()); listener.awaitCallbacksCalled(); listener.assertOnResponseCalled(); @@ -129,7 +132,8 @@ public void testAsyncRetryFailsAfterBacksOff() throws Exception { BulkRequest bulkRequest = createBulkRequest(); Retry.on(EsRejectedExecutionException.class) .policy(backoff) - .withAsyncBackoff(bulkClient, bulkRequest, listener); + .using(bulkClient.threadPool()) + .withAsyncBackoff(bulkClient::bulk, bulkRequest, listener, bulkClient.settings()); listener.awaitCallbacksCalled(); diff --git a/docs/java-rest/high-level/apis.asciidoc b/docs/java-rest/high-level/apis.asciidoc new file mode 100644 index 0000000000000..f021e93b84b83 --- /dev/null +++ b/docs/java-rest/high-level/apis.asciidoc @@ -0,0 +1,10 @@ +* index API + +* get API + +* <> + +* bulk API + +* search API + diff --git a/docs/java-rest/high-level/document/delete.asciidoc b/docs/java-rest/high-level/document/delete.asciidoc new file mode 100644 index 0000000000000..e9ba8b1940856 --- /dev/null +++ b/docs/java-rest/high-level/document/delete.asciidoc @@ -0,0 +1,67 @@ +[[java-rest-high-document-delete]] +=== Delete API + +[[java-rest-high-document-delete-request]] +==== Delete Request + +The simplest Delete Request is: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-request] +-------------------------------------------------- +<1> Index name +<2> Type +<3> Document id + + +You can also provide the following properties: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-request-props] +-------------------------------------------------- +<1> 
Timeout +<2> Timeout as String +<3> Refresh policy +<4> Refresh policy as String +<5> Version +<6> Version type + +[[java-rest-high-document-delete-sync]] +==== Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-execute] +-------------------------------------------------- + +[[java-rest-high-document-delete-async]] +==== Asynchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-execute-async] +-------------------------------------------------- +<1> Implement if needed when execution did not throw an exception +<2> Implement if needed in case of failure + +[[java-rest-high-document-delete-response]] +==== Delete Response + +In the Delete Response object, you can check for example the result of the operation: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-notfound] +-------------------------------------------------- +<1> Do something if we did not find the document which should have been deleted + +Note that if you have a version conflict because you defined the version within the +<>, it will raise an `ElasticsearchException` like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/DeleteDocumentationIT.java[delete-conflict] +-------------------------------------------------- +<1> We got a version conflict diff --git a/docs/java-rest/high-level/document/index.asciidoc b/docs/java-rest/high-level/document/index.asciidoc new file mode 100644 index 0000000000000..32815794a1ba2 --- /dev/null +++ b/docs/java-rest/high-level/document/index.asciidoc @@ -0,0 +1,5 @@ +:doc-tests: 
{docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation + +include::delete.asciidoc[] + +:doc-tests!: diff --git a/docs/java-rest/high-level/index.asciidoc b/docs/java-rest/high-level/index.asciidoc new file mode 100644 index 0000000000000..8b8dbe65ffd5f --- /dev/null +++ b/docs/java-rest/high-level/index.asciidoc @@ -0,0 +1,14 @@ +[[java-rest-high]] +== Java High Level REST Client + +The <>'s features include: + +include::apis.asciidoc[] + +It depends on elasticsearch core project as it uses elasticsearch request and response +objects so it will simplify a migration from the transport client. + + +include::usage.asciidoc[] + +include::document/index.asciidoc[] diff --git a/docs/java-rest/high-level/usage.asciidoc b/docs/java-rest/high-level/usage.asciidoc new file mode 100644 index 0000000000000..1ea6c7b5d771b --- /dev/null +++ b/docs/java-rest/high-level/usage.asciidoc @@ -0,0 +1,75 @@ +[[java-rest-high-usage]] +=== Getting started + +[[java-rest-high-usage-maven]] +==== Maven Repository + +The high-level Java REST client is hosted on +http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.elasticsearch.client%22[Maven +Central]. The minimum Java version required is `1.8`. + +The high-level REST client is subject to the same release cycle as +elasticsearch. Replace the version with the desired client version. + +[[java-rest-high-usage-maven-maven]] +===== Maven configuration + +Here is how you can configure the dependency using maven as a dependency manager. +Add the following to your `pom.xml` file: + +["source","xml",subs="attributes"] +-------------------------------------------------- + + org.elasticsearch.client + rest-high-level + {version} + +-------------------------------------------------- + +[[java-rest-high-usage-maven-gradle]] +===== Gradle configuration + +Here is how you can configure the dependency using gradle as a dependency manager. 
+Add the following to your `build.gradle` file: + +["source","groovy",subs="attributes"] +-------------------------------------------------- +dependencies { + compile 'org.elasticsearch.client:rest-high-level:{version}' +} +-------------------------------------------------- + +[[java-rest-high-usage-dependencies]] +==== Dependencies + +The high-level Java REST client depends on the following artifacts and their +transitive dependencies: + +- org.elasticsearch.client:rest +- org.elasticsearch:elasticsearch + + +[[java-rest-high-usage-initialization]] +==== Initialization + +A `RestHighLevelClient` instance needs a <> +to be built as follows: + +[source,java] +-------------------------------------------------- +RestHighLevelClient client = + new RestHighLevelClient(lowLevelRestClient); <1> +-------------------------------------------------- +<1> We pass the <> instance + +In the rest of this documentation about the high-level client, the `RestHighLevelClient` instance +will be referenced as `client`. + +Then you have access to the high level APIs such as: + +include::apis.asciidoc[] + +Each API can be executed synchronously (i.e. <>) or +asynchronously (i.e. <>). +The asynchronous APIs require a listener that is called on thread pool managed by the low level client +when the response is received. 
diff --git a/docs/java-rest/index.asciidoc b/docs/java-rest/index.asciidoc index 683c495c7f8d3..d44470199249d 100644 --- a/docs/java-rest/index.asciidoc +++ b/docs/java-rest/index.asciidoc @@ -5,8 +5,8 @@ include::../Versions.asciidoc[] include::overview.asciidoc[] -include::usage.asciidoc[] +include::low-level/index.asciidoc[] -include::configuration.asciidoc[] +include::high-level/index.asciidoc[] -include::sniffer.asciidoc[] +include::license.asciidoc[] diff --git a/docs/java-rest/license.asciidoc b/docs/java-rest/license.asciidoc new file mode 100644 index 0000000000000..ca858c291e6f7 --- /dev/null +++ b/docs/java-rest/license.asciidoc @@ -0,0 +1,16 @@ +[[java-rest-license]] +== License + +Copyright 2013-2017 Elasticsearch + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/docs/java-rest/configuration.asciidoc b/docs/java-rest/low-level/configuration.asciidoc similarity index 98% rename from docs/java-rest/configuration.asciidoc rename to docs/java-rest/low-level/configuration.asciidoc index 5fc6d37daa846..a75e1620c7e72 100644 --- a/docs/java-rest/configuration.asciidoc +++ b/docs/java-rest/low-level/configuration.asciidoc @@ -1,4 +1,4 @@ -== Common configuration +=== Common configuration The `RestClientBuilder` supports providing both a `RequestConfigCallback` and an `HttpClientConfigCallback` which allow for any customization that the Apache @@ -8,7 +8,7 @@ configuration that the `RestClient` is initialized with. 
This section describes some common scenarios that require additional configuration for the low-level Java REST Client. -=== Timeouts +==== Timeouts Configuring requests timeouts can be done by providing an instance of `RequestConfigCallback` while building the `RestClient` through its builder. @@ -34,7 +34,7 @@ RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) .build(); -------------------------------------------------- -=== Number of threads +==== Number of threads The Apache Http Async Client starts by default one dispatcher thread, and a number of worker threads used by the connection manager, as many as the number @@ -55,7 +55,7 @@ RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) .build(); -------------------------------------------------- -=== Basic authentication +==== Basic authentication Configuring basic authentication can be done by providing an `HttpClientConfigCallback` while building the `RestClient` through its builder. @@ -104,7 +104,7 @@ RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) .build(); -------------------------------------------------- -=== Encrypted communication +==== Encrypted communication Encrypted communication can also be configured through the `HttpClientConfigCallback`. The @@ -130,7 +130,7 @@ RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) .build(); -------------------------------------------------- -=== Others +==== Others For any other required configuration needed, the Apache HttpAsyncClient docs should be consulted: https://hc.apache.org/httpcomponents-asyncclient-4.1.x/ . 
diff --git a/docs/java-rest/low-level/index.asciidoc b/docs/java-rest/low-level/index.asciidoc new file mode 100644 index 0000000000000..082232af586ea --- /dev/null +++ b/docs/java-rest/low-level/index.asciidoc @@ -0,0 +1,27 @@ +[[java-rest-low]] +== Java Low Level REST Client + +The low-level client's features include: + +* minimal dependencies + +* load balancing across all available nodes + +* failover in case of node failures and upon specific response codes + +* failed connection penalization (whether a failed node is retried depends on + how many consecutive times it failed; the more failed attempts the longer the + client will wait before trying that same node again) + +* persistent connections + +* trace logging of requests and responses + +* optional automatic <> + + +include::usage.asciidoc[] + +include::configuration.asciidoc[] + +include::sniffer.asciidoc[] \ No newline at end of file diff --git a/docs/java-rest/sniffer.asciidoc b/docs/java-rest/low-level/sniffer.asciidoc similarity index 97% rename from docs/java-rest/sniffer.asciidoc rename to docs/java-rest/low-level/sniffer.asciidoc index 6c4c531306b84..081ecd3dd6634 100644 --- a/docs/java-rest/sniffer.asciidoc +++ b/docs/java-rest/low-level/sniffer.asciidoc @@ -1,5 +1,5 @@ [[sniffer]] -== Sniffer +=== Sniffer Minimal library that allows to automatically discover nodes from a running Elasticsearch cluster and set them to an existing `RestClient` instance. @@ -8,7 +8,7 @@ Nodes Info api and uses jackson to parse the obtained json response. Compatible with Elasticsearch 2.x and onwards. -=== Maven Repository +==== Maven Repository The low-level REST client is subject to the same release cycle as elasticsearch. Replace the version with the desired sniffer version, first @@ -17,7 +17,7 @@ and the elasticsearch version that the client can communicate with. Sniffer supports fetching the nodes list from elasticsearch 2.x and onwards. 
-==== Maven configuration +===== Maven configuration Here is how you can configure the dependency using maven as a dependency manager. Add the following to your `pom.xml` file: @@ -31,7 +31,7 @@ Add the following to your `pom.xml` file: -------------------------------------------------- -==== Gradle configuration +===== Gradle configuration Here is how you can configure the dependency using gradle as a dependency manager. Add the following to your `build.gradle` file: @@ -43,7 +43,7 @@ dependencies { } -------------------------------------------------- -=== Usage +==== Usage Once a `RestClient` instance has been created, a `Sniffer` can be associated to it. The `Sniffer` will make use of the provided `RestClient` to periodically @@ -133,9 +133,9 @@ Sniffer sniffer = Sniffer.builder(restClient) Note that this last configuration parameter has no effect in case sniffing on failure is not enabled like explained above. -=== License +==== License -Copyright 2013-2016 Elasticsearch +Copyright 2013-2017 Elasticsearch Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/docs/java-rest/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc similarity index 95% rename from docs/java-rest/usage.asciidoc rename to docs/java-rest/low-level/usage.asciidoc index 46edb4f57c9a4..fbd0c1e61ee21 100644 --- a/docs/java-rest/usage.asciidoc +++ b/docs/java-rest/low-level/usage.asciidoc @@ -1,6 +1,8 @@ -== Getting started +[[java-rest-low-usage]] +=== Getting started -=== Maven Repository +[[java-rest-low-usage-maven]] +==== Maven Repository The low-level Java REST client is hosted on http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.elasticsearch.client%22[Maven @@ -8,11 +10,12 @@ Central]. The minimum Java version required is `1.7`. The low-level REST client is subject to the same release cycle as elasticsearch. 
Replace the version with the desired client version, first -released with `5.0.0-alpha4`. There is no relation between the client version +released with `5.0.0-alpha4`. There is no relation between the client version and the elasticsearch version that the client can communicate with. The low-level REST client is compatible with all elasticsearch versions. -==== Maven configuration +[[java-rest-low-usage-maven-maven]] +===== Maven configuration Here is how you can configure the dependency using maven as a dependency manager. Add the following to your `pom.xml` file: @@ -26,7 +29,8 @@ Add the following to your `pom.xml` file: -------------------------------------------------- -==== Gradle configuration +[[java-rest-low-usage-maven-gradle]] +===== Gradle configuration Here is how you can configure the dependency using gradle as a dependency manager. Add the following to your `build.gradle` file: @@ -38,7 +42,8 @@ dependencies { } -------------------------------------------------- -=== Dependencies +[[java-rest-low-usage-dependencies]] +==== Dependencies The low-level Java REST client internally uses the http://hc.apache.org/httpcomponents-asyncclient-dev/[Apache Http Async Client] @@ -52,8 +57,8 @@ http://hc.apache.org/httpcomponents-asyncclient-dev/[Apache Http Async Client] - commons-codec:commons-codec - commons-logging:commons-logging - -=== Initialization +[[java-rest-low-usage-initialization]] +==== Initialization A `RestClient` instance can be built through the corresponding `RestClientBuilder` class, created via `RestClient#builder(HttpHost...)` @@ -100,8 +105,8 @@ the https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache http://hc.apache.org/httpcomponents-asyncclient-dev/httpasyncclient/apidocs/org/apache/http/impl/nio/client/HttpAsyncClientBuilder.html[`org.apache.http.impl.nio.client.HttpAsyncClientBuilder`] allows to set) - -=== Performing requests +[[java-rest-low-usage-requests]] +==== Performing requests Once the `RestClient` has 
been created, requests can be sent by calling one of the available `performRequest` or `performRequestAsync` method variants. @@ -159,7 +164,8 @@ void performRequestAsync(String method, String endpoint, Header... headers); -------------------------------------------------- -==== Request Arguments +[[java-rest-low-usage-requests-arguments]] +===== Request Arguments The following are the arguments accepted by the different methods: @@ -179,7 +185,8 @@ http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/org/apache/http request success or failure `headers`:: optional request headers -=== Reading responses +[[java-rest-low-usage-responses]] +==== Reading responses The `Response` object, either returned by the synchronous `performRequest` methods or received as an argument in `ResponseListener#onSuccess(Response)`, wraps the @@ -215,8 +222,8 @@ with the get api as it can return `404` when the document is missing, in which case the response body will not contain an error but rather the usual get api response, just without the document as it was not found. - -=== Example requests +[[java-rest-low-usage-example]] +==== Example requests Here are a couple of examples: @@ -294,7 +301,8 @@ latch.await(); -------------------------------------------------- -=== Logging +[[java-rest-low-usage-logging]] +==== Logging The Java REST client uses the same logging library that the Apache Async Http Client uses: https://commons.apache.org/proper/commons-logging/[Apache Commons Logging], diff --git a/docs/java-rest/overview.asciidoc b/docs/java-rest/overview.asciidoc index 206fcb931b496..3c5ea06c1dda2 100644 --- a/docs/java-rest/overview.asciidoc +++ b/docs/java-rest/overview.asciidoc @@ -1,42 +1,11 @@ +[[java-rest-overview]] == Overview -Official low-level client for Elasticsearch. Allows to communicate with an -Elasticsearch cluster through http. Compatible with all elasticsearch versions. 
+The Java REST Client comes with 2 flavors: -=== Features - -The low-level client's features include: - -* minimal dependencies - -* load balancing across all available nodes - -* failover in case of node failures and upon specific response codes - -* failed connection penalization (whether a failed node is retried depends on - how many consecutive times it failed; the more failed attempts the longer the - client will wait before trying that same node again) - -* persistent connections - -* trace logging of requests and responses - -* optional automatic <> - - -=== License - -Copyright 2013-2016 Elasticsearch - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. +* <>: which is the official low-level client for Elasticsearch. +It allows you to communicate with an Elasticsearch cluster through http and is compatible +with all elasticsearch versions. +* <>: which is the official high-level client for Elasticsearch. It adds support for +part of the elasticsearch document level and search API on top of the low-level client. 
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 1f6ff0d395642..e4272e8d96e39 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -36,6 +36,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -107,6 +108,7 @@ public abstract class AbstractAsyncBulkByScrollAction listener; private final Retry bulkRetry; private final ScrollableHitSource scrollSource; + private final Settings settings; /** * This BiFunction is used to apply various changes depending of the Reindex action and the search hit, @@ -115,19 +117,27 @@ public abstract class AbstractAsyncBulkByScrollAction, ScrollableHitSource.Hit, RequestWrapper> scriptApplier; + public AbstractAsyncBulkByScrollAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, + ThreadPool threadPool, Request mainRequest, ScriptService scriptService, + ClusterState clusterState, ActionListener listener) { + this(task, logger, client, threadPool, mainRequest, scriptService, clusterState, listener, client.settings()); + } + public AbstractAsyncBulkByScrollAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, Request mainRequest, ScriptService scriptService, ClusterState clusterState, - ActionListener listener) { + ActionListener listener, Settings settings) { this.task = task; this.logger = 
logger; this.client = client; + this.settings = settings; this.threadPool = threadPool; this.scriptService = scriptService; this.clusterState = clusterState; this.mainRequest = mainRequest; this.listener = listener; BackoffPolicy backoffPolicy = buildBackoffPolicy(); - bulkRetry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.wrap(backoffPolicy, task::countBulkRetry)); + bulkRetry = Retry.on(EsRejectedExecutionException.class) + .policy(BackoffPolicy.wrap(backoffPolicy, task::countBulkRetry)).using(threadPool); scrollSource = buildScrollableResultSource(backoffPolicy); scriptApplier = Objects.requireNonNull(buildScriptApplier(), "script applier must not be null"); /* @@ -340,7 +350,7 @@ void sendBulkRequest(TimeValue thisBatchStartTime, BulkRequest request) { finishHim(null); return; } - bulkRetry.withAsyncBackoff(client, request, new ActionListener() { + bulkRetry.withAsyncBackoff(client::bulk, request, new ActionListener() { @Override public void onResponse(BulkResponse response) { onBulkResponse(thisBatchStartTime, response); @@ -350,7 +360,7 @@ public void onResponse(BulkResponse response) { public void onFailure(Exception e) { finishHim(e); } - }); + }, settings); } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 41ba23c8e1dcc..13caabda8b6ab 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -245,10 +245,16 @@ static class AsyncIndexBySearchAction extends AbstractAsyncBulkByScrollAction createdThreads = emptyList(); + AsyncIndexBySearchAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, + ThreadPool threadPool, ReindexRequest request, ScriptService scriptService, ClusterState clusterState, + 
ActionListener listener) { + this(task, logger, client, threadPool, request, scriptService, clusterState, listener, client.settings()); + } + AsyncIndexBySearchAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, ReindexRequest request, ScriptService scriptService, ClusterState clusterState, - ActionListener listener) { - super(task, logger, client, threadPool, request, scriptService, clusterState, listener); + ActionListener listener, Settings settings) { + super(task, logger, client, threadPool, request, scriptService, clusterState, listener, settings); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index e24ec0846154d..a8f8528d35626 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -86,10 +86,16 @@ protected void doExecute(UpdateByQueryRequest request, ActionListener { + AsyncIndexBySearchAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, + ThreadPool threadPool, UpdateByQueryRequest request, ScriptService scriptService, + ClusterState clusterState, ActionListener listener) { + this(task, logger, client, threadPool, request, scriptService, clusterState, listener, client.settings()); + } + AsyncIndexBySearchAction(WorkingBulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, UpdateByQueryRequest request, ScriptService scriptService, ClusterState clusterState, - ActionListener listener) { - super(task, logger, client, threadPool, request, scriptService, clusterState, listener); + ActionListener listener, Settings settings) { + super(task, logger, client, threadPool, request, scriptService, clusterState, 
listener, settings); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 90b94624bb077..cce721dbfe602 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -663,7 +663,7 @@ private void simulateScrollResponse(DummyAsyncBulkByScrollAction action, TimeVal private class DummyAsyncBulkByScrollAction extends AbstractAsyncBulkByScrollAction { DummyAsyncBulkByScrollAction() { super(testTask, AsyncBulkByScrollActionTests.this.logger, new ParentTaskAssigningClient(client, localNode, testTask), - client.threadPool(), testRequest, null, null, listener); + client.threadPool(), testRequest, null, null, listener, Settings.EMPTY); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java index eaf1aea6f034a..4611f9dcbcddb 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.settings.Settings; /** * Index-by-search test for ttl, timestamp, and routing. 
@@ -78,7 +79,7 @@ protected ReindexRequest request() { private class TestAction extends TransportReindexAction.AsyncIndexBySearchAction { TestAction() { super(ReindexMetadataTests.this.task, ReindexMetadataTests.this.logger, null, ReindexMetadataTests.this.threadPool, request(), - null, null, listener()); + null, null, listener(), Settings.EMPTY); } public ReindexRequest mainRequest() { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index 0862d659d0183..36d4a1d0c0ce8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -138,6 +139,6 @@ protected ReindexRequest request() { @Override protected TransportReindexAction.AsyncIndexBySearchAction action(ScriptService scriptService, ReindexRequest request) { return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, scriptService, null, - listener()); + listener(), Settings.EMPTY); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 549adb145135b..9a5cb4f38dd30 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -73,8 +73,8 @@ public void setUp() throws Exception { for (int i = 0; i < DOC_COUNT; i++) { bulk.add(client().prepareIndex("source", 
"test").setSource("foo", "bar " + i)); } - Retry retry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.exponentialBackoff()); - BulkResponse response = retry.withSyncBackoff(client(), bulk.request()); + Retry retry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.exponentialBackoff()).using(client().threadPool()); + BulkResponse response = retry.withSyncBackoff(client()::bulk, bulk.request(), client().settings()); assertFalse(response.buildFailureMessage(), response.hasFailures()); client().admin().indices().prepareRefresh("source").get(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java index 2aaf54e9aed5a..b688ce019e3df 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.settings.Settings; public class UpdateByQueryMetadataTests extends AbstractAsyncBulkByScrollActionMetadataTestCase { @@ -44,7 +45,8 @@ protected UpdateByQueryRequest request() { private class TestAction extends TransportUpdateByQueryAction.AsyncIndexBySearchAction { TestAction() { super(UpdateByQueryMetadataTests.this.task, UpdateByQueryMetadataTests.this.logger, null, - UpdateByQueryMetadataTests.this.threadPool, request(), null, null, listener()); + UpdateByQueryMetadataTests.this.threadPool, request(), null, null, listener(), + Settings.EMPTY); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 9d6e6c5e0cae6..608dad41c03e3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import java.util.Date; @@ -55,6 +56,6 @@ protected UpdateByQueryRequest request() { @Override protected TransportUpdateByQueryAction.AsyncIndexBySearchAction action(ScriptService scriptService, UpdateByQueryRequest request) { return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, scriptService, null, - listener()); + listener(), Settings.EMPTY); } } diff --git a/settings.gradle b/settings.gradle index dee2194eaa81c..aefb33866d992 100644 --- a/settings.gradle +++ b/settings.gradle @@ -7,6 +7,7 @@ List projects = [ 'core', 'docs', 'client:rest', + 'client:rest-high-level', 'client:sniffer', 'client:transport', 'client:test',