listener) {
+ return restHighLevelClient.performRequestAsyncAndParseEntity(request, AsyncSearchRequestConverters::deleteAsyncSearch, options,
+ AcknowledgedResponse::fromXContent, listener, emptySet());
+ }
+
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java
new file mode 100644
index 0000000000000..8a63589a55c51
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.elasticsearch.client.RequestConverters.Params;
+import org.elasticsearch.client.asyncsearch.DeleteAsyncSearchRequest;
+import org.elasticsearch.client.asyncsearch.GetAsyncSearchRequest;
+import org.elasticsearch.client.asyncsearch.SubmitAsyncSearchRequest;
+import org.elasticsearch.rest.action.search.RestSearchAction;
+
+import java.io.IOException;
+import java.util.Locale;
+
+import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
+
+final class AsyncSearchRequestConverters {
+
+ static Request submitAsyncSearch(SubmitAsyncSearchRequest asyncSearchRequest) throws IOException {
+ String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(
+ asyncSearchRequest.getIndices())
+ .addPathPartAsIs("_async_search").build();
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ Params params = new RequestConverters.Params();
+ // add all typical search params and search request source as body
+ addSearchRequestParams(params, asyncSearchRequest);
+ if (asyncSearchRequest.getSearchSource() != null) {
+ request.setEntity(RequestConverters.createEntity(asyncSearchRequest.getSearchSource(), REQUEST_BODY_CONTENT_TYPE));
+ }
+ // set async search submit specific parameters
+ if (asyncSearchRequest.isCleanOnCompletion() != null) {
+ params.putParam("clean_on_completion", asyncSearchRequest.isCleanOnCompletion().toString());
+ }
+ if (asyncSearchRequest.getKeepAlive() != null) {
+ params.putParam("keep_alive", asyncSearchRequest.getKeepAlive().getStringRep());
+ }
+ if (asyncSearchRequest.getWaitForCompletion() != null) {
+ params.putParam("wait_for_completion", asyncSearchRequest.getWaitForCompletion().getStringRep());
+ }
+ request.addParameters(params.asMap());
+ return request;
+ }
+
+ static void addSearchRequestParams(Params params, SubmitAsyncSearchRequest request) {
+ params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
+ params.withRouting(request.getRouting());
+ params.withPreference(request.getPreference());
+ params.withIndicesOptions(request.getIndicesOptions());
+ params.withSearchType(request.getSearchType().name().toLowerCase(Locale.ROOT));
+ params.withMaxConcurrentShardRequests(request.getMaxConcurrentShardRequests());
+ if (request.getRequestCache() != null) {
+ params.withRequestCache(request.getRequestCache());
+ }
+ if (request.getAllowPartialSearchResults() != null) {
+ params.withAllowPartialResults(request.getAllowPartialSearchResults());
+ }
+ params.withBatchedReduceSize(request.getBatchedReduceSize());
+ }
+
+ static Request getAsyncSearch(GetAsyncSearchRequest asyncSearchRequest) throws IOException {
+ String endpoint = new RequestConverters.EndpointBuilder()
+ .addPathPartAsIs("_async_search")
+ .addPathPart(asyncSearchRequest.getId())
+ .build();
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+ Params params = new RequestConverters.Params();
+ if (asyncSearchRequest.getKeepAlive() != null) {
+ params.putParam("keep_alive", asyncSearchRequest.getKeepAlive().getStringRep());
+ }
+ if (asyncSearchRequest.getWaitForCompletion() != null) {
+ params.putParam("wait_for_completion", asyncSearchRequest.getWaitForCompletion().getStringRep());
+ }
+ request.addParameters(params.asMap());
+ return request;
+ }
+
+ static Request deleteAsyncSearch(DeleteAsyncSearchRequest deleteAsyncSearchRequest) throws IOException {
+ String endpoint = new RequestConverters.EndpointBuilder()
+ .addPathPartAsIs("_async_search")
+ .addPathPart(deleteAsyncSearchRequest.getId())
+ .build();
+ return new Request(HttpDelete.METHOD_NAME, endpoint);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index b9b3c4b31a414..d0cd3ea5a0091 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -403,20 +403,24 @@ static Request search(SearchRequest searchRequest, String searchEndpoint) throws
return request;
}
- private static void addSearchRequestParams(Params params, SearchRequest searchRequest) {
+ static void addSearchRequestParams(Params params, SearchRequest searchRequest) {
params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
params.withRouting(searchRequest.routing());
params.withPreference(searchRequest.preference());
params.withIndicesOptions(searchRequest.indicesOptions());
- params.putParam("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
+ params.withSearchType(searchRequest.searchType().name().toLowerCase(Locale.ROOT));
params.putParam("ccs_minimize_roundtrips", Boolean.toString(searchRequest.isCcsMinimizeRoundtrips()));
+ if (searchRequest.getPreFilterShardSize() != null) {
+ params.putParam("pre_filter_shard_size", Integer.toString(searchRequest.getPreFilterShardSize()));
+ }
+ params.withMaxConcurrentShardRequests(searchRequest.getMaxConcurrentShardRequests());
if (searchRequest.requestCache() != null) {
- params.putParam("request_cache", Boolean.toString(searchRequest.requestCache()));
+ params.withRequestCache(searchRequest.requestCache());
}
if (searchRequest.allowPartialSearchResults() != null) {
- params.putParam("allow_partial_search_results", Boolean.toString(searchRequest.allowPartialSearchResults()));
+ params.withAllowPartialResults(searchRequest.allowPartialSearchResults());
}
- params.putParam("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize()));
+ params.withBatchedReduceSize(searchRequest.getBatchedReduceSize());
if (searchRequest.scroll() != null) {
params.putParam("scroll", searchRequest.scroll().keepAlive());
}
@@ -858,6 +862,26 @@ Params withPreference(String preference) {
return putParam("preference", preference);
}
+ Params withSearchType(String searchType) {
+ return putParam("search_type", searchType);
+ }
+
+ Params withMaxConcurrentShardRequests(int maxConcurrentShardRequests) {
+ return putParam("max_concurrent_shard_requests", Integer.toString(maxConcurrentShardRequests));
+ }
+
+ Params withBatchedReduceSize(int batchedReduceSize) {
+ return putParam("batched_reduce_size", Integer.toString(batchedReduceSize));
+ }
+
+ Params withRequestCache(boolean requestCache) {
+ return putParam("request_cache", Boolean.toString(requestCache));
+ }
+
+ Params withAllowPartialResults(boolean allowPartialSearchResults) {
+ return putParam("allow_partial_search_results", Boolean.toString(allowPartialSearchResults));
+ }
+
Params withRealtime(boolean realtime) {
if (realtime == false) {
return putParam("realtime", Boolean.FALSE.toString());
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index b464c2166f865..8b5262d6aada6 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -265,6 +265,7 @@ public class RestHighLevelClient implements Closeable {
private final TransformClient transformClient = new TransformClient(this);
private final EnrichClient enrichClient = new EnrichClient(this);
private final EqlClient eqlClient = new EqlClient(this);
+ private final AsyncSearchClient asyncSearchClient = new AsyncSearchClient(this);
/**
* Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
@@ -428,13 +429,23 @@ public final XPackClient xpack() {
* A wrapper for the {@link RestHighLevelClient} that provides methods for
* accessing the Elastic Index Lifecycle APIs.
*
- * See the X-Pack APIs
+ * See the X-Pack APIs
* on elastic.co for more information.
*/
public IndexLifecycleClient indexLifecycle() {
return ilmClient;
}
+ /**
+ * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Elastic Index Async Search APIs.
+ *
+ * See the X-Pack APIs on elastic.co
+ * for more information.
+ */
+ public AsyncSearchClient asyncSearch() {
+ return asyncSearchClient;
+ }
+
/**
* Provides methods for accessing the Elastic Licensed Migration APIs that
* are shipped with the default distribution of Elasticsearch. All of
@@ -1888,12 +1899,7 @@ protected static boolean convertExistsResponse(Response response) {
* emitted there just mean that you are talking to an old version of
* Elasticsearch. There isn't anything you can do about the deprecation.
*/
- private static final DeprecationHandler DEPRECATION_HANDLER = new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {}
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {}
- };
+ private static final DeprecationHandler DEPRECATION_HANDLER = DeprecationHandler.IGNORE_DEPRECATIONS;
static List getDefaultNamedXContents() {
Map> map = new HashMap<>();
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/AsyncSearchResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/AsyncSearchResponse.java
new file mode 100644
index 0000000000000..07d3ce81fea8c
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/AsyncSearchResponse.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+
+/**
+ * A response of an async search request.
+ */
+public class AsyncSearchResponse implements ToXContentObject {
+ @Nullable
+ private final String id;
+ @Nullable
+ private final SearchResponse searchResponse;
+ @Nullable
+ private final ElasticsearchException error;
+ private final boolean isRunning;
+ private final boolean isPartial;
+
+ private final long startTimeMillis;
+ private final long expirationTimeMillis;
+
+ /**
+ * Creates an {@link AsyncSearchResponse} with the arguments that are always present in the server response
+ */
+ AsyncSearchResponse(boolean isPartial,
+ boolean isRunning,
+ long startTimeMillis,
+ long expirationTimeMillis,
+ @Nullable String id,
+ @Nullable SearchResponse searchResponse,
+ @Nullable ElasticsearchException error) {
+ this.isPartial = isPartial;
+ this.isRunning = isRunning;
+ this.startTimeMillis = startTimeMillis;
+ this.expirationTimeMillis = expirationTimeMillis;
+ this.id = id;
+ this.searchResponse = searchResponse;
+ this.error = error;
+ }
+
+ /**
+ * Returns the id of the async search request or null if the response is not stored in the cluster.
+ */
+ @Nullable
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * Returns the current {@link SearchResponse} or null if not available.
+ *
+ * See {@link #isPartial()} to determine whether the response contains partial or complete
+ * results.
+ */
+ public SearchResponse getSearchResponse() {
+ return searchResponse;
+ }
+
+ /**
+ * Returns the failure reason or null if the query is running or has completed normally.
+ */
+ public ElasticsearchException getFailure() {
+ return error;
+ }
+
+ /**
+ * Returns true if the {@link SearchResponse} contains partial
+ * results computed from a subset of the total shards.
+ */
+ public boolean isPartial() {
+ return isPartial;
+ }
+
+ /**
+ * Whether the search is still running in the cluster.
+ *
+ * A value of false indicates that the response is final
+ * even if {@link #isPartial()} returns true. In such case,
+ * the partial response represents the status of the search before a
+ * non-recoverable failure.
+ */
+ public boolean isRunning() {
+ return isRunning;
+ }
+
+ /**
+ * The timestamp when this response was created, in milliseconds since epoch.
+ */
+ public long getStartTime() {
+ return startTimeMillis;
+ }
+
+ /**
+ * The timestamp when this response will expire, in milliseconds since epoch.
+ */
+ public long getExpirationTime() {
+ return expirationTimeMillis;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (id != null) {
+ builder.field("id", id);
+ }
+ builder.field("is_partial", isPartial);
+ builder.field("is_running", isRunning);
+ builder.field("start_time_in_millis", startTimeMillis);
+ builder.field("expiration_time_in_millis", expirationTimeMillis);
+
+ if (searchResponse != null) {
+ builder.field("response");
+ searchResponse.toXContent(builder, params);
+ }
+ if (error != null) {
+ builder.startObject("error");
+ error.toXContent(builder, params);
+ builder.endObject();
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ public static final ParseField ID_FIELD = new ParseField("id");
+ public static final ParseField IS_PARTIAL_FIELD = new ParseField("is_partial");
+ public static final ParseField IS_RUNNING_FIELD = new ParseField("is_running");
+ public static final ParseField START_TIME_FIELD = new ParseField("start_time_in_millis");
+ public static final ParseField EXPIRATION_FIELD = new ParseField("expiration_time_in_millis");
+ public static final ParseField RESPONSE_FIELD = new ParseField("response");
+ public static final ParseField ERROR_FIELD = new ParseField("error");
+
+ public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
+ "submit_async_search_response", true,
+ args -> new AsyncSearchResponse(
+ (boolean) args[0],
+ (boolean) args[1],
+ (long) args[2],
+ (long) args[3],
+ (String) args[4],
+ (SearchResponse) args[5],
+ (ElasticsearchException) args[6]));
+ static {
+ PARSER.declareBoolean(constructorArg(), IS_PARTIAL_FIELD);
+ PARSER.declareBoolean(constructorArg(), IS_RUNNING_FIELD);
+ PARSER.declareLong(constructorArg(), START_TIME_FIELD);
+ PARSER.declareLong(constructorArg(), EXPIRATION_FIELD);
+ PARSER.declareString(optionalConstructorArg(), ID_FIELD);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> AsyncSearchResponse.parseSearchResponse(p),
+ RESPONSE_FIELD);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), ERROR_FIELD);
+ }
+
+ private static SearchResponse parseSearchResponse(XContentParser p) throws IOException {
+ // we should be before the opening START_OBJECT of the response
+ ensureExpectedToken(Token.START_OBJECT, p.currentToken(), p::getTokenLocation);
+ p.nextToken();
+ return SearchResponse.innerFromXContent(p);
+ }
+
+ public static AsyncSearchResponse fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(this);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/DeleteAsyncSearchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/DeleteAsyncSearchRequest.java
new file mode 100644
index 0000000000000..3b37293212da0
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/DeleteAsyncSearchRequest.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.client.Validatable;
+
+import java.util.Objects;
+
+public class DeleteAsyncSearchRequest implements Validatable {
+
+ private final String id;
+
+ public DeleteAsyncSearchRequest(String id) {
+ this.id = id;
+}
+
+ public String getId() {
+ return this.id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ DeleteAsyncSearchRequest request = (DeleteAsyncSearchRequest) o;
+ return Objects.equals(getId(), request.getId());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getId());
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/GetAsyncSearchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/GetAsyncSearchRequest.java
new file mode 100644
index 0000000000000..11ad059349481
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/GetAsyncSearchRequest.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.common.unit.TimeValue;
+
+import java.util.Objects;
+import java.util.Optional;
+
+public class GetAsyncSearchRequest implements Validatable {
+
+ private TimeValue waitForCompletion;
+ private TimeValue keepAlive;
+
+ public static final long MIN_KEEPALIVE = TimeValue.timeValueMinutes(1).millis();
+
+ private final String id;
+
+ public GetAsyncSearchRequest(String id) {
+ this.id = id;
+ }
+
+ public String getId() {
+ return this.id;
+ }
+
+ public TimeValue getWaitForCompletion() {
+ return waitForCompletion;
+ }
+
+ public void setWaitForCompletion(TimeValue waitForCompletion) {
+ this.waitForCompletion = waitForCompletion;
+ }
+
+ public TimeValue getKeepAlive() {
+ return keepAlive;
+ }
+
+ public void setKeepAlive(TimeValue keepAlive) {
+ this.keepAlive = keepAlive;
+ }
+
+ @Override
+ public Optional validate() {
+ final ValidationException validationException = new ValidationException();
+ if (keepAlive != null && keepAlive.getMillis() < MIN_KEEPALIVE) {
+ validationException.addValidationError("keep_alive must be greater than 1 minute, got: " + keepAlive.toString());
+ }
+ if (validationException.validationErrors().isEmpty()) {
+ return Optional.empty();
+ }
+ return Optional.of(validationException);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ GetAsyncSearchRequest request = (GetAsyncSearchRequest) o;
+ return Objects.equals(getId(), request.getId())
+ && Objects.equals(getKeepAlive(), request.getKeepAlive())
+ && Objects.equals(getWaitForCompletion(), request.getWaitForCompletion());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getId(), getKeepAlive(), getWaitForCompletion());
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/SubmitAsyncSearchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/SubmitAsyncSearchRequest.java
new file mode 100644
index 0000000000000..1b0a07c4dea41
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/asyncsearch/SubmitAsyncSearchRequest.java
@@ -0,0 +1,284 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * A request to track asynchronously the progress of a search against one or more indices.
+ */
+public class SubmitAsyncSearchRequest implements Validatable {
+
+ public static final int DEFAULT_PRE_FILTER_SHARD_SIZE = 1;
+ public static final int DEFAULT_BATCHED_REDUCE_SIZE = 5;
+ private static final boolean DEFAULT_CCS_MINIMIZE_ROUNDTRIPS = false;
+ private static final boolean DEFAULT_REQUEST_CACHE_VALUE = true;
+
+ public static long MIN_KEEP_ALIVE = TimeValue.timeValueMinutes(1).millis();
+
+ private TimeValue waitForCompletion;
+ private Boolean cleanOnCompletion;
+ private TimeValue keepAlive;
+ private final SearchRequest searchRequest;
+
+ /**
+ * Creates a new request
+ */
+ public SubmitAsyncSearchRequest(SearchSourceBuilder source, String... indices) {
+ this.searchRequest = new SearchRequest(indices, source);
+ searchRequest.setCcsMinimizeRoundtrips(DEFAULT_CCS_MINIMIZE_ROUNDTRIPS);
+ searchRequest.setPreFilterShardSize(DEFAULT_PRE_FILTER_SHARD_SIZE);
+ searchRequest.setBatchedReduceSize(DEFAULT_BATCHED_REDUCE_SIZE);
+ searchRequest.requestCache(DEFAULT_REQUEST_CACHE_VALUE);
+ }
+
+ /**
+ * Get the target indices
+ */
+ public String[] getIndices() {
+ return this.searchRequest.indices();
+ }
+
+
+ /**
+ * Get the minimum time that the request should wait before returning a partial result (defaults to 1 second).
+ */
+ public TimeValue getWaitForCompletion() {
+ return waitForCompletion;
+ }
+
+ /**
+ * Sets the minimum time that the request should wait before returning a partial result (defaults to 1 second).
+ */
+ public void setWaitForCompletion(TimeValue waitForCompletion) {
+ this.waitForCompletion = waitForCompletion;
+ }
+
+ /**
+ * Returns whether the resource should be removed on completion or failure (defaults to true).
+ */
+ public Boolean isCleanOnCompletion() {
+ return cleanOnCompletion;
+ }
+
+ /**
+ * Determines if the resource should be removed on completion or failure (defaults to true).
+ */
+ public void setCleanOnCompletion(boolean cleanOnCompletion) {
+ this.cleanOnCompletion = cleanOnCompletion;
+ }
+
+ /**
+ * Get the amount of time after which the result will expire (defaults to 5 days).
+ */
+ public TimeValue getKeepAlive() {
+ return keepAlive;
+ }
+
+ /**
+ * Sets the amount of time after which the result will expire (defaults to 5 days).
+ */
+ public void setKeepAlive(TimeValue keepAlive) {
+ this.keepAlive = keepAlive;
+ }
+
+ // setters for request parameters of the wrapped SearchRequest
+ /**
+ * Set the routing value to control the shards that the search will be executed on.
+ * A comma separated list of routing values to control the shards the search will be executed on.
+ */
+ public void setRouting(String routing) {
+ this.searchRequest.routing(routing);
+ }
+
+ /**
+ * Set the routing values to control the shards that the search will be executed on.
+ */
+ public void setRoutings(String... routings) {
+ this.searchRequest.routing(routings);
+ }
+
+ /**
+ * Get the routing value to control the shards that the search will be executed on.
+ */
+ public String getRouting() {
+ return this.searchRequest.routing();
+ }
+
+ /**
+ * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
+ * {@code _local} to prefer local shards or a custom value, which guarantees that the same order
+ * will be used across different requests.
+ */
+ public void setPreference(String preference) {
+ this.searchRequest.preference(preference);
+ }
+
+ /**
+ * Get the preference to execute the search.
+ */
+ public String getPreference() {
+ return this.searchRequest.preference();
+ }
+
+ /**
+ * Specifies what type of requested indices to ignore and how to deal with indices wildcard expressions.
+ */
+ public void setIndicesOptions(IndicesOptions indicesOptions) {
+ this.searchRequest.indicesOptions(indicesOptions);
+ }
+
+ /**
+ * Get the indices Options.
+ */
+ public IndicesOptions getIndicesOptions() {
+ return this.searchRequest.indicesOptions();
+ }
+
+ /**
+ * The search type to execute, defaults to {@link SearchType#DEFAULT}.
+ */
+ public void setSearchType(SearchType searchType) {
+ this.searchRequest.searchType(searchType);
+ }
+
+ /**
+ * Get the search type to execute, defaults to {@link SearchType#DEFAULT}.
+ */
+ public SearchType getSearchType() {
+ return this.searchRequest.searchType();
+ }
+
+ /**
+ * Sets if this request should allow partial results. (If method is not called,
+ * will default to the cluster level setting).
+ */
+ public void setAllowPartialSearchResults(boolean allowPartialSearchResults) {
+ this.searchRequest.allowPartialSearchResults(allowPartialSearchResults);
+ }
+
+ /**
+ * Gets if this request should allow partial results.
+ */
+ public Boolean getAllowPartialSearchResults() {
+ return this.searchRequest.allowPartialSearchResults();
+ }
+
+ /**
+ * Sets the number of shard results that should be reduced at once on the coordinating node. This value should be used as a protection
+ * mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large.
+ */
+ public void setBatchedReduceSize(int batchedReduceSize) {
+ this.searchRequest.setBatchedReduceSize(batchedReduceSize);
+ }
+
+ /**
+ * Gets the number of shard results that should be reduced at once on the coordinating node.
+ * This defaults to 5 for {@link SubmitAsyncSearchRequest}.
+ */
+ public int getBatchedReduceSize() {
+ return this.searchRequest.getBatchedReduceSize();
+ }
+
+ /**
+ * Sets if this request should use the request cache or not, assuming that it can (for
+ * example, if "now" is used, it will never be cached). By default (not set, or null,
+ * will default to the index level setting if request cache is enabled or not).
+ */
+ public void setRequestCache(Boolean requestCache) {
+ this.searchRequest.requestCache(requestCache);
+ }
+
+ /**
+ * Gets if this request should use the request cache or not.
+ * Defaults to `true` for {@link SubmitAsyncSearchRequest}.
+ */
+ public Boolean getRequestCache() {
+ return this.searchRequest.requestCache();
+ }
+
+ /**
+ * Returns the number of shard requests that should be executed concurrently on a single node.
+ * The default is {@code 5}.
+ */
+ public int getMaxConcurrentShardRequests() {
+ return this.searchRequest.getMaxConcurrentShardRequests();
+ }
+
+ /**
+ * Sets the number of shard requests that should be executed concurrently on a single node.
+ * The default is {@code 5}.
+ */
+ public void setMaxConcurrentShardRequests(int maxConcurrentShardRequests) {
+ this.searchRequest.setMaxConcurrentShardRequests(maxConcurrentShardRequests);
+ }
+
+ /**
+ * Gets the {@link SearchSourceBuilder} initially used on this request.
+ */
+ public SearchSourceBuilder getSearchSource() {
+ return this.searchRequest.source();
+ }
+
+ @Override
+ public Optional validate() {
+ final ValidationException validationException = new ValidationException();
+ if (searchRequest.isSuggestOnly()) {
+ validationException.addValidationError("suggest-only queries are not supported");
+ }
+ if (keepAlive != null && keepAlive.getMillis() < MIN_KEEP_ALIVE) {
+ validationException.addValidationError("[keep_alive] must be greater than 1 minute, got: " + keepAlive.toString());
+ }
+ if (validationException.validationErrors().isEmpty()) {
+ return Optional.empty();
+ }
+ return Optional.of(validationException);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ SubmitAsyncSearchRequest request = (SubmitAsyncSearchRequest) o;
+ return Objects.equals(searchRequest, request.searchRequest)
+ && Objects.equals(getKeepAlive(), request.getKeepAlive())
+ && Objects.equals(getWaitForCompletion(), request.getWaitForCompletion())
+ && Objects.equals(isCleanOnCompletion(), request.isCleanOnCompletion());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(searchRequest, getKeepAlive(), getWaitForCompletion(), isCleanOnCompletion());
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/IndexerJobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/IndexerJobStats.java
index 5e59b4b19dbbe..dc332fa8a4ab0 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/IndexerJobStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/IndexerJobStats.java
@@ -31,8 +31,10 @@ public abstract class IndexerJobStats {
public static ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
public static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms");
public static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms");
+ public static ParseField PROCESSING_TIME_IN_MS = new ParseField("processing_time_in_ms");
public static ParseField INDEX_TOTAL = new ParseField("index_total");
public static ParseField SEARCH_TOTAL = new ParseField("search_total");
+ public static ParseField PROCESSING_TOTAL = new ParseField("processing_total");
public static ParseField SEARCH_FAILURES = new ParseField("search_failures");
public static ParseField INDEX_FAILURES = new ParseField("index_failures");
@@ -44,11 +46,14 @@ public abstract class IndexerJobStats {
protected final long indexTotal;
protected final long searchTime;
protected final long searchTotal;
+ protected final long processingTime;
+ protected final long processingTotal;
protected final long indexFailures;
protected final long searchFailures;
public IndexerJobStats(long numPages, long numInputDocuments, long numOutputDocuments, long numInvocations,
- long indexTime, long searchTime, long indexTotal, long searchTotal, long indexFailures, long searchFailures) {
+ long indexTime, long searchTime, long processingTime, long indexTotal, long searchTotal, long processingTotal,
+ long indexFailures, long searchFailures) {
this.numPages = numPages;
this.numInputDocuments = numInputDocuments;
this.numOuputDocuments = numOutputDocuments;
@@ -57,6 +62,8 @@ public IndexerJobStats(long numPages, long numInputDocuments, long numOutputDocu
this.indexTotal = indexTotal;
this.searchTime = searchTime;
this.searchTotal = searchTotal;
+ this.processingTime = processingTime;
+ this.processingTotal = processingTotal;
this.indexFailures = indexFailures;
this.searchFailures = searchFailures;
}
@@ -117,6 +124,13 @@ public long getSearchTime() {
return searchTime;
}
+ /**
+ * Returns the time spent processing (cumulative) in milliseconds
+ */
+ public long getProcessingTime() {
+ return processingTime;
+ }
+
/**
* Returns the total number of indexing requests that have been processed
* (Note: this is not the number of _documents_ that have been indexed)
@@ -132,6 +146,14 @@ public long getSearchTotal() {
return searchTotal;
}
+ /**
+ * Returns the total number of processing runs that have been made
+ */
+ public long getProcessingTotal() {
+ return processingTotal;
+ }
+
+
@Override
public boolean equals(Object other) {
if (this == other) {
@@ -149,16 +171,19 @@ public boolean equals(Object other) {
&& Objects.equals(this.numInvocations, that.numInvocations)
&& Objects.equals(this.indexTime, that.indexTime)
&& Objects.equals(this.searchTime, that.searchTime)
+ && Objects.equals(this.processingTime, that.processingTime)
&& Objects.equals(this.indexFailures, that.indexFailures)
&& Objects.equals(this.searchFailures, that.searchFailures)
&& Objects.equals(this.searchTotal, that.searchTotal)
+ && Objects.equals(this.processingTotal, that.processingTotal)
&& Objects.equals(this.indexTotal, that.indexTotal);
}
@Override
public int hashCode() {
return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations,
- indexTime, searchTime, indexFailures, searchFailures, searchTotal, indexTotal);
+ indexTime, searchTime, processingTime, indexFailures, searchFailures, searchTotal,
+ indexTotal, processingTotal);
}
@Override
@@ -172,6 +197,8 @@ public final String toString() {
+ ", index_time_in_ms=" + indexTime
+ ", index_total=" + indexTotal
+ ", search_time_in_ms=" + searchTime
- + ", search_total=" + searchTotal+ "}";
+ + ", search_total=" + searchTotal
+ + ", processing_time_in_ms=" + processingTime
+ + ", processing_total=" + processingTotal + "}";
}
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java
index 53e3adf2b8433..acdb9cccca1eb 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java
@@ -20,12 +20,16 @@
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.client.ml.NodeAttributes;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
+import org.elasticsearch.client.ml.dataframe.stats.common.DataCounts;
+import org.elasticsearch.client.ml.dataframe.stats.common.MemoryUsage;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.internal.ToStringBuilder;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.util.List;
@@ -44,7 +48,9 @@ public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws
static final ParseField STATE = new ParseField("state");
static final ParseField FAILURE_REASON = new ParseField("failure_reason");
static final ParseField PROGRESS = new ParseField("progress");
+ static final ParseField DATA_COUNTS = new ParseField("data_counts");
static final ParseField MEMORY_USAGE = new ParseField("memory_usage");
+ static final ParseField ANALYSIS_STATS = new ParseField("analysis_stats");
static final ParseField NODE = new ParseField("node");
static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
@@ -56,9 +62,11 @@ public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws
(DataFrameAnalyticsState) args[1],
(String) args[2],
(List) args[3],
- (MemoryUsage) args[4],
- (NodeAttributes) args[5],
- (String) args[6]));
+ (DataCounts) args[4],
+ (MemoryUsage) args[5],
+ (AnalysisStats) args[6],
+ (NodeAttributes) args[7],
+ (String) args[8]));
static {
PARSER.declareString(constructorArg(), ID);
@@ -70,27 +78,42 @@ public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws
}, STATE, ObjectParser.ValueType.STRING);
PARSER.declareString(optionalConstructorArg(), FAILURE_REASON);
PARSER.declareObjectArray(optionalConstructorArg(), PhaseProgress.PARSER, PROGRESS);
+ PARSER.declareObject(optionalConstructorArg(), DataCounts.PARSER, DATA_COUNTS);
PARSER.declareObject(optionalConstructorArg(), MemoryUsage.PARSER, MEMORY_USAGE);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAnalysisStats(p), ANALYSIS_STATS);
PARSER.declareObject(optionalConstructorArg(), NodeAttributes.PARSER, NODE);
PARSER.declareString(optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
}
+ private static AnalysisStats parseAnalysisStats(XContentParser parser) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
+ AnalysisStats analysisStats = parser.namedObject(AnalysisStats.class, parser.currentName(), true);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ return analysisStats;
+ }
+
private final String id;
private final DataFrameAnalyticsState state;
private final String failureReason;
private final List progress;
+ private final DataCounts dataCounts;
private final MemoryUsage memoryUsage;
+ private final AnalysisStats analysisStats;
private final NodeAttributes node;
private final String assignmentExplanation;
public DataFrameAnalyticsStats(String id, DataFrameAnalyticsState state, @Nullable String failureReason,
- @Nullable List progress, @Nullable MemoryUsage memoryUsage,
- @Nullable NodeAttributes node, @Nullable String assignmentExplanation) {
+ @Nullable List progress, @Nullable DataCounts dataCounts,
+ @Nullable MemoryUsage memoryUsage, @Nullable AnalysisStats analysisStats, @Nullable NodeAttributes node,
+ @Nullable String assignmentExplanation) {
this.id = id;
this.state = state;
this.failureReason = failureReason;
this.progress = progress;
+ this.dataCounts = dataCounts;
this.memoryUsage = memoryUsage;
+ this.analysisStats = analysisStats;
this.node = node;
this.assignmentExplanation = assignmentExplanation;
}
@@ -111,11 +134,21 @@ public List getProgress() {
return progress;
}
+ @Nullable
+ public DataCounts getDataCounts() {
+ return dataCounts;
+ }
+
@Nullable
public MemoryUsage getMemoryUsage() {
return memoryUsage;
}
+ @Nullable
+ public AnalysisStats getAnalysisStats() {
+ return analysisStats;
+ }
+
public NodeAttributes getNode() {
return node;
}
@@ -134,14 +167,16 @@ public boolean equals(Object o) {
&& Objects.equals(state, other.state)
&& Objects.equals(failureReason, other.failureReason)
&& Objects.equals(progress, other.progress)
+ && Objects.equals(dataCounts, other.dataCounts)
&& Objects.equals(memoryUsage, other.memoryUsage)
+ && Objects.equals(analysisStats, other.analysisStats)
&& Objects.equals(node, other.node)
&& Objects.equals(assignmentExplanation, other.assignmentExplanation);
}
@Override
public int hashCode() {
- return Objects.hash(id, state, failureReason, progress, memoryUsage, node, assignmentExplanation);
+ return Objects.hash(id, state, failureReason, progress, dataCounts, memoryUsage, analysisStats, node, assignmentExplanation);
}
@Override
@@ -151,7 +186,9 @@ public String toString() {
.add("state", state)
.add("failureReason", failureReason)
.add("progress", progress)
+ .add("dataCounts", dataCounts)
.add("memoryUsage", memoryUsage)
+ .add("analysisStats", analysisStats)
.add("node", node)
.add("assignmentExplanation", assignmentExplanation)
.toString();
diff --git a/modules/kibana/build.gradle b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java
similarity index 67%
rename from modules/kibana/build.gradle
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java
index f9d11e5a6c58b..c1a823682a762 100644
--- a/modules/kibana/build.gradle
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java
@@ -7,7 +7,7 @@
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
@@ -16,16 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
+package org.elasticsearch.client.ml.dataframe.stats;
-esplugin {
- description 'Plugin exposing APIs for Kibana system indices'
- classname 'org.elasticsearch.kibana.KibanaPlugin'
-}
+import org.elasticsearch.common.xcontent.ToXContentObject;
-dependencies {
- compile project(path: ':modules:reindex', configuration: 'runtime')
-}
+/**
+ * Statistics for the data frame analysis
+ */
+public interface AnalysisStats extends ToXContentObject {
-testClusters.integTest {
- module file(project(':modules:reindex').tasks.bundlePlugin.archiveFile)
+ String getName();
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java
new file mode 100644
index 0000000000000..8c9bc615e8653
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats;
+
+import org.elasticsearch.client.ml.dataframe.stats.classification.ClassificationStats;
+import org.elasticsearch.client.ml.dataframe.stats.outlierdetection.OutlierDetectionStats;
+import org.elasticsearch.client.ml.dataframe.stats.regression.RegressionStats;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.plugins.spi.NamedXContentProvider;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * SPI provider registering a named-xcontent parser for each concrete
+ * {@link AnalysisStats} implementation (classification, outlier detection, regression),
+ * keyed by that implementation's reported name.
+ */
+public class AnalysisStatsNamedXContentProvider implements NamedXContentProvider {
+
+ @Override
+ public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
+ return Arrays.asList(
+ new NamedXContentRegistry.Entry(
+ AnalysisStats.class,
+ ClassificationStats.NAME,
+ (p, c) -> ClassificationStats.PARSER.apply(p, null)
+ ),
+ new NamedXContentRegistry.Entry(
+ AnalysisStats.class,
+ OutlierDetectionStats.NAME,
+ (p, c) -> OutlierDetectionStats.PARSER.apply(p, null)
+ ),
+ new NamedXContentRegistry.Entry(
+ AnalysisStats.class,
+ RegressionStats.NAME,
+ (p, c) -> RegressionStats.PARSER.apply(p, null)
+ )
+ );
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java
new file mode 100644
index 0000000000000..101f74f2fe239
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.client.common.TimeUtil;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.util.Objects;
+
+public class ClassificationStats implements AnalysisStats {
+
+ public static final ParseField NAME = new ParseField("classification_stats");
+
+ public static final ParseField TIMESTAMP = new ParseField("timestamp");
+ public static final ParseField ITERATION = new ParseField("iteration");
+ public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters");
+ public static final ParseField TIMING_STATS = new ParseField("timing_stats");
+ public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss");
+
+ public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(),
+ true,
+ a -> new ClassificationStats(
+ (Instant) a[0],
+ (Integer) a[1],
+ (Hyperparameters) a[2],
+ (TimingStats) a[3],
+ (ValidationLoss) a[4]
+ )
+ );
+
+ static {
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()),
+ TIMESTAMP,
+ ObjectParser.ValueType.VALUE);
+ PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS);
+ }
+
+ private final Instant timestamp;
+ private final Integer iteration;
+ private final Hyperparameters hyperparameters;
+ private final TimingStats timingStats;
+ private final ValidationLoss validationLoss;
+
+ public ClassificationStats(Instant timestamp, Integer iteration, Hyperparameters hyperparameters, TimingStats timingStats,
+ ValidationLoss validationLoss) {
+ this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli());
+ this.iteration = iteration;
+ this.hyperparameters = Objects.requireNonNull(hyperparameters);
+ this.timingStats = Objects.requireNonNull(timingStats);
+ this.validationLoss = Objects.requireNonNull(validationLoss);
+ }
+
+ public Instant getTimestamp() {
+ return timestamp;
+ }
+
+ public Integer getIteration() {
+ return iteration;
+ }
+
+ public Hyperparameters getHyperparameters() {
+ return hyperparameters;
+ }
+
+ public TimingStats getTimingStats() {
+ return timingStats;
+ }
+
+ public ValidationLoss getValidationLoss() {
+ return validationLoss;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+ builder.startObject();
+ builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli());
+ if (iteration != null) {
+ builder.field(ITERATION.getPreferredName(), iteration);
+ }
+ builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters);
+ builder.field(TIMING_STATS.getPreferredName(), timingStats);
+ builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ClassificationStats that = (ClassificationStats) o;
+ return Objects.equals(timestamp, that.timestamp)
+ && Objects.equals(iteration, that.iteration)
+ && Objects.equals(hyperparameters, that.hyperparameters)
+ && Objects.equals(timingStats, that.timingStats)
+ && Objects.equals(validationLoss, that.validationLoss);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss);
+ }
+
+ @Override
+ public String getName() {
+ return NAME.getPreferredName();
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java
new file mode 100644
index 0000000000000..c8d581b1d9c41
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java
@@ -0,0 +1,293 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+public class Hyperparameters implements ToXContentObject {
+
+ public static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective");
+ public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor");
+ public static final ParseField ETA = new ParseField("eta");
+ public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree");
+ public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction");
+ public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree");
+ public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField(
+ "max_optimization_rounds_per_hyperparameter");
+ public static final ParseField MAX_TREES = new ParseField("max_trees");
+ public static final ParseField NUM_FOLDS = new ParseField("num_folds");
+ public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature");
+ public static final ParseField REGULARIZATION_DEPTH_PENALTY_MULTIPLIER = new ParseField("regularization_depth_penalty_multiplier");
+ public static final ParseField REGULARIZATION_LEAF_WEIGHT_PENALTY_MULTIPLIER
+ = new ParseField("regularization_leaf_weight_penalty_multiplier");
+ public static final ParseField REGULARIZATION_SOFT_TREE_DEPTH_LIMIT = new ParseField("regularization_soft_tree_depth_limit");
+ public static final ParseField REGULARIZATION_SOFT_TREE_DEPTH_TOLERANCE = new ParseField("regularization_soft_tree_depth_tolerance");
+ public static final ParseField REGULARIZATION_TREE_SIZE_PENALTY_MULTIPLIER =
+ new ParseField("regularization_tree_size_penalty_multiplier");
+
+ public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("classification_hyperparameters",
+ true,
+ a -> new Hyperparameters(
+ (String) a[0],
+ (Double) a[1],
+ (Double) a[2],
+ (Double) a[3],
+ (Double) a[4],
+ (Integer) a[5],
+ (Integer) a[6],
+ (Integer) a[7],
+ (Integer) a[8],
+ (Integer) a[9],
+ (Double) a[10],
+ (Double) a[11],
+ (Double) a[12],
+ (Double) a[13],
+ (Double) a[14]
+ ));
+
+ static {
+ PARSER.declareString(optionalConstructorArg(), CLASS_ASSIGNMENT_OBJECTIVE);
+ PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR);
+ PARSER.declareDouble(optionalConstructorArg(), ETA);
+ PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE);
+ PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION);
+ PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE);
+ PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER);
+ PARSER.declareInt(optionalConstructorArg(), MAX_TREES);
+ PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS);
+ PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_DEPTH_PENALTY_MULTIPLIER);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_LEAF_WEIGHT_PENALTY_MULTIPLIER);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_SOFT_TREE_DEPTH_LIMIT);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_SOFT_TREE_DEPTH_TOLERANCE);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_TREE_SIZE_PENALTY_MULTIPLIER);
+ }
+
+ private final String classAssignmentObjective;
+ private final Double downsampleFactor;
+ private final Double eta;
+ private final Double etaGrowthRatePerTree;
+ private final Double featureBagFraction;
+ private final Integer maxAttemptsToAddTree;
+ private final Integer maxOptimizationRoundsPerHyperparameter;
+ private final Integer maxTrees;
+ private final Integer numFolds;
+ private final Integer numSplitsPerFeature;
+ private final Double regularizationDepthPenaltyMultiplier;
+ private final Double regularizationLeafWeightPenaltyMultiplier;
+ private final Double regularizationSoftTreeDepthLimit;
+ private final Double regularizationSoftTreeDepthTolerance;
+ private final Double regularizationTreeSizePenaltyMultiplier;
+
+ public Hyperparameters(String classAssignmentObjective,
+ Double downsampleFactor,
+ Double eta,
+ Double etaGrowthRatePerTree,
+ Double featureBagFraction,
+ Integer maxAttemptsToAddTree,
+ Integer maxOptimizationRoundsPerHyperparameter,
+ Integer maxTrees,
+ Integer numFolds,
+ Integer numSplitsPerFeature,
+ Double regularizationDepthPenaltyMultiplier,
+ Double regularizationLeafWeightPenaltyMultiplier,
+ Double regularizationSoftTreeDepthLimit,
+ Double regularizationSoftTreeDepthTolerance,
+ Double regularizationTreeSizePenaltyMultiplier) {
+ this.classAssignmentObjective = classAssignmentObjective;
+ this.downsampleFactor = downsampleFactor;
+ this.eta = eta;
+ this.etaGrowthRatePerTree = etaGrowthRatePerTree;
+ this.featureBagFraction = featureBagFraction;
+ this.maxAttemptsToAddTree = maxAttemptsToAddTree;
+ this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter;
+ this.maxTrees = maxTrees;
+ this.numFolds = numFolds;
+ this.numSplitsPerFeature = numSplitsPerFeature;
+ this.regularizationDepthPenaltyMultiplier = regularizationDepthPenaltyMultiplier;
+ this.regularizationLeafWeightPenaltyMultiplier = regularizationLeafWeightPenaltyMultiplier;
+ this.regularizationSoftTreeDepthLimit = regularizationSoftTreeDepthLimit;
+ this.regularizationSoftTreeDepthTolerance = regularizationSoftTreeDepthTolerance;
+ this.regularizationTreeSizePenaltyMultiplier = regularizationTreeSizePenaltyMultiplier;
+ }
+
+ public String getClassAssignmentObjective() {
+ return classAssignmentObjective;
+ }
+
+ public Double getDownsampleFactor() {
+ return downsampleFactor;
+ }
+
+ public Double getEta() {
+ return eta;
+ }
+
+ public Double getEtaGrowthRatePerTree() {
+ return etaGrowthRatePerTree;
+ }
+
+ public Double getFeatureBagFraction() {
+ return featureBagFraction;
+ }
+
+ public Integer getMaxAttemptsToAddTree() {
+ return maxAttemptsToAddTree;
+ }
+
+ public Integer getMaxOptimizationRoundsPerHyperparameter() {
+ return maxOptimizationRoundsPerHyperparameter;
+ }
+
+ public Integer getMaxTrees() {
+ return maxTrees;
+ }
+
+ public Integer getNumFolds() {
+ return numFolds;
+ }
+
+ public Integer getNumSplitsPerFeature() {
+ return numSplitsPerFeature;
+ }
+
+ public Double getRegularizationDepthPenaltyMultiplier() {
+ return regularizationDepthPenaltyMultiplier;
+ }
+
+ public Double getRegularizationLeafWeightPenaltyMultiplier() {
+ return regularizationLeafWeightPenaltyMultiplier;
+ }
+
+ public Double getRegularizationSoftTreeDepthLimit() {
+ return regularizationSoftTreeDepthLimit;
+ }
+
+ public Double getRegularizationSoftTreeDepthTolerance() {
+ return regularizationSoftTreeDepthTolerance;
+ }
+
+ public Double getRegularizationTreeSizePenaltyMultiplier() {
+ return regularizationTreeSizePenaltyMultiplier;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (classAssignmentObjective != null) {
+ builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective);
+ }
+ if (downsampleFactor != null) {
+ builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor);
+ }
+ if (eta != null) {
+ builder.field(ETA.getPreferredName(), eta);
+ }
+ if (etaGrowthRatePerTree != null) {
+ builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree);
+ }
+ if (featureBagFraction != null) {
+ builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction);
+ }
+ if (maxAttemptsToAddTree != null) {
+ builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree);
+ }
+ if (maxOptimizationRoundsPerHyperparameter != null) {
+ builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter);
+ }
+ if (maxTrees != null) {
+ builder.field(MAX_TREES.getPreferredName(), maxTrees);
+ }
+ if (numFolds != null) {
+ builder.field(NUM_FOLDS.getPreferredName(), numFolds);
+ }
+ if (numSplitsPerFeature != null) {
+ builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature);
+ }
+ if (regularizationDepthPenaltyMultiplier != null) {
+ builder.field(REGULARIZATION_DEPTH_PENALTY_MULTIPLIER.getPreferredName(), regularizationDepthPenaltyMultiplier);
+ }
+ if (regularizationLeafWeightPenaltyMultiplier != null) {
+ builder.field(REGULARIZATION_LEAF_WEIGHT_PENALTY_MULTIPLIER.getPreferredName(), regularizationLeafWeightPenaltyMultiplier);
+ }
+ if (regularizationSoftTreeDepthLimit != null) {
+ builder.field(REGULARIZATION_SOFT_TREE_DEPTH_LIMIT.getPreferredName(), regularizationSoftTreeDepthLimit);
+ }
+ if (regularizationSoftTreeDepthTolerance != null) {
+ builder.field(REGULARIZATION_SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), regularizationSoftTreeDepthTolerance);
+ }
+ if (regularizationTreeSizePenaltyMultiplier != null) {
+ builder.field(REGULARIZATION_TREE_SIZE_PENALTY_MULTIPLIER.getPreferredName(), regularizationTreeSizePenaltyMultiplier);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Hyperparameters that = (Hyperparameters) o;
+ return Objects.equals(classAssignmentObjective, that.classAssignmentObjective)
+ && Objects.equals(downsampleFactor, that.downsampleFactor)
+ && Objects.equals(eta, that.eta)
+ && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree)
+ && Objects.equals(featureBagFraction, that.featureBagFraction)
+ && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree)
+ && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter)
+ && Objects.equals(maxTrees, that.maxTrees)
+ && Objects.equals(numFolds, that.numFolds)
+ && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature)
+ && Objects.equals(regularizationDepthPenaltyMultiplier, that.regularizationDepthPenaltyMultiplier)
+ && Objects.equals(regularizationLeafWeightPenaltyMultiplier, that.regularizationLeafWeightPenaltyMultiplier)
+ && Objects.equals(regularizationSoftTreeDepthLimit, that.regularizationSoftTreeDepthLimit)
+ && Objects.equals(regularizationSoftTreeDepthTolerance, that.regularizationSoftTreeDepthTolerance)
+ && Objects.equals(regularizationTreeSizePenaltyMultiplier, that.regularizationTreeSizePenaltyMultiplier);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ classAssignmentObjective,
+ downsampleFactor,
+ eta,
+ etaGrowthRatePerTree,
+ featureBagFraction,
+ maxAttemptsToAddTree,
+ maxOptimizationRoundsPerHyperparameter,
+ maxTrees,
+ numFolds,
+ numSplitsPerFeature,
+ regularizationDepthPenaltyMultiplier,
+ regularizationLeafWeightPenaltyMultiplier,
+ regularizationSoftTreeDepthLimit,
+ regularizationSoftTreeDepthTolerance,
+ regularizationTreeSizePenaltyMultiplier
+ );
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java
new file mode 100644
index 0000000000000..bad599298a780
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class TimingStats implements ToXContentObject {
+
+ public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time");
+ public static final ParseField ITERATION_TIME = new ParseField("iteration_time");
+
+ public static final ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>("classification_timing_stats", true,
+ a -> new TimingStats(
+ a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]),
+ a[1] == null ? null : TimeValue.timeValueMillis((long) a[1])
+ )); // lenient (true): unknown fields from newer servers are ignored
+
+ static {
+ PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME);
+ PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME);
+ }
+
+ private final TimeValue elapsedTime;
+ private final TimeValue iterationTime;
+
+ public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) {
+ this.elapsedTime = elapsedTime;
+ this.iterationTime = iterationTime;
+ }
+
+ public TimeValue getElapsedTime() {
+ return elapsedTime;
+ }
+
+ public TimeValue getIterationTime() {
+ return iterationTime;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (elapsedTime != null) {
+ builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime);
+ }
+ if (iterationTime != null) {
+ builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TimingStats that = (TimingStats) o;
+ return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(elapsedTime, iterationTime);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java
new file mode 100644
index 0000000000000..a552f5d85e124
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+public class ValidationLoss implements ToXContentObject {
+
+ public static final ParseField LOSS_TYPE = new ParseField("loss_type");
+ public static final ParseField FOLD_VALUES = new ParseField("fold_values");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<ValidationLoss, Void> PARSER = new ConstructingObjectParser<>("classification_validation_loss",
+ true,
+ a -> new ValidationLoss((String) a[0], (List<FoldValues>) a[1])); // lenient: tolerates unknown fields
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE);
+ PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES);
+ }
+
+ private final String lossType;
+ private final List<FoldValues> foldValues;
+
+ public ValidationLoss(String lossType, List<FoldValues> values) {
+ this.lossType = lossType;
+ this.foldValues = values;
+ }
+
+ public String getLossType() {
+ return lossType;
+ }
+
+ public List<FoldValues> getFoldValues() {
+ return foldValues;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (lossType != null) {
+ builder.field(LOSS_TYPE.getPreferredName(), lossType);
+ }
+ if (foldValues != null) {
+ builder.field(FOLD_VALUES.getPreferredName(), foldValues);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ValidationLoss that = (ValidationLoss) o;
+ return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(lossType, foldValues);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java
new file mode 100644
index 0000000000000..b7a90b1f0b5c6
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.ml.dataframe.stats.common;
+
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.inject.internal.ToStringBuilder;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+public class DataCounts implements ToXContentObject {
+
+ public static final String TYPE_VALUE = "analytics_data_counts";
+
+ public static final ParseField TRAINING_DOCS_COUNT = new ParseField("training_docs_count");
+ public static final ParseField TEST_DOCS_COUNT = new ParseField("test_docs_count");
+ public static final ParseField SKIPPED_DOCS_COUNT = new ParseField("skipped_docs_count");
+
+ public static final ConstructingObjectParser<DataCounts, Void> PARSER = new ConstructingObjectParser<>(TYPE_VALUE, true,
+ a -> {
+ Long trainingDocsCount = (Long) a[0];
+ Long testDocsCount = (Long) a[1];
+ Long skippedDocsCount = (Long) a[2];
+ return new DataCounts(
+ getOrDefault(trainingDocsCount, 0L),
+ getOrDefault(testDocsCount, 0L),
+ getOrDefault(skippedDocsCount, 0L)
+ ); // missing counts default to 0 rather than surfacing as null
+ });
+
+ static {
+ PARSER.declareLong(optionalConstructorArg(), TRAINING_DOCS_COUNT);
+ PARSER.declareLong(optionalConstructorArg(), TEST_DOCS_COUNT);
+ PARSER.declareLong(optionalConstructorArg(), SKIPPED_DOCS_COUNT);
+ }
+
+ private final long trainingDocsCount;
+ private final long testDocsCount;
+ private final long skippedDocsCount;
+
+ public DataCounts(long trainingDocsCount, long testDocsCount, long skippedDocsCount) {
+ this.trainingDocsCount = trainingDocsCount;
+ this.testDocsCount = testDocsCount;
+ this.skippedDocsCount = skippedDocsCount;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount);
+ builder.field(TEST_DOCS_COUNT.getPreferredName(), testDocsCount);
+ builder.field(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DataCounts that = (DataCounts) o;
+ return trainingDocsCount == that.trainingDocsCount
+ && testDocsCount == that.testDocsCount
+ && skippedDocsCount == that.skippedDocsCount;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(trainingDocsCount, testDocsCount, skippedDocsCount);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(getClass())
+ .add(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount)
+ .add(TEST_DOCS_COUNT.getPreferredName(), testDocsCount)
+ .add(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount)
+ .toString();
+ }
+
+ public long getTrainingDocsCount() {
+ return trainingDocsCount;
+ }
+
+ public long getTestDocsCount() {
+ return testDocsCount;
+ }
+
+ public long getSkippedDocsCount() {
+ return skippedDocsCount;
+ }
+
+ private static <T> T getOrDefault(@Nullable T value, T defaultValue) {
+ return value != null ? value : defaultValue;
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java
new file mode 100644
index 0000000000000..30490981d9651
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.common;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+public class FoldValues implements ToXContentObject {
+
+ public static final ParseField FOLD = new ParseField("fold");
+ public static final ParseField VALUES = new ParseField("values");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<FoldValues, Void> PARSER = new ConstructingObjectParser<>("fold_values", true,
+ a -> new FoldValues((int) a[0], (List<Double>) a[1]));
+
+ static {
+ PARSER.declareInt(ConstructingObjectParser.constructorArg(), FOLD);
+ PARSER.declareDoubleArray(ConstructingObjectParser.constructorArg(), VALUES);
+ }
+
+ private final int fold;
+ private final double[] values;
+
+ private FoldValues(int fold, List<Double> values) {
+ this(fold, values.stream().mapToDouble(Double::doubleValue).toArray());
+ }
+
+ public FoldValues(int fold, double[] values) {
+ this.fold = fold;
+ this.values = values;
+ }
+
+ public int getFold() {
+ return fold;
+ }
+
+ public double[] getValues() {
+ return values; // NOTE(review): exposes the internal array; callers must not mutate it
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(FOLD.getPreferredName(), fold);
+ builder.array(VALUES.getPreferredName(), values);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ FoldValues other = (FoldValues) o;
+ return fold == other.fold && Arrays.equals(values, other.values);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fold, Arrays.hashCode(values));
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MemoryUsage.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java
similarity index 94%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MemoryUsage.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java
index 323ebb52a7aed..f492d26528e02 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MemoryUsage.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.client.ml.dataframe;
+package org.elasticsearch.client.ml.dataframe.stats.common;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.ParseField;
@@ -54,6 +54,14 @@ public MemoryUsage(Instant timestamp, long peakUsageBytes) {
this.peakUsageBytes = peakUsageBytes;
}
+ public Instant getTimestamp() {
+ return timestamp;
+ }
+
+ public long getPeakUsageBytes() {
+ return peakUsageBytes;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java
new file mode 100644
index 0000000000000..e3236dad0cd26
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.outlierdetection;
+
+import org.elasticsearch.client.common.TimeUtil;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.util.Objects;
+
+public class OutlierDetectionStats implements AnalysisStats {
+
+ public static final ParseField NAME = new ParseField("outlier_detection_stats");
+
+ public static final ParseField TIMESTAMP = new ParseField("timestamp");
+ public static final ParseField PARAMETERS = new ParseField("parameters");
+ public static final ParseField TIMING_STATS = new ParseField("timing_stats");
+
+ public static final ConstructingObjectParser<OutlierDetectionStats, Void> PARSER = new ConstructingObjectParser<>(
+ NAME.getPreferredName(), true,
+ a -> new OutlierDetectionStats((Instant) a[0], (Parameters) a[1], (TimingStats) a[2]));
+
+ static {
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()),
+ TIMESTAMP,
+ ObjectParser.ValueType.VALUE);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), Parameters.PARSER, PARAMETERS);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS);
+ }
+
+ private final Instant timestamp;
+ private final Parameters parameters;
+ private final TimingStats timingStats;
+
+ public OutlierDetectionStats(Instant timestamp, Parameters parameters, TimingStats timingStats) {
+ this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); // truncate to ms precision
+ this.parameters = Objects.requireNonNull(parameters);
+ this.timingStats = Objects.requireNonNull(timingStats);
+ }
+
+ public Instant getTimestamp() {
+ return timestamp;
+ }
+
+ public Parameters getParameters() {
+ return parameters;
+ }
+
+ public TimingStats getTimingStats() {
+ return timingStats;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+ builder.startObject();
+ builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli());
+ builder.field(PARAMETERS.getPreferredName(), parameters);
+ builder.field(TIMING_STATS.getPreferredName(), timingStats);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ OutlierDetectionStats that = (OutlierDetectionStats) o;
+ return Objects.equals(timestamp, that.timestamp)
+ && Objects.equals(parameters, that.parameters)
+ && Objects.equals(timingStats, that.timingStats);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(timestamp, parameters, timingStats);
+ }
+
+ @Override
+ public String getName() {
+ return NAME.getPreferredName();
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java
new file mode 100644
index 0000000000000..deafb55081de0
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.outlierdetection;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+public class Parameters implements ToXContentObject {
+
+ public static final ParseField N_NEIGHBORS = new ParseField("n_neighbors");
+ public static final ParseField METHOD = new ParseField("method");
+ public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold");
+ public static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence");
+ public static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction");
+ public static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<Parameters, Void> PARSER = new ConstructingObjectParser<>("outlier_detection_parameters",
+ true,
+ a -> new Parameters(
+ (Integer) a[0],
+ (String) a[1],
+ (Boolean) a[2],
+ (Double) a[3],
+ (Double) a[4],
+ (Boolean) a[5]
+ ));
+
+ static {
+ PARSER.declareInt(optionalConstructorArg(), N_NEIGHBORS);
+ PARSER.declareString(optionalConstructorArg(), METHOD);
+ PARSER.declareBoolean(optionalConstructorArg(), COMPUTE_FEATURE_INFLUENCE);
+ PARSER.declareDouble(optionalConstructorArg(), FEATURE_INFLUENCE_THRESHOLD);
+ PARSER.declareDouble(optionalConstructorArg(), OUTLIER_FRACTION);
+ PARSER.declareBoolean(optionalConstructorArg(), STANDARDIZATION_ENABLED);
+ }
+
+ private final Integer nNeighbors;
+ private final String method;
+ private final Boolean computeFeatureInfluence;
+ private final Double featureInfluenceThreshold;
+ private final Double outlierFraction;
+ private final Boolean standardizationEnabled;
+
+ public Parameters(Integer nNeighbors, String method, Boolean computeFeatureInfluence, Double featureInfluenceThreshold,
+ Double outlierFraction, Boolean standardizationEnabled) {
+ this.nNeighbors = nNeighbors;
+ this.method = method;
+ this.computeFeatureInfluence = computeFeatureInfluence;
+ this.featureInfluenceThreshold = featureInfluenceThreshold;
+ this.outlierFraction = outlierFraction;
+ this.standardizationEnabled = standardizationEnabled;
+ }
+
+ public Integer getnNeighbors() {
+ return nNeighbors;
+ }
+
+ public String getMethod() {
+ return method;
+ }
+
+ public Boolean getComputeFeatureInfluence() {
+ return computeFeatureInfluence;
+ }
+
+ public Double getFeatureInfluenceThreshold() {
+ return featureInfluenceThreshold;
+ }
+
+ public Double getOutlierFraction() {
+ return outlierFraction;
+ }
+
+ public Boolean getStandardizationEnabled() {
+ return standardizationEnabled;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (nNeighbors != null) {
+ builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors);
+ }
+ if (method != null) {
+ builder.field(METHOD.getPreferredName(), method);
+ }
+ if (computeFeatureInfluence != null) {
+ builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence);
+ }
+ if (featureInfluenceThreshold != null) {
+ builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold);
+ }
+ if (outlierFraction != null) {
+ builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction);
+ }
+ if (standardizationEnabled != null) {
+ builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Parameters that = (Parameters) o;
+ return Objects.equals(nNeighbors, that.nNeighbors)
+ && Objects.equals(method, that.method)
+ && Objects.equals(computeFeatureInfluence, that.computeFeatureInfluence)
+ && Objects.equals(featureInfluenceThreshold, that.featureInfluenceThreshold)
+ && Objects.equals(outlierFraction, that.outlierFraction)
+ && Objects.equals(standardizationEnabled, that.standardizationEnabled);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nNeighbors, method, computeFeatureInfluence, featureInfluenceThreshold, outlierFraction,
+ standardizationEnabled);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java
new file mode 100644
index 0000000000000..96f93a6651de7
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.outlierdetection;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class TimingStats implements ToXContentObject {
+
+ public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time");
+
+ public static final ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>("outlier_detection_timing_stats",
+ true,
+ a -> new TimingStats(a[0] == null ? null : TimeValue.timeValueMillis((long) a[0])));
+
+ static {
+ PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME);
+ }
+
+ private final TimeValue elapsedTime;
+
+ public TimingStats(TimeValue elapsedTime) {
+ this.elapsedTime = elapsedTime;
+ }
+
+ public TimeValue getElapsedTime() {
+ return elapsedTime;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (elapsedTime != null) {
+ builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TimingStats that = (TimingStats) o;
+ return Objects.equals(elapsedTime, that.elapsedTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(elapsedTime);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java
new file mode 100644
index 0000000000000..cb1a0b99ab58b
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java
@@ -0,0 +1,278 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+public class Hyperparameters implements ToXContentObject {
+
+ public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor");
+ public static final ParseField ETA = new ParseField("eta");
+ public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree");
+ public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction");
+ public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree");
+ public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField(
+ "max_optimization_rounds_per_hyperparameter");
+ public static final ParseField MAX_TREES = new ParseField("max_trees");
+ public static final ParseField NUM_FOLDS = new ParseField("num_folds");
+ public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature");
+ public static final ParseField REGULARIZATION_DEPTH_PENALTY_MULTIPLIER = new ParseField("regularization_depth_penalty_multiplier");
+ public static final ParseField REGULARIZATION_LEAF_WEIGHT_PENALTY_MULTIPLIER
+ = new ParseField("regularization_leaf_weight_penalty_multiplier");
+ public static final ParseField REGULARIZATION_SOFT_TREE_DEPTH_LIMIT = new ParseField("regularization_soft_tree_depth_limit");
+ public static final ParseField REGULARIZATION_SOFT_TREE_DEPTH_TOLERANCE = new ParseField("regularization_soft_tree_depth_tolerance");
+ public static final ParseField REGULARIZATION_TREE_SIZE_PENALTY_MULTIPLIER =
+ new ParseField("regularization_tree_size_penalty_multiplier");
+
+ public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("regression_hyperparameters",
+ true,
+ a -> new Hyperparameters(
+ (Double) a[0],
+ (Double) a[1],
+ (Double) a[2],
+ (Double) a[3],
+ (Integer) a[4],
+ (Integer) a[5],
+ (Integer) a[6],
+ (Integer) a[7],
+ (Integer) a[8],
+ (Double) a[9],
+ (Double) a[10],
+ (Double) a[11],
+ (Double) a[12],
+ (Double) a[13]
+ ));
+
+ static {
+ PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR);
+ PARSER.declareDouble(optionalConstructorArg(), ETA);
+ PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE);
+ PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION);
+ PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE);
+ PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER);
+ PARSER.declareInt(optionalConstructorArg(), MAX_TREES);
+ PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS);
+ PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_DEPTH_PENALTY_MULTIPLIER);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_LEAF_WEIGHT_PENALTY_MULTIPLIER);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_SOFT_TREE_DEPTH_LIMIT);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_SOFT_TREE_DEPTH_TOLERANCE);
+ PARSER.declareDouble(optionalConstructorArg(), REGULARIZATION_TREE_SIZE_PENALTY_MULTIPLIER);
+ }
+
+ private final Double downsampleFactor;
+ private final Double eta;
+ private final Double etaGrowthRatePerTree;
+ private final Double featureBagFraction;
+ private final Integer maxAttemptsToAddTree;
+ private final Integer maxOptimizationRoundsPerHyperparameter;
+ private final Integer maxTrees;
+ private final Integer numFolds;
+ private final Integer numSplitsPerFeature;
+ private final Double regularizationDepthPenaltyMultiplier;
+ private final Double regularizationLeafWeightPenaltyMultiplier;
+ private final Double regularizationSoftTreeDepthLimit;
+ private final Double regularizationSoftTreeDepthTolerance;
+ private final Double regularizationTreeSizePenaltyMultiplier;
+
+ public Hyperparameters(Double downsampleFactor,
+ Double eta,
+ Double etaGrowthRatePerTree,
+ Double featureBagFraction,
+ Integer maxAttemptsToAddTree,
+ Integer maxOptimizationRoundsPerHyperparameter,
+ Integer maxTrees,
+ Integer numFolds,
+ Integer numSplitsPerFeature,
+ Double regularizationDepthPenaltyMultiplier,
+ Double regularizationLeafWeightPenaltyMultiplier,
+ Double regularizationSoftTreeDepthLimit,
+ Double regularizationSoftTreeDepthTolerance,
+ Double regularizationTreeSizePenaltyMultiplier) {
+ this.downsampleFactor = downsampleFactor;
+ this.eta = eta;
+ this.etaGrowthRatePerTree = etaGrowthRatePerTree;
+ this.featureBagFraction = featureBagFraction;
+ this.maxAttemptsToAddTree = maxAttemptsToAddTree;
+ this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter;
+ this.maxTrees = maxTrees;
+ this.numFolds = numFolds;
+ this.numSplitsPerFeature = numSplitsPerFeature;
+ this.regularizationDepthPenaltyMultiplier = regularizationDepthPenaltyMultiplier;
+ this.regularizationLeafWeightPenaltyMultiplier = regularizationLeafWeightPenaltyMultiplier;
+ this.regularizationSoftTreeDepthLimit = regularizationSoftTreeDepthLimit;
+ this.regularizationSoftTreeDepthTolerance = regularizationSoftTreeDepthTolerance;
+ this.regularizationTreeSizePenaltyMultiplier = regularizationTreeSizePenaltyMultiplier;
+ }
+
+ public Double getDownsampleFactor() {
+ return downsampleFactor;
+ }
+
+ public Double getEta() {
+ return eta;
+ }
+
+ public Double getEtaGrowthRatePerTree() {
+ return etaGrowthRatePerTree;
+ }
+
+ public Double getFeatureBagFraction() {
+ return featureBagFraction;
+ }
+
+ public Integer getMaxAttemptsToAddTree() {
+ return maxAttemptsToAddTree;
+ }
+
+ public Integer getMaxOptimizationRoundsPerHyperparameter() {
+ return maxOptimizationRoundsPerHyperparameter;
+ }
+
+ public Integer getMaxTrees() {
+ return maxTrees;
+ }
+
+ public Integer getNumFolds() {
+ return numFolds;
+ }
+
+ public Integer getNumSplitsPerFeature() {
+ return numSplitsPerFeature;
+ }
+
+ public Double getRegularizationDepthPenaltyMultiplier() {
+ return regularizationDepthPenaltyMultiplier;
+ }
+
+ public Double getRegularizationLeafWeightPenaltyMultiplier() {
+ return regularizationLeafWeightPenaltyMultiplier;
+ }
+
+ public Double getRegularizationSoftTreeDepthLimit() {
+ return regularizationSoftTreeDepthLimit;
+ }
+
+ public Double getRegularizationSoftTreeDepthTolerance() {
+ return regularizationSoftTreeDepthTolerance;
+ }
+
+ public Double getRegularizationTreeSizePenaltyMultiplier() {
+ return regularizationTreeSizePenaltyMultiplier;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (downsampleFactor != null) {
+ builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor);
+ }
+ if (eta != null) {
+ builder.field(ETA.getPreferredName(), eta);
+ }
+ if (etaGrowthRatePerTree != null) {
+ builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree);
+ }
+ if (featureBagFraction != null) {
+ builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction);
+ }
+ if (maxAttemptsToAddTree != null) {
+ builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree);
+ }
+ if (maxOptimizationRoundsPerHyperparameter != null) {
+ builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter);
+ }
+ if (maxTrees != null) {
+ builder.field(MAX_TREES.getPreferredName(), maxTrees);
+ }
+ if (numFolds != null) {
+ builder.field(NUM_FOLDS.getPreferredName(), numFolds);
+ }
+ if (numSplitsPerFeature != null) {
+ builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature);
+ }
+ if (regularizationDepthPenaltyMultiplier != null) {
+ builder.field(REGULARIZATION_DEPTH_PENALTY_MULTIPLIER.getPreferredName(), regularizationDepthPenaltyMultiplier);
+ }
+ if (regularizationLeafWeightPenaltyMultiplier != null) {
+ builder.field(REGULARIZATION_LEAF_WEIGHT_PENALTY_MULTIPLIER.getPreferredName(), regularizationLeafWeightPenaltyMultiplier);
+ }
+ if (regularizationSoftTreeDepthLimit != null) {
+ builder.field(REGULARIZATION_SOFT_TREE_DEPTH_LIMIT.getPreferredName(), regularizationSoftTreeDepthLimit);
+ }
+ if (regularizationSoftTreeDepthTolerance != null) {
+ builder.field(REGULARIZATION_SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), regularizationSoftTreeDepthTolerance);
+ }
+ if (regularizationTreeSizePenaltyMultiplier != null) {
+ builder.field(REGULARIZATION_TREE_SIZE_PENALTY_MULTIPLIER.getPreferredName(), regularizationTreeSizePenaltyMultiplier);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Hyperparameters that = (Hyperparameters) o;
+ return Objects.equals(downsampleFactor, that.downsampleFactor)
+ && Objects.equals(eta, that.eta)
+ && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree)
+ && Objects.equals(featureBagFraction, that.featureBagFraction)
+ && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree)
+ && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter)
+ && Objects.equals(maxTrees, that.maxTrees)
+ && Objects.equals(numFolds, that.numFolds)
+ && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature)
+ && Objects.equals(regularizationDepthPenaltyMultiplier, that.regularizationDepthPenaltyMultiplier)
+ && Objects.equals(regularizationLeafWeightPenaltyMultiplier, that.regularizationLeafWeightPenaltyMultiplier)
+ && Objects.equals(regularizationSoftTreeDepthLimit, that.regularizationSoftTreeDepthLimit)
+ && Objects.equals(regularizationSoftTreeDepthTolerance, that.regularizationSoftTreeDepthTolerance)
+ && Objects.equals(regularizationTreeSizePenaltyMultiplier, that.regularizationTreeSizePenaltyMultiplier);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ downsampleFactor,
+ eta,
+ etaGrowthRatePerTree,
+ featureBagFraction,
+ maxAttemptsToAddTree,
+ maxOptimizationRoundsPerHyperparameter,
+ maxTrees,
+ numFolds,
+ numSplitsPerFeature,
+ regularizationDepthPenaltyMultiplier,
+ regularizationLeafWeightPenaltyMultiplier,
+ regularizationSoftTreeDepthLimit,
+ regularizationSoftTreeDepthTolerance,
+ regularizationTreeSizePenaltyMultiplier
+ );
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java
new file mode 100644
index 0000000000000..7e890c3618f82
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.client.common.TimeUtil;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.util.Objects;
+
+public class RegressionStats implements AnalysisStats {
+
+    public static final ParseField NAME = new ParseField("regression_stats");
+
+    public static final ParseField TIMESTAMP = new ParseField("timestamp");
+    public static final ParseField ITERATION = new ParseField("iteration");
+    public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters");
+    public static final ParseField TIMING_STATS = new ParseField("timing_stats");
+    public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss");
+
+    public static final ConstructingObjectParser<RegressionStats, Void> PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(),
+        true,  // lenient: unknown fields are ignored
+        a -> new RegressionStats(
+            (Instant) a[0],
+            (Integer) a[1],
+            (Hyperparameters) a[2],
+            (TimingStats) a[3],
+            (ValidationLoss) a[4]
+        )
+    );
+
+    static {
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+            p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()),
+            TIMESTAMP,
+            ObjectParser.ValueType.VALUE);
+        PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION);
+        PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS);
+        PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS);
+        PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS);
+    }
+
+    private final Instant timestamp;
+    private final Integer iteration;
+    private final Hyperparameters hyperparameters;
+    private final TimingStats timingStats;
+    private final ValidationLoss validationLoss;
+
+    public RegressionStats(Instant timestamp, Integer iteration, Hyperparameters hyperparameters, TimingStats timingStats,
+                           ValidationLoss validationLoss) {
+        this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli());
+        this.iteration = iteration;
+        this.hyperparameters = Objects.requireNonNull(hyperparameters);
+        this.timingStats = Objects.requireNonNull(timingStats);
+        this.validationLoss = Objects.requireNonNull(validationLoss);
+    }
+
+    public Instant getTimestamp() {
+        return timestamp;
+    }
+
+    public Integer getIteration() {
+        return iteration;
+    }
+
+    public Hyperparameters getHyperparameters() {
+        return hyperparameters;
+    }
+
+    public TimingStats getTimingStats() {
+        return timingStats;
+    }
+
+    public ValidationLoss getValidationLoss() {
+        return validationLoss;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+        builder.startObject();
+        builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli());
+        if (iteration != null) {
+            builder.field(ITERATION.getPreferredName(), iteration);
+        }
+        builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters);
+        builder.field(TIMING_STATS.getPreferredName(), timingStats);
+        builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        RegressionStats that = (RegressionStats) o;
+        return Objects.equals(timestamp, that.timestamp)
+            && Objects.equals(iteration, that.iteration)
+            && Objects.equals(hyperparameters, that.hyperparameters)
+            && Objects.equals(timingStats, that.timingStats)
+            && Objects.equals(validationLoss, that.validationLoss);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss);
+    }
+
+    @Override
+    public String getName() {
+        return NAME.getPreferredName();
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java
new file mode 100644
index 0000000000000..1a844a410f469
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class TimingStats implements ToXContentObject {
+
+    public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time");
+    public static final ParseField ITERATION_TIME = new ParseField("iteration_time");
+
+    public static ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>("regression_timing_stats", true,
+        a -> new TimingStats(
+            a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]),
+            a[1] == null ? null : TimeValue.timeValueMillis((long) a[1])
+        ));
+
+    static {
+        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME);
+        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME);
+    }
+
+    private final TimeValue elapsedTime;
+    private final TimeValue iterationTime;
+
+    public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) {
+        this.elapsedTime = elapsedTime;
+        this.iterationTime = iterationTime;
+    }
+
+    public TimeValue getElapsedTime() {
+        return elapsedTime;
+    }
+
+    public TimeValue getIterationTime() {
+        return iterationTime;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        if (elapsedTime != null) {
+            builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime);
+        }
+        if (iterationTime != null) {
+            builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        TimingStats that = (TimingStats) o;
+        return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(elapsedTime, iterationTime);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java
new file mode 100644
index 0000000000000..ee2513b0f395f
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+public class ValidationLoss implements ToXContentObject {
+
+    public static final ParseField LOSS_TYPE = new ParseField("loss_type");
+    public static final ParseField FOLD_VALUES = new ParseField("fold_values");
+
+    @SuppressWarnings("unchecked")
+    public static ConstructingObjectParser<ValidationLoss, Void> PARSER = new ConstructingObjectParser<>("regression_validation_loss",
+        true,  // lenient: unknown fields are ignored
+        a -> new ValidationLoss((String) a[0], (List<FoldValues>) a[1]));
+
+    static {
+        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE);
+        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES);
+    }
+
+    private final String lossType;
+    private final List<FoldValues> foldValues;
+
+    public ValidationLoss(String lossType, List<FoldValues> values) {
+        this.lossType = lossType;
+        this.foldValues = values;
+    }
+
+    public String getLossType() {
+        return lossType;
+    }
+
+    public List<FoldValues> getFoldValues() {
+        return foldValues;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        if (lossType != null) {
+            builder.field(LOSS_TYPE.getPreferredName(), lossType);
+        }
+        if (foldValues != null) {
+            builder.field(FOLD_VALUES.getPreferredName(), foldValues);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        ValidationLoss that = (ValidationLoss) o;
+        return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(lossType, foldValues);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java
index 8b6886d582524..95e727b818abe 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java
@@ -50,7 +50,7 @@ public static ContextParser parser() {
Map value = (Map) entry.getValue();
builder.map(value);
try (XContentParser scopeParser = XContentFactory.xContent(builder.contentType()).createParser(
- NamedXContentRegistry.EMPTY, DEPRECATION_HANDLER, Strings.toString(builder))) {
+ NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, Strings.toString(builder))) {
scope.put(entry.getKey(), FilterRef.PARSER.parse(scopeParser, null));
}
}
@@ -59,15 +59,6 @@ public static ContextParser parser() {
};
}
- private static final DeprecationHandler DEPRECATION_HANDLER = new DeprecationHandler() {
-
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {}
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {}
- };
-
private final Map scope;
public RuleScope() {
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java
index e63daf5949002..9ded34aa05670 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java
@@ -177,16 +177,18 @@ public final String toString() {
public static class RollupIndexerJobStats extends IndexerJobStats {
RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations,
- long indexTime, long indexTotal, long searchTime, long searchTotal, long indexFailures, long searchFailures) {
+ long indexTime, long indexTotal, long searchTime, long searchTotal, long processingTime,
+ long processingTotal, long indexFailures, long searchFailures) {
super(numPages, numInputDocuments, numOuputDocuments, numInvocations,
- indexTime, searchTime, indexTotal, searchTotal, indexFailures, searchFailures);
+ indexTime, searchTime, processingTime, indexTotal, searchTotal, processingTotal, indexFailures, searchFailures);
}
 private static final ConstructingObjectParser<RollupIndexerJobStats, Void> PARSER = new ConstructingObjectParser<>(
STATS.getPreferredName(),
true,
args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3],
- (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9]));
+ (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9],
+ (long) args[10], (long) args[11]));
static {
PARSER.declareLong(constructorArg(), NUM_PAGES);
PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS);
@@ -196,6 +198,8 @@ public static class RollupIndexerJobStats extends IndexerJobStats {
PARSER.declareLong(constructorArg(), INDEX_TOTAL);
PARSER.declareLong(constructorArg(), SEARCH_TIME_IN_MS);
PARSER.declareLong(constructorArg(), SEARCH_TOTAL);
+ PARSER.declareLong(constructorArg(), PROCESSING_TIME_IN_MS);
+ PARSER.declareLong(constructorArg(), PROCESSING_TOTAL);
PARSER.declareLong(constructorArg(), INDEX_FAILURES);
PARSER.declareLong(constructorArg(), SEARCH_FAILURES);
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java
index 215d529f94993..12a37d1f9d791 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java
@@ -19,40 +19,167 @@
package org.elasticsearch.client.transform;
+import org.elasticsearch.action.admin.indices.alias.Alias;
+import org.elasticsearch.client.indices.CreateIndexRequest;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class PreviewTransformResponse {
- private static final String PREVIEW = "preview";
- private static final String MAPPINGS = "mappings";
+ public static class GeneratedDestIndexSettings {
+ static final ParseField MAPPINGS = new ParseField("mappings");
+ private static final ParseField SETTINGS = new ParseField("settings");
+ private static final ParseField ALIASES = new ParseField("aliases");
- @SuppressWarnings("unchecked")
- public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException {
- Map<String, Object> previewMap = parser.mapOrdered();
- Object previewDocs = previewMap.get(PREVIEW);
- Object mappings = previewMap.get(MAPPINGS);
- return new PreviewTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
+ private final Map<String, Object> mappings;
+ private final Settings settings;
+ private final Set<Alias> aliases;
+
+ private static final ConstructingObjectParser<GeneratedDestIndexSettings, Void> PARSER = new ConstructingObjectParser<>(
+ "transform_preview_generated_dest_index",
+ true,
+ args -> {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> mappings = (Map<String, Object>) args[0];
+ Settings settings = (Settings) args[1];
+ @SuppressWarnings("unchecked")
+ Set<Alias> aliases = (Set<Alias>) args[2];
+
+ return new GeneratedDestIndexSettings(mappings, settings, aliases);
+ }
+ );
+
+ static {
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), MAPPINGS);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> Settings.fromXContent(p), SETTINGS);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
+ Set<Alias> aliases = new HashSet<>();
+ while ((p.nextToken()) != XContentParser.Token.END_OBJECT) {
+ aliases.add(Alias.fromXContent(p));
+ }
+ return aliases;
+ }, ALIASES);
+ }
+
+ public GeneratedDestIndexSettings(Map<String, Object> mappings, Settings settings, Set<Alias> aliases) {
+ this.mappings = mappings == null ? Collections.emptyMap() : Collections.unmodifiableMap(mappings);
+ this.settings = settings == null ? Settings.EMPTY : settings;
+ this.aliases = aliases == null ? Collections.emptySet() : Collections.unmodifiableSet(aliases);
+ }
+
+ public Map<String, Object> getMappings() {
+ return mappings;
+ }
+
+ public Settings getSettings() {
+ return settings;
+ }
+
+ public Set<Alias> getAliases() {
+ return aliases;
+ }
+
+ public static GeneratedDestIndexSettings fromXContent(final XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) {
+ return true;
+ }
+
+ if (obj == null || obj.getClass() != getClass()) {
+ return false;
+ }
+
+ GeneratedDestIndexSettings other = (GeneratedDestIndexSettings) obj;
+ return Objects.equals(other.mappings, mappings)
+ && Objects.equals(other.settings, settings)
+ && Objects.equals(other.aliases, aliases);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mappings, settings, aliases);
+ }
}
- private List<Map<String, Object>> docs;
- private Map<String, Object> mappings;
+ public static final ParseField PREVIEW = new ParseField("preview");
+ public static final ParseField GENERATED_DEST_INDEX_SETTINGS = new ParseField("generated_dest_index");
+
+ private final List<Map<String, Object>> docs;
+ private final GeneratedDestIndexSettings generatedDestIndexSettings;
+
+ private static final ConstructingObjectParser<PreviewTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
+ "data_frame_transform_preview",
+ true,
+ args -> {
+ @SuppressWarnings("unchecked")
+ List<Map<String, Object>> docs = (List<Map<String, Object>>) args[0];
+ GeneratedDestIndexSettings generatedDestIndex = (GeneratedDestIndexSettings) args[1];
+
+ // ensure generatedDestIndex is not null
+ if (generatedDestIndex == null) {
+ // BWC parsing the output from nodes < 7.7
+ @SuppressWarnings("unchecked")
+ Map<String, Object> mappings = (Map<String, Object>) args[2];
+ generatedDestIndex = new GeneratedDestIndexSettings(mappings, null, null);
+ }
- public PreviewTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
+ return new PreviewTransformResponse(docs, generatedDestIndex);
+ }
+ );
+ static {
+ PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> p.mapOrdered(), PREVIEW);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> GeneratedDestIndexSettings.fromXContent(p), GENERATED_DEST_INDEX_SETTINGS);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), GeneratedDestIndexSettings.MAPPINGS);
+ }
+
+ public PreviewTransformResponse(List<Map<String, Object>> docs, GeneratedDestIndexSettings generatedDestIndexSettings) {
this.docs = docs;
- this.mappings = mappings;
+ this.generatedDestIndexSettings = generatedDestIndexSettings;
}
public List> getDocs() {
return docs;
}
+ public GeneratedDestIndexSettings getGeneratedDestIndexSettings() {
+ return generatedDestIndexSettings;
+ }
+
public Map getMappings() {
- return mappings;
+ return generatedDestIndexSettings.getMappings();
+ }
+
+ public Settings getSettings() {
+ return generatedDestIndexSettings.getSettings();
+ }
+
+ public Set getAliases() {
+ return generatedDestIndexSettings.getAliases();
+ }
+
+ public CreateIndexRequest getCreateIndexRequest(String index) {
+ CreateIndexRequest createIndexRequest = new CreateIndexRequest(index);
+ createIndexRequest.aliases(generatedDestIndexSettings.getAliases());
+ createIndexRequest.settings(generatedDestIndexSettings.getSettings());
+ createIndexRequest.mapping(generatedDestIndexSettings.getMappings());
+
+ return createIndexRequest;
}
@Override
@@ -66,12 +193,15 @@ public boolean equals(Object obj) {
}
PreviewTransformResponse other = (PreviewTransformResponse) obj;
- return Objects.equals(other.docs, docs) && Objects.equals(other.mappings, mappings);
+ return Objects.equals(other.docs, docs) && Objects.equals(other.generatedDestIndexSettings, generatedDestIndexSettings);
}
@Override
public int hashCode() {
- return Objects.hash(docs, mappings);
+ return Objects.hash(docs, generatedDestIndexSettings);
}
+ public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java
index 2a04c6ea45eb5..e3a0032e55b0d 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java
@@ -27,7 +27,6 @@
import java.io.IOException;
import java.util.Objects;
-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class TransformIndexerStats extends IndexerJobStats {
@@ -39,21 +38,38 @@ public class TransformIndexerStats extends IndexerJobStats {
public static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>(
NAME,
true,
- args -> new TransformIndexerStats((long) args[0], (long) args[1], (long) args[2],
- (long) args[3], (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9],
- (Double) args[10], (Double) args[11], (Double) args[12]));
+ args -> new TransformIndexerStats(
+ unboxSafe(args[0], 0L),
+ unboxSafe(args[1], 0L),
+ unboxSafe(args[2], 0L),
+ unboxSafe(args[3], 0L),
+ unboxSafe(args[4], 0L),
+ unboxSafe(args[5], 0L),
+ unboxSafe(args[6], 0L),
+ unboxSafe(args[7], 0L),
+ unboxSafe(args[8], 0L),
+ unboxSafe(args[9], 0L),
+ unboxSafe(args[10], 0L),
+ unboxSafe(args[11], 0L),
+ unboxSafe(args[12], 0.0),
+ unboxSafe(args[13], 0.0),
+ unboxSafe(args[14], 0.0)
+ )
+ );
static {
- LENIENT_PARSER.declareLong(constructorArg(), NUM_PAGES);
- LENIENT_PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS);
- LENIENT_PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS);
- LENIENT_PARSER.declareLong(constructorArg(), NUM_INVOCATIONS);
- LENIENT_PARSER.declareLong(constructorArg(), INDEX_TIME_IN_MS);
- LENIENT_PARSER.declareLong(constructorArg(), SEARCH_TIME_IN_MS);
- LENIENT_PARSER.declareLong(constructorArg(), INDEX_TOTAL);
- LENIENT_PARSER.declareLong(constructorArg(), SEARCH_TOTAL);
- LENIENT_PARSER.declareLong(constructorArg(), INDEX_FAILURES);
- LENIENT_PARSER.declareLong(constructorArg(), SEARCH_FAILURES);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), NUM_PAGES);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), NUM_INPUT_DOCUMENTS);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), NUM_OUTPUT_DOCUMENTS);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), NUM_INVOCATIONS);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), INDEX_TIME_IN_MS);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), SEARCH_TIME_IN_MS);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), PROCESSING_TIME_IN_MS);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), INDEX_TOTAL);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), SEARCH_TOTAL);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), PROCESSING_TOTAL);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), INDEX_FAILURES);
+ LENIENT_PARSER.declareLong(optionalConstructorArg(), SEARCH_FAILURES);
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS);
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_INDEXED);
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_PROCESSED);
@@ -67,16 +83,40 @@ public static TransformIndexerStats fromXContent(XContentParser parser) throws I
private final double expAvgDocumentsIndexed;
private final double expAvgDocumentsProcessed;
- public TransformIndexerStats(long numPages, long numInputDocuments, long numOuputDocuments,
- long numInvocations, long indexTime, long searchTime,
- long indexTotal, long searchTotal, long indexFailures, long searchFailures,
- Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
- Double expAvgDocumentsProcessed) {
- super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime,
- indexTotal, searchTotal, indexFailures, searchFailures);
- this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs == null ? 0.0 : expAvgCheckpointDurationMs;
- this.expAvgDocumentsIndexed = expAvgDocumentsIndexed == null ? 0.0 : expAvgDocumentsIndexed;
- this.expAvgDocumentsProcessed = expAvgDocumentsProcessed == null ? 0.0 : expAvgDocumentsProcessed;
+ public TransformIndexerStats(
+ long numPages,
+ long numInputDocuments,
+ long numOuputDocuments,
+ long numInvocations,
+ long indexTime,
+ long searchTime,
+ long processingTime,
+ long indexTotal,
+ long searchTotal,
+ long processingTotal,
+ long indexFailures,
+ long searchFailures,
+ double expAvgCheckpointDurationMs,
+ double expAvgDocumentsIndexed,
+ double expAvgDocumentsProcessed
+ ) {
+ super(
+ numPages,
+ numInputDocuments,
+ numOuputDocuments,
+ numInvocations,
+ indexTime,
+ searchTime,
+ processingTime,
+ indexTotal,
+ searchTotal,
+ processingTotal,
+ indexFailures,
+ searchFailures
+ );
+ this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs;
+ this.expAvgDocumentsIndexed = expAvgDocumentsIndexed;
+ this.expAvgDocumentsProcessed = expAvgDocumentsProcessed;
}
public double getExpAvgCheckpointDurationMs() {
@@ -109,10 +149,12 @@ public boolean equals(Object other) {
&& Objects.equals(this.numInvocations, that.numInvocations)
&& Objects.equals(this.indexTime, that.indexTime)
&& Objects.equals(this.searchTime, that.searchTime)
+ && Objects.equals(this.processingTime, that.processingTime)
&& Objects.equals(this.indexFailures, that.indexFailures)
&& Objects.equals(this.searchFailures, that.searchFailures)
&& Objects.equals(this.indexTotal, that.indexTotal)
&& Objects.equals(this.searchTotal, that.searchTotal)
+ && Objects.equals(this.processingTotal, that.processingTotal)
&& Objects.equals(this.expAvgCheckpointDurationMs, that.expAvgCheckpointDurationMs)
&& Objects.equals(this.expAvgDocumentsIndexed, that.expAvgDocumentsIndexed)
&& Objects.equals(this.expAvgDocumentsProcessed, that.expAvgDocumentsProcessed);
@@ -120,8 +162,31 @@ public boolean equals(Object other) {
@Override
public int hashCode() {
- return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations,
- indexTime, searchTime, indexFailures, searchFailures, indexTotal, searchTotal,
- expAvgCheckpointDurationMs, expAvgDocumentsIndexed, expAvgDocumentsProcessed);
+ return Objects.hash(
+ numPages,
+ numInputDocuments,
+ numOuputDocuments,
+ numInvocations,
+ indexTime,
+ searchTime,
+ processingTime,
+ indexFailures,
+ searchFailures,
+ indexTotal,
+ searchTotal,
+ processingTotal,
+ expAvgCheckpointDurationMs,
+ expAvgDocumentsIndexed,
+ expAvgDocumentsProcessed
+ );
+ }
+
+ @SuppressWarnings("unchecked")
+ private static <T> T unboxSafe(Object l, T defaultValue) {
+ if (l == null) {
+ return defaultValue;
+ } else {
+ return (T) l;
+ }
}
}
diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
index 145d06bd46b76..9426b3d1bdde7 100644
--- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
+++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
@@ -1,5 +1,6 @@
org.elasticsearch.client.ilm.IndexLifecycleNamedXContentProvider
org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider
org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider
+org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider
org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider
org.elasticsearch.client.transform.TransformNamedXContentProvider
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java
new file mode 100644
index 0000000000000..a3e2c0cea7d9c
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.client.asyncsearch.DeleteAsyncSearchRequest;
+import org.elasticsearch.client.asyncsearch.GetAsyncSearchRequest;
+import org.elasticsearch.client.asyncsearch.SubmitAsyncSearchRequest;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.rest.action.search.RestSearchAction;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.StringJoiner;
+
+import static org.elasticsearch.client.RequestConvertersTests.createTestSearchSourceBuilder;
+import static org.elasticsearch.client.RequestConvertersTests.setRandomIndicesOptions;
+
+public class AsyncSearchRequestConvertersTests extends ESTestCase {
+
+ public void testSubmitAsyncSearch() throws Exception {
+ String[] indices = RequestConvertersTests.randomIndicesNames(0, 5);
+ Map expectedParams = new HashMap<>();
+ SearchSourceBuilder searchSourceBuilder = createTestSearchSourceBuilder();
+ SubmitAsyncSearchRequest submitRequest = new SubmitAsyncSearchRequest(searchSourceBuilder, indices);
+
+ // the following parameters might be overwritten by random ones later,
+ // but we need to set these since they are the default we send over http
+ expectedParams.put("request_cache", "true");
+ expectedParams.put("batched_reduce_size", "5");
+ setRandomSearchParams(submitRequest, expectedParams);
+ setRandomIndicesOptions(submitRequest::setIndicesOptions, submitRequest::getIndicesOptions, expectedParams);
+
+ if (randomBoolean()) {
+ boolean cleanOnCompletion = randomBoolean();
+ submitRequest.setCleanOnCompletion(cleanOnCompletion);
+ expectedParams.put("clean_on_completion", Boolean.toString(cleanOnCompletion));
+ }
+ if (randomBoolean()) {
+ TimeValue keepAlive = TimeValue.parseTimeValue(randomTimeValue(), "test");
+ submitRequest.setKeepAlive(keepAlive);
+ expectedParams.put("keep_alive", keepAlive.getStringRep());
+ }
+ if (randomBoolean()) {
+ TimeValue waitForCompletion = TimeValue.parseTimeValue(randomTimeValue(), "test");
+ submitRequest.setWaitForCompletion(waitForCompletion);
+ expectedParams.put("wait_for_completion", waitForCompletion.getStringRep());
+ }
+
+ Request request = AsyncSearchRequestConverters.submitAsyncSearch(submitRequest);
+ StringJoiner endpoint = new StringJoiner("/", "/", "");
+ String index = String.join(",", indices);
+ if (Strings.hasLength(index)) {
+ endpoint.add(index);
+ }
+ endpoint.add("_async_search");
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals(endpoint.toString(), request.getEndpoint());
+ assertEquals(expectedParams, request.getParameters());
+ RequestConvertersTests.assertToXContentBody(searchSourceBuilder, request.getEntity());
+ }
+
+ private static void setRandomSearchParams(SubmitAsyncSearchRequest request, Map expectedParams) {
+ expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
+ if (randomBoolean()) {
+ request.setRouting(randomAlphaOfLengthBetween(3, 10));
+ expectedParams.put("routing", request.getRouting());
+ }
+ if (randomBoolean()) {
+ request.setPreference(randomAlphaOfLengthBetween(3, 10));
+ expectedParams.put("preference", request.getPreference());
+ }
+ if (randomBoolean()) {
+ request.setSearchType(randomFrom(SearchType.CURRENTLY_SUPPORTED));
+ }
+ expectedParams.put("search_type", request.getSearchType().name().toLowerCase(Locale.ROOT));
+ if (randomBoolean()) {
+ request.setAllowPartialSearchResults(randomBoolean());
+ expectedParams.put("allow_partial_search_results", Boolean.toString(request.getAllowPartialSearchResults()));
+ }
+ if (randomBoolean()) {
+ request.setRequestCache(randomBoolean());
+ expectedParams.put("request_cache", Boolean.toString(request.getRequestCache()));
+ }
+ if (randomBoolean()) {
+ request.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE));
+ }
+ expectedParams.put("batched_reduce_size", Integer.toString(request.getBatchedReduceSize()));
+ if (randomBoolean()) {
+ request.setMaxConcurrentShardRequests(randomIntBetween(1, Integer.MAX_VALUE));
+ }
+ expectedParams.put("max_concurrent_shard_requests", Integer.toString(request.getMaxConcurrentShardRequests()));
+ }
+
+ public void testGetAsyncSearch() throws Exception {
+ String id = randomAlphaOfLengthBetween(5, 10);
+ Map expectedParams = new HashMap<>();
+ GetAsyncSearchRequest submitRequest = new GetAsyncSearchRequest(id);
+ if (randomBoolean()) {
+ TimeValue keepAlive = TimeValue.parseTimeValue(randomTimeValue(), "test");
+ submitRequest.setKeepAlive(keepAlive);
+ expectedParams.put("keep_alive", keepAlive.getStringRep());
+ }
+ if (randomBoolean()) {
+ TimeValue waitForCompletion = TimeValue.parseTimeValue(randomTimeValue(), "test");
+ submitRequest.setWaitForCompletion(waitForCompletion);
+ expectedParams.put("wait_for_completion", waitForCompletion.getStringRep());
+ }
+
+ Request request = AsyncSearchRequestConverters.getAsyncSearch(submitRequest);
+ String endpoint = "/_async_search/" + id;
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals(endpoint, request.getEndpoint());
+ assertEquals(expectedParams, request.getParameters());
+ }
+
+ public void testDeleteAsyncSearch() throws Exception {
+ String id = randomAlphaOfLengthBetween(5, 10);
+ DeleteAsyncSearchRequest deleteRequest = new DeleteAsyncSearchRequest(id);
+
+ Request request = AsyncSearchRequestConverters.deleteAsyncSearch(deleteRequest);
+ assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
+ assertEquals("/_async_search/" + id, request.getEndpoint());
+ assertTrue(request.getParameters().isEmpty());
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
index 6c280fba5ab01..7f8ed9e61bd1f 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
@@ -91,6 +91,7 @@
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider;
import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider;
import org.elasticsearch.client.ml.filestructurefinder.FileStructure;
import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider;
import org.elasticsearch.client.ml.inference.TrainedModelConfig;
@@ -1067,6 +1068,7 @@ protected NamedXContentRegistry xContentRegistry() {
namedXContent.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers());
namedXContent.addAll(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers());
+ namedXContent.addAll(new AnalysisStatsNamedXContentProvider().getNamedXContentParsers());
return new NamedXContentRegistry(namedXContent);
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
index 17a9c0cd1cfca..a1204823d749b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -874,7 +874,7 @@ private String createExpiredData(String jobId) throws Exception {
{
// Index a randomly named unused state document
String docId = "non_existing_job_" + randomFrom("model_state_1234567#1", "quantiles", "categorizer_state#1");
- IndexRequest indexRequest = new IndexRequest(".ml-state").id(docId);
+ IndexRequest indexRequest = new IndexRequest(".ml-state-000001").id(docId);
indexRequest.source(Collections.emptyMap(), XContentType.JSON);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
@@ -944,8 +944,8 @@ public void testDeleteExpiredData() throws Exception {
assertTrue(forecastExists(jobId, forecastId));
{
- // Verify .ml-state contains the expected unused state document
- Iterable hits = searchAll(".ml-state");
+ // Verify .ml-state* contains the expected unused state document
+ Iterable hits = searchAll(".ml-state*");
List target = new ArrayList<>();
hits.forEach(target::add);
long numMatches = target.stream()
@@ -974,8 +974,8 @@ public void testDeleteExpiredData() throws Exception {
assertFalse(forecastExists(jobId, forecastId));
{
- // Verify .ml-state doesn't contain unused state documents
- Iterable hits = searchAll(".ml-state");
+ // Verify .ml-state* doesn't contain unused state documents
+ Iterable hits = searchAll(".ml-state*");
List hitList = new ArrayList<>();
hits.forEach(hitList::add);
long numMatches = hitList.stream()
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index cdabbb5b4c6cb..1360b58e0b1ec 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -1021,12 +1021,34 @@ public void testSearchNullSource() throws IOException {
public void testSearch() throws Exception {
String searchEndpoint = randomFrom("_" + randomAlphaOfLength(5));
String[] indices = randomIndicesNames(0, 5);
+ Map expectedParams = new HashMap<>();
+ SearchRequest searchRequest = createTestSearchRequest(indices, expectedParams);
+
+ Request request = RequestConverters.search(searchRequest, searchEndpoint);
+ StringJoiner endpoint = new StringJoiner("/", "/", "");
+ String index = String.join(",", indices);
+ if (Strings.hasLength(index)) {
+ endpoint.add(index);
+ }
+ endpoint.add(searchEndpoint);
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals(endpoint.toString(), request.getEndpoint());
+ assertEquals(expectedParams, request.getParameters());
+ assertToXContentBody(searchRequest.source(), request.getEntity());
+ }
+
+ public static SearchRequest createTestSearchRequest(String[] indices, Map expectedParams) {
SearchRequest searchRequest = new SearchRequest(indices);
- Map expectedParams = new HashMap<>();
setRandomSearchParams(searchRequest, expectedParams);
setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
+ SearchSourceBuilder searchSourceBuilder = createTestSearchSourceBuilder();
+ searchRequest.source(searchSourceBuilder);
+ return searchRequest;
+ }
+
+ public static SearchSourceBuilder createTestSearchSourceBuilder() {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
// rarely skip setting the search source completely
if (frequently()) {
@@ -1071,22 +1093,11 @@ public void testSearch() throws Exception {
searchSourceBuilder.collapse(new CollapseBuilder(randomAlphaOfLengthBetween(3, 10)));
}
}
- searchRequest.source(searchSourceBuilder);
}
-
- Request request = RequestConverters.search(searchRequest, searchEndpoint);
- StringJoiner endpoint = new StringJoiner("/", "/", "");
- String index = String.join(",", indices);
- if (Strings.hasLength(index)) {
- endpoint.add(index);
- }
- endpoint.add(searchEndpoint);
- assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals(endpoint.toString(), request.getEndpoint());
- assertEquals(expectedParams, request.getParameters());
- assertToXContentBody(searchSourceBuilder, request.getEntity());
+ return searchSourceBuilder;
}
+
public void testSearchNullIndicesAndTypes() {
expectThrows(NullPointerException.class, () -> new SearchRequest((String[]) null));
expectThrows(NullPointerException.class, () -> new SearchRequest().indices((String[]) null));
@@ -1858,9 +1869,19 @@ private static void setRandomSearchParams(SearchRequest searchRequest,
searchRequest.setCcsMinimizeRoundtrips(randomBoolean());
}
expectedParams.put("ccs_minimize_roundtrips", Boolean.toString(searchRequest.isCcsMinimizeRoundtrips()));
+ if (randomBoolean()) {
+ searchRequest.setMaxConcurrentShardRequests(randomIntBetween(1, Integer.MAX_VALUE));
+ }
+ expectedParams.put("max_concurrent_shard_requests", Integer.toString(searchRequest.getMaxConcurrentShardRequests()));
+ if (randomBoolean()) {
+ searchRequest.setPreFilterShardSize(randomIntBetween(2, Integer.MAX_VALUE));
+ }
+ if (searchRequest.getPreFilterShardSize() != null) {
+ expectedParams.put("pre_filter_shard_size", Integer.toString(searchRequest.getPreFilterShardSize()));
+ }
}
- static void setRandomIndicesOptions(Consumer setter, Supplier getter,
+ public static void setRandomIndicesOptions(Consumer setter, Supplier getter,
Map expectedParams) {
if (randomBoolean()) {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index a8e8037930741..e35adbc7aa5e4 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -20,7 +20,6 @@
package org.elasticsearch.client;
import com.fasterxml.jackson.core.JsonParseException;
-
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
@@ -69,6 +68,9 @@
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
+import org.elasticsearch.client.ml.dataframe.stats.classification.ClassificationStats;
+import org.elasticsearch.client.ml.dataframe.stats.outlierdetection.OutlierDetectionStats;
+import org.elasticsearch.client.ml.dataframe.stats.regression.RegressionStats;
import org.elasticsearch.client.ml.inference.preprocessing.CustomWordEmbedding;
import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
@@ -697,7 +699,7 @@ public void testDefaultNamedXContents() {
public void testProvidedNamedXContents() {
List namedXContents = RestHighLevelClient.getProvidedNamedXContents();
- assertEquals(59, namedXContents.size());
+ assertEquals(62, namedXContents.size());
Map, Integer> categories = new HashMap<>();
List names = new ArrayList<>();
for (NamedXContentRegistry.Entry namedXContent : namedXContents) {
@@ -707,7 +709,7 @@ public void testProvidedNamedXContents() {
categories.put(namedXContent.categoryClass, counter + 1);
}
}
- assertEquals("Had: " + categories, 12, categories.size());
+ assertEquals("Had: " + categories, 13, categories.size());
assertEquals(Integer.valueOf(3), categories.get(Aggregation.class));
assertTrue(names.contains(ChildrenAggregationBuilder.NAME));
assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME));
@@ -737,6 +739,9 @@ public void testProvidedNamedXContents() {
assertTrue(names.contains(OutlierDetection.NAME.getPreferredName()));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.Regression.NAME.getPreferredName()));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.Classification.NAME.getPreferredName()));
+ assertTrue(names.contains(OutlierDetectionStats.NAME.getPreferredName()));
+ assertTrue(names.contains(RegressionStats.NAME.getPreferredName()));
+ assertTrue(names.contains(ClassificationStats.NAME.getPreferredName()));
assertEquals(Integer.valueOf(1), categories.get(SyncConfig.class));
assertTrue(names.contains(TimeSyncConfig.NAME));
assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.Evaluation.class));
@@ -790,7 +795,13 @@ public void testApiNamingConventions() throws Exception {
"indices.get_upgrade",
"indices.put_alias",
"render_search_template",
- "scripts_painless_execute"
+ "scripts_painless_execute",
+ "cluster.put_component_template",
+ "cluster.get_component_template",
+ "cluster.delete_component_template",
+ "indices.create_data_stream",
+ "indices.get_data_streams",
+ "indices.delete_data_stream"
};
//These API are not required for high-level client feature completeness
String[] notRequiredApi = new String[] {
@@ -887,6 +898,7 @@ public void testApiNamingConventions() throws Exception {
apiName.startsWith("eql.") == false &&
apiName.endsWith("freeze") == false &&
apiName.endsWith("reload_analyzers") == false &&
+ apiName.startsWith("async_search") == false &&
// IndicesClientIT.getIndexTemplate should be renamed "getTemplate" in version 8.0 when we
// can get rid of 7.0's deprecated "getTemplate"
apiName.equals("indices.get_index_template") == false) {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java
index 3f93806aca779..94341c41685f0 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java
@@ -441,6 +441,8 @@ public void testGetStats() throws Exception {
0L,
0L,
0L,
+ 0L,
+ 0L,
0.0,
0.0,
0.0);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java
new file mode 100644
index 0000000000000..38e7351e58836
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.core.AcknowledgedResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+
+import java.io.IOException;
+
+public class AsyncSearchIT extends ESRestHighLevelClientTestCase {
+
+ public void testAsyncSearch() throws IOException {
+ String index = "test-index";
+ createIndex(index, Settings.EMPTY);
+
+ SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(QueryBuilders.matchAllQuery());
+ SubmitAsyncSearchRequest submitRequest = new SubmitAsyncSearchRequest(sourceBuilder, index);
+ submitRequest.setCleanOnCompletion(false);
+ AsyncSearchResponse submitResponse = highLevelClient().asyncSearch().submit(submitRequest, RequestOptions.DEFAULT);
+ assertNotNull(submitResponse.getId());
+ assertFalse(submitResponse.isPartial());
+ assertTrue(submitResponse.getStartTime() > 0);
+ assertTrue(submitResponse.getExpirationTime() > 0);
+ assertNotNull(submitResponse.getSearchResponse());
+ if (submitResponse.isRunning() == false) {
+ assertFalse(submitResponse.isPartial());
+ } else {
+ assertTrue(submitResponse.isPartial());
+ }
+
+ GetAsyncSearchRequest getRequest = new GetAsyncSearchRequest(submitResponse.getId());
+ AsyncSearchResponse getResponse = highLevelClient().asyncSearch().get(getRequest, RequestOptions.DEFAULT);
+ while (getResponse.isRunning()) {
+ getResponse = highLevelClient().asyncSearch().get(getRequest, RequestOptions.DEFAULT);
+ }
+
+ assertFalse(getResponse.isRunning());
+ assertFalse(getResponse.isPartial());
+ assertTrue(getResponse.getStartTime() > 0);
+ assertTrue(getResponse.getExpirationTime() > 0);
+ assertNotNull(getResponse.getSearchResponse());
+
+ DeleteAsyncSearchRequest deleteRequest = new DeleteAsyncSearchRequest(submitResponse.getId());
+ AcknowledgedResponse deleteAsyncSearchResponse = highLevelClient().asyncSearch().delete(deleteRequest,
+ RequestOptions.DEFAULT);
+ assertNotNull(deleteAsyncSearchResponse);
+ assertTrue(deleteAsyncSearchResponse.isAcknowledged());
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchResponseTests.java
new file mode 100644
index 0000000000000..08c0da25e5bb6
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchResponseTests.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchResponse.Clusters;
+import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.client.AbstractResponseTestCase;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.search.internal.InternalSearchResponse;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.containsString;
+
+public class AsyncSearchResponseTests
+    extends AbstractResponseTestCase<org.elasticsearch.xpack.core.search.action.AsyncSearchResponse, AsyncSearchResponse> {
+
+    @Override
+    protected org.elasticsearch.xpack.core.search.action.AsyncSearchResponse createServerTestInstance(XContentType xContentType) {
+        boolean isPartial = randomBoolean();
+        boolean isRunning = randomBoolean();
+        long startTimeMillis = randomLongBetween(0, Long.MAX_VALUE);
+        long expirationTimeMillis = randomLongBetween(0, Long.MAX_VALUE);
+        String id = randomBoolean() ? null : randomAlphaOfLength(10);
+        ElasticsearchException error = randomBoolean() ? null : new ElasticsearchException(randomAlphaOfLength(10));
+        // add search response, minimal object is okay since the full randomization of parsing is tested in SearchResponseTests
+        SearchResponse searchResponse = randomBoolean() ? null
+            : new SearchResponse(InternalSearchResponse.empty(), randomAlphaOfLength(10), 1, 1, 0, randomIntBetween(0, 10000),
+                ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY);
+        org.elasticsearch.xpack.core.search.action.AsyncSearchResponse testResponse =
+            new org.elasticsearch.xpack.core.search.action.AsyncSearchResponse(id, searchResponse, error, isPartial, isRunning,
+                startTimeMillis, expirationTimeMillis);
+        return testResponse;
+    }
+
+    @Override
+    protected AsyncSearchResponse doParseToClientInstance(XContentParser parser) throws IOException {
+        return AsyncSearchResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected void assertInstances(org.elasticsearch.xpack.core.search.action.AsyncSearchResponse expected, AsyncSearchResponse parsed) {
+        assertNotSame(parsed, expected);
+        assertEquals(expected.getId(), parsed.getId());
+        assertEquals(expected.isRunning(), parsed.isRunning());
+        assertEquals(expected.isPartial(), parsed.isPartial());
+        assertEquals(expected.getStartTime(), parsed.getStartTime());
+        assertEquals(expected.getExpirationTime(), parsed.getExpirationTime());
+        // we cannot directly compare error since Exceptions are wrapped differently on parsing, but we can check original message
+        if (expected.getFailure() != null) {
+            assertThat(parsed.getFailure().getMessage(), containsString(expected.getFailure().getMessage()));
+        } else {
+            assertNull(parsed.getFailure());
+        }
+        // we don't need to check the complete parsed search response since this is done elsewhere
+        // only spot-check some randomized properties for equality here
+        if (expected.getSearchResponse() != null) {
+            assertEquals(expected.getSearchResponse().getTook(), parsed.getSearchResponse().getTook());
+            assertEquals(expected.getSearchResponse().getScrollId(), parsed.getSearchResponse().getScrollId());
+        } else {
+            assertNull(parsed.getSearchResponse());
+        }
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/GetAsyncSearchRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/GetAsyncSearchRequestTests.java
new file mode 100644
index 0000000000000..b6861b218cd28
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/GetAsyncSearchRequestTests.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.concurrent.TimeUnit;
+
+public class GetAsyncSearchRequestTests extends ESTestCase {
+
+    public void testValidation() {
+        GetAsyncSearchRequest request = new GetAsyncSearchRequest(randomAlphaOfLength(10));
+        request.setKeepAlive(new TimeValue(0));
+        assertTrue(request.validate().isPresent());
+        ValidationException exception = request.validate().get();
+        assertEquals(1, exception.validationErrors().size());
+        assertEquals("Validation Failed: 1: keep_alive must be greater than 1 minute, got: 0s;", exception.getMessage());
+
+        request.setKeepAlive(new TimeValue(1, TimeUnit.MINUTES));
+        assertFalse(request.validate().isPresent());
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/SubmitAsyncSearchRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/SubmitAsyncSearchRequestTests.java
new file mode 100644
index 0000000000000..f7075052cab2b
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/SubmitAsyncSearchRequestTests.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.asyncsearch;
+
+import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.suggest.SuggestBuilder;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Optional;
+
+public class SubmitAsyncSearchRequestTests extends ESTestCase {
+
+    public void testValidation() {
+        {
+            SearchSourceBuilder source = new SearchSourceBuilder();
+            SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(source, "test");
+            Optional<ValidationException> validation = request.validate();
+            assertFalse(validation.isPresent());
+        }
+        {
+            SearchSourceBuilder source = new SearchSourceBuilder().suggest(new SuggestBuilder());
+            SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(source, "test");
+            Optional<ValidationException> validation = request.validate();
+            assertTrue(validation.isPresent());
+            assertEquals(1, validation.get().validationErrors().size());
+            assertEquals("suggest-only queries are not supported", validation.get().validationErrors().get(0));
+        }
+        {
+            SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(new SearchSourceBuilder(), "test");
+            request.setKeepAlive(new TimeValue(1));
+            Optional<ValidationException> validation = request.validate();
+            assertTrue(validation.isPresent());
+            assertEquals(1, validation.get().validationErrors().size());
+            assertEquals("[keep_alive] must be greater than 1 minute, got: 1ms", validation.get().validationErrors().get(0));
+        }
+    }
+
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStatsTests.java
index 48ebf71e36023..d251f568dfa79 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStatsTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStatsTests.java
@@ -20,6 +20,14 @@
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.client.ml.NodeAttributesTests;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
+import org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider;
+import org.elasticsearch.client.ml.dataframe.stats.classification.ClassificationStatsTests;
+import org.elasticsearch.client.ml.dataframe.stats.common.DataCountsTests;
+import org.elasticsearch.client.ml.dataframe.stats.common.MemoryUsageTests;
+import org.elasticsearch.client.ml.dataframe.stats.outlierdetection.OutlierDetectionStatsTests;
+import org.elasticsearch.client.ml.dataframe.stats.regression.RegressionStatsTests;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
@@ -31,23 +39,39 @@
public class DataFrameAnalyticsStatsTests extends ESTestCase {
+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();
+        namedXContent.addAll(new AnalysisStatsNamedXContentProvider().getNamedXContentParsers());
+        return new NamedXContentRegistry(namedXContent);
+    }
+
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
DataFrameAnalyticsStatsTests::randomDataFrameAnalyticsStats,
DataFrameAnalyticsStatsTests::toXContent,
DataFrameAnalyticsStats::fromXContent)
.supportsUnknownFields(true)
- .randomFieldsExcludeFilter(field -> field.startsWith("node.attributes"))
+ .randomFieldsExcludeFilter(field -> field.startsWith("node.attributes") || field.startsWith("analysis_stats"))
.test();
}
public static DataFrameAnalyticsStats randomDataFrameAnalyticsStats() {
+ AnalysisStats analysisStats = randomBoolean() ? null :
+ randomFrom(
+ ClassificationStatsTests.createRandom(),
+ OutlierDetectionStatsTests.createRandom(),
+ RegressionStatsTests.createRandom()
+ );
+
return new DataFrameAnalyticsStats(
randomAlphaOfLengthBetween(1, 10),
randomFrom(DataFrameAnalyticsState.values()),
randomBoolean() ? null : randomAlphaOfLength(10),
randomBoolean() ? null : createRandomProgress(),
+ randomBoolean() ? null : DataCountsTests.createRandom(),
randomBoolean() ? null : MemoryUsageTests.createRandom(),
+ analysisStats,
randomBoolean() ? null : NodeAttributesTests.createRandom(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20));
}
@@ -71,9 +95,17 @@ public static void toXContent(DataFrameAnalyticsStats stats, XContentBuilder bui
if (stats.getProgress() != null) {
builder.field(DataFrameAnalyticsStats.PROGRESS.getPreferredName(), stats.getProgress());
}
+ if (stats.getDataCounts() != null) {
+ builder.field(DataFrameAnalyticsStats.DATA_COUNTS.getPreferredName(), stats.getDataCounts());
+ }
if (stats.getMemoryUsage() != null) {
builder.field(DataFrameAnalyticsStats.MEMORY_USAGE.getPreferredName(), stats.getMemoryUsage());
}
+ if (stats.getAnalysisStats() != null) {
+ builder.startObject("analysis_stats");
+ builder.field(stats.getAnalysisStats().getName(), stats.getAnalysisStats());
+ builder.endObject();
+ }
if (stats.getNode() != null) {
builder.field(DataFrameAnalyticsStats.NODE.getPreferredName(), stats.getNode());
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStatsTests.java
new file mode 100644
index 0000000000000..d23633c01d28a
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStatsTests.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.time.Instant;
+
+public class ClassificationStatsTests extends AbstractXContentTestCase<ClassificationStats> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected ClassificationStats doParseInstance(XContentParser parser) throws IOException {
+        return ClassificationStats.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected ClassificationStats createTestInstance() {
+        return createRandom();
+    }
+
+    public static ClassificationStats createRandom() {
+        return new ClassificationStats(
+            Instant.now(),
+            randomBoolean() ? null : randomIntBetween(1, Integer.MAX_VALUE),
+            HyperparametersTests.createRandom(),
+            TimingStatsTests.createRandom(),
+            ValidationLossTests.createRandom()
+        );
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/HyperparametersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/HyperparametersTests.java
new file mode 100644
index 0000000000000..aa1ab12c542ea
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/HyperparametersTests.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class HyperparametersTests extends AbstractXContentTestCase<Hyperparameters> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Hyperparameters doParseInstance(XContentParser parser) throws IOException {
+        return Hyperparameters.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected Hyperparameters createTestInstance() {
+        return createRandom();
+    }
+
+    public static Hyperparameters createRandom() {
+        return new Hyperparameters(
+            randomBoolean() ? null : randomAlphaOfLength(10),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+            randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+            randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+            randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+            randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble()
+        );
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStatsTests.java
new file mode 100644
index 0000000000000..5e2c4c842e18d
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStatsTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class TimingStatsTests extends AbstractXContentTestCase<TimingStats> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected TimingStats doParseInstance(XContentParser parser) throws IOException {
+        return TimingStats.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected TimingStats createTestInstance() {
+        return createRandom();
+    }
+
+    public static TimingStats createRandom() {
+        return new TimingStats(
+            randomBoolean() ? null : TimeValue.timeValueMillis(randomNonNegativeLong()),
+            randomBoolean() ? null : TimeValue.timeValueMillis(randomNonNegativeLong())
+        );
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLossTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLossTests.java
new file mode 100644
index 0000000000000..c841af43d4393
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLossTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.classification;
+
+import org.elasticsearch.client.ml.dataframe.stats.common.FoldValuesTests;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class ValidationLossTests extends AbstractXContentTestCase<ValidationLoss> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected ValidationLoss doParseInstance(XContentParser parser) throws IOException {
+        return ValidationLoss.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected ValidationLoss createTestInstance() {
+        return createRandom();
+    }
+
+    public static ValidationLoss createRandom() {
+        return new ValidationLoss(
+            randomBoolean() ? null : randomAlphaOfLength(10),
+            randomBoolean() ? null : randomList(5, FoldValuesTests::createRandom)
+        );
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCountsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCountsTests.java
new file mode 100644
index 0000000000000..5e877e2d40f7b
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCountsTests.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.ml.dataframe.stats.common;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class DataCountsTests extends AbstractXContentTestCase<DataCounts> {
+
+    @Override
+    protected DataCounts createTestInstance() {
+        return createRandom();
+    }
+
+    public static DataCounts createRandom() {
+        return new DataCounts(
+            randomNonNegativeLong(),
+            randomNonNegativeLong(),
+            randomNonNegativeLong()
+        );
+    }
+
+    @Override
+    protected DataCounts doParseInstance(XContentParser parser) throws IOException {
+        return DataCounts.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValuesTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValuesTests.java
new file mode 100644
index 0000000000000..90d9219327648
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValuesTests.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.common;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class FoldValuesTests extends AbstractXContentTestCase<FoldValues> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected FoldValues doParseInstance(XContentParser parser) throws IOException {
+        return FoldValues.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected FoldValues createTestInstance() {
+        return createRandom();
+    }
+
+    public static FoldValues createRandom() {
+        int valuesSize = randomIntBetween(0, 10);
+        double[] values = new double[valuesSize];
+        for (int i = 0; i < valuesSize; i++) {
+            values[i] = randomDouble();
+        }
+        return new FoldValues(randomIntBetween(0, Integer.MAX_VALUE), values);
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/MemoryUsageTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsageTests.java
similarity index 96%
rename from client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/MemoryUsageTests.java
rename to client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsageTests.java
index 8e06db6f2b37f..0e27295752190 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/MemoryUsageTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsageTests.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.client.ml.dataframe;
+package org.elasticsearch.client.ml.dataframe.stats.common;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStatsTests.java
new file mode 100644
index 0000000000000..f40de67a62cd2
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStatsTests.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.outlierdetection;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.time.Instant;
+
+public class OutlierDetectionStatsTests extends AbstractXContentTestCase<OutlierDetectionStats> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected OutlierDetectionStats doParseInstance(XContentParser parser) throws IOException {
+        return OutlierDetectionStats.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected OutlierDetectionStats createTestInstance() {
+        return createRandom();
+    }
+
+    public static OutlierDetectionStats createRandom() {
+        return new OutlierDetectionStats(
+            Instant.now(),
+            ParametersTests.createRandom(),
+            TimingStatsTests.createRandom()
+        );
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/ParametersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/ParametersTests.java
new file mode 100644
index 0000000000000..4f566562683de
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/ParametersTests.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.outlierdetection;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class ParametersTests extends AbstractXContentTestCase<Parameters> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Parameters doParseInstance(XContentParser parser) throws IOException {
+        return Parameters.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected Parameters createTestInstance() {
+        return createRandom();
+    }
+
+    public static Parameters createRandom() {
+        return new Parameters(
+            randomBoolean() ? null : randomIntBetween(1, Integer.MAX_VALUE),
+            randomBoolean() ? null : randomAlphaOfLength(5),
+            randomBoolean() ? null : randomBoolean(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomDouble(),
+            randomBoolean() ? null : randomBoolean()
+        );
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStatsTests.java
new file mode 100644
index 0000000000000..5483782e1d1cf
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStatsTests.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.outlierdetection;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class TimingStatsTests extends AbstractXContentTestCase<TimingStats> {
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+
+ @Override
+ protected TimingStats doParseInstance(XContentParser parser) throws IOException {
+ return TimingStats.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected TimingStats createTestInstance() {
+ return createRandom();
+ }
+
+ public static TimingStats createRandom() {
+ return new TimingStats(randomBoolean() ? null : TimeValue.timeValueMillis(randomNonNegativeLong()));
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/HyperparametersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/HyperparametersTests.java
new file mode 100644
index 0000000000000..43d0571bb206f
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/HyperparametersTests.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class HyperparametersTests extends AbstractXContentTestCase<Hyperparameters> {
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected Hyperparameters doParseInstance(XContentParser parser) throws IOException {
+ return Hyperparameters.PARSER.apply(parser, null);
+ }
+
+
+ @Override
+ protected Hyperparameters createTestInstance() {
+ return createRandom();
+ }
+
+ public static Hyperparameters createRandom() {
+ return new Hyperparameters(
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+ randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+ randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+ randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+ randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble(),
+ randomBoolean() ? null : randomDouble()
+ );
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStatsTests.java
new file mode 100644
index 0000000000000..d4e784bb335cc
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStatsTests.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.time.Instant;
+
+public class RegressionStatsTests extends AbstractXContentTestCase<RegressionStats> {
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected RegressionStats doParseInstance(XContentParser parser) throws IOException {
+ return RegressionStats.PARSER.apply(parser, null);
+ }
+
+
+ @Override
+ protected RegressionStats createTestInstance() {
+ return createRandom();
+ }
+
+ public static RegressionStats createRandom() {
+ return new RegressionStats(
+ Instant.now(),
+ randomBoolean() ? null : randomIntBetween(1, Integer.MAX_VALUE),
+ HyperparametersTests.createRandom(),
+ TimingStatsTests.createRandom(),
+ ValidationLossTests.createRandom()
+ );
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStatsTests.java
new file mode 100644
index 0000000000000..95fe6531f3b83
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStatsTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class TimingStatsTests extends AbstractXContentTestCase<TimingStats> {
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected TimingStats doParseInstance(XContentParser parser) throws IOException {
+ return TimingStats.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected TimingStats createTestInstance() {
+ return createRandom();
+ }
+
+ public static TimingStats createRandom() {
+ return new TimingStats(
+ randomBoolean() ? null : TimeValue.timeValueMillis(randomNonNegativeLong()),
+ randomBoolean() ? null : TimeValue.timeValueMillis(randomNonNegativeLong())
+ );
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLossTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLossTests.java
new file mode 100644
index 0000000000000..d2a9f960bbbb2
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLossTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.dataframe.stats.regression;
+
+import org.elasticsearch.client.ml.dataframe.stats.common.FoldValuesTests;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class ValidationLossTests extends AbstractXContentTestCase<ValidationLoss> {
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected ValidationLoss doParseInstance(XContentParser parser) throws IOException {
+ return ValidationLoss.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected ValidationLoss createTestInstance() {
+ return createRandom();
+ }
+
+ public static ValidationLoss createRandom() {
+ return new ValidationLoss(
+ randomBoolean() ? null : randomAlphaOfLength(10),
+ randomBoolean() ? null : randomList(5, () -> FoldValuesTests.createRandom())
+ );
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java
index b866420a44c01..122f156986d71 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java
@@ -64,8 +64,9 @@ private GetRollupJobResponse createTestInstance() {
private RollupIndexerJobStats randomStats() {
return new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
- randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
- randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong());
+ randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
+ randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
+ randomNonNegativeLong());
}
private RollupJobStatus randomStatus() {
@@ -120,6 +121,8 @@ public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, ToX
builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
+ builder.field(IndexerJobStats.PROCESSING_TIME_IN_MS.getPreferredName(), stats.getProcessingTime());
+ builder.field(IndexerJobStats.PROCESSING_TOTAL.getPreferredName(), stats.getProcessingTotal());
builder.endObject();
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/DeleteRoleMappingResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/DeleteRoleMappingResponseTests.java
index d89deb44e9f68..1eee41bc23685 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/DeleteRoleMappingResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/DeleteRoleMappingResponseTests.java
@@ -35,15 +35,7 @@ public class DeleteRoleMappingResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
final String json = "{ \"found\" : \"true\" }";
final DeleteRoleMappingResponse response = DeleteRoleMappingResponse.fromXContent(XContentType.JSON.xContent().createParser(
- new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json));
+ new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json));
final DeleteRoleMappingResponse expectedResponse = new DeleteRoleMappingResponse(true);
assertThat(response, equalTo(expectedResponse));
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/ExpressionRoleMappingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/ExpressionRoleMappingTests.java
index f30307ebde51a..3dd9d3ca5ee53 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/ExpressionRoleMappingTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/ExpressionRoleMappingTests.java
@@ -37,29 +37,21 @@
public class ExpressionRoleMappingTests extends ESTestCase {
public void testExpressionRoleMappingParser() throws IOException {
- final String json =
- "{\n" +
- " \"enabled\" : true,\n" +
- " \"roles\" : [\n" +
- " \"superuser\"\n" +
- " ],\n" +
- " \"rules\" : {\n" +
- " \"field\" : {\n" +
- " \"realm.name\" : \"kerb1\"\n" +
- " }\n" +
- " },\n" +
- " \"metadata\" : { }\n" +
+ final String json =
+ "{\n" +
+ " \"enabled\" : true,\n" +
+ " \"roles\" : [\n" +
+ " \"superuser\"\n" +
+ " ],\n" +
+ " \"rules\" : {\n" +
+ " \"field\" : {\n" +
+ " \"realm.name\" : \"kerb1\"\n" +
+ " }\n" +
+ " },\n" +
+ " \"metadata\" : { }\n" +
" }";
final ExpressionRoleMapping expressionRoleMapping = ExpressionRoleMapping.PARSER.parse(XContentType.JSON.xContent().createParser(
- new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json), "example-role-mapping");
+ new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json), "example-role-mapping");
final ExpressionRoleMapping expectedRoleMapping = new ExpressionRoleMapping("example-role-mapping",
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
singletonList("superuser"), Collections.emptyList(),
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetPrivilegesResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetPrivilegesResponseTests.java
index bf55e224095fe..ce17c9b1105d8 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetPrivilegesResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetPrivilegesResponseTests.java
@@ -81,15 +81,7 @@ public void testFromXContent() throws IOException {
"}";
final GetPrivilegesResponse response = GetPrivilegesResponse.fromXContent(XContentType.JSON.xContent().createParser(
- new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json));
+ new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json));
final ApplicationPrivilege readTestappPrivilege =
new ApplicationPrivilege("testapp", "read", Arrays.asList("action:login", "data:read/*"), null);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRoleMappingsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRoleMappingsResponseTests.java
index 20883b859f9ae..4fda57f2ff631 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRoleMappingsResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRoleMappingsResponseTests.java
@@ -36,42 +36,34 @@
public class GetRoleMappingsResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
- final String json = "{\n" +
- " \"kerberosmapping\" : {\n" +
- " \"enabled\" : true,\n" +
- " \"roles\" : [\n" +
- " \"superuser\"\n" +
- " ],\n" +
- " \"rules\" : {\n" +
- " \"field\" : {\n" +
- " \"realm.name\" : \"kerb1\"\n" +
- " }\n" +
- " },\n" +
- " \"metadata\" : { }\n" +
- " },\n" +
- " \"ldapmapping\" : {\n" +
- " \"enabled\" : false,\n" +
- " \"roles\" : [\n" +
- " \"monitoring\"\n" +
- " ],\n" +
- " \"rules\" : {\n" +
- " \"field\" : {\n" +
- " \"groups\" : \"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local\"\n" +
- " }\n" +
- " },\n" +
- " \"metadata\" : { }\n" +
- " }\n" +
+ final String json = "{\n" +
+ " \"kerberosmapping\" : {\n" +
+ " \"enabled\" : true,\n" +
+ " \"roles\" : [\n" +
+ " \"superuser\"\n" +
+ " ],\n" +
+ " \"rules\" : {\n" +
+ " \"field\" : {\n" +
+ " \"realm.name\" : \"kerb1\"\n" +
+ " }\n" +
+ " },\n" +
+ " \"metadata\" : { }\n" +
+ " },\n" +
+ " \"ldapmapping\" : {\n" +
+ " \"enabled\" : false,\n" +
+ " \"roles\" : [\n" +
+ " \"monitoring\"\n" +
+ " ],\n" +
+ " \"rules\" : {\n" +
+ " \"field\" : {\n" +
+ " \"groups\" : \"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local\"\n" +
+ " }\n" +
+ " },\n" +
+ " \"metadata\" : { }\n" +
+ " }\n" +
"}";
final GetRoleMappingsResponse response = GetRoleMappingsResponse.fromXContent(XContentType.JSON.xContent().createParser(
- new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json));
+ new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json));
final List<ExpressionRoleMapping> expectedRoleMappingsList = new ArrayList<>();
expectedRoleMappingsList.add(new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("realm.name",
"kerb1"), Collections.singletonList("superuser"), Collections.emptyList(), null, true));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRolesResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRolesResponseTests.java
index c4620fa1a2f3d..8be4320e4de7c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRolesResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetRolesResponseTests.java
@@ -64,15 +64,7 @@ public void testFromXContent() throws IOException {
" }\n" +
"}";
final GetRolesResponse response = GetRolesResponse.fromXContent((XContentType.JSON.xContent().createParser(
- new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json)));
+ new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json)));
assertThat(response.getRoles().size(), equalTo(1));
assertThat(response.getTransientMetadataMap().size(), equalTo(1));
final Role role = response.getRoles().get(0);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java
index 24ed5684fa856..1c9ccaa2f7490 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java
@@ -115,15 +115,7 @@ private <T> T checkExpressionType(RoleMapperExpression expr, Class<T> type) {
private RoleMapperExpression parse(String json) throws IOException {
return new RoleMapperExpressionParser().parse("rules", XContentType.JSON.xContent().createParser(new NamedXContentRegistry(
- Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json));
+ Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json));
}
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/user/privileges/ApplicationPrivilegeTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/user/privileges/ApplicationPrivilegeTests.java
index b720187673023..532ff612fedc1 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/user/privileges/ApplicationPrivilegeTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/user/privileges/ApplicationPrivilegeTests.java
@@ -54,15 +54,7 @@ public void testFromXContentAndToXContent() throws IOException {
+ " }\n"
+ "}";
final ApplicationPrivilege privilege = ApplicationPrivilege.fromXContent(XContentType.JSON.xContent().createParser(
- new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
- @Override
- public void usedDeprecatedName(String usedName, String modernName) {
- }
-
- @Override
- public void usedDeprecatedField(String usedName, String replacedWith) {
- }
- }, json));
+ new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json));
final Map<String, Object> metadata = new HashMap<>();
metadata.put("description", "Read access to myapp");
final ApplicationPrivilege expectedPrivilege =
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java
index 8e1dbefa127a8..fcbc746de2263 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java
@@ -19,47 +19,75 @@
package org.elasticsearch.client.transform;
+import org.elasticsearch.action.admin.indices.alias.Alias;
+import org.elasticsearch.client.indices.CreateIndexRequest;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
+import static org.hamcrest.Matchers.equalTo;
public class PreviewTransformResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
- xContentTester(this::createParser,
- this::createTestInstance,
- this::toXContent,
- PreviewTransformResponse::fromXContent)
- .supportsUnknownFields(true)
- .randomFieldsExcludeFilter(path -> path.isEmpty() == false)
- .test();
+ xContentTester(this::createParser, this::createTestInstance, this::toXContent, PreviewTransformResponse::fromXContent)
+ .supportsUnknownFields(true)
+ .randomFieldsExcludeFilter(path -> path.isEmpty() == false)
+ .test();
}
- private PreviewTransformResponse createTestInstance() {
- int numDocs = randomIntBetween(5, 10);
- List<Map<String, Object>> docs = new ArrayList<>(numDocs);
- for (int i=0; i<numDocs; i++) {
- Map<String, Object> doc = new HashMap<>();
- for (int j=0; j<randomIntBetween(1, 10); j++) {
- doc.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
- }
- docs.add(doc);
- }
- int numMappingEntries = randomIntBetween(1, 10);
- Map<String, Object> mappings = new HashMap<>(numMappingEntries);
- for (int i = 0; i < numMappingEntries; i++) {
- mappings.put(randomAlphaOfLength(10), Map.of("type", randomAlphaOfLength(10)));
+ public void testCreateIndexRequest() throws IOException {
+ PreviewTransformResponse previewResponse = randomPreviewResponse();
+
+ CreateIndexRequest createIndexRequest = previewResponse.getCreateIndexRequest("dest_index");
+ assertEquals("dest_index", createIndexRequest.index());
+ assertThat(createIndexRequest.aliases(), equalTo(previewResponse.getAliases()));
+ assertThat(createIndexRequest.settings(), equalTo(previewResponse.getSettings()));
+
+ XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+ builder.map(previewResponse.getMappings());
+
+ assertThat(BytesReference.bytes(builder), equalTo(createIndexRequest.mappings()));
+ }
+
+ public void testBWCPre77XContent() throws IOException {
+ PreviewTransformResponse response = randomPreviewResponse();
+
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+
+ builder.startObject();
+ builder.startArray("preview");
+ for (Map<String, Object> doc : response.getDocs()) {
+ builder.map(doc);
}
+ builder.endArray();
+ builder.field("mappings", response.getGeneratedDestIndexSettings().getMappings());
+ builder.endObject();
+ XContentParser parser = createParser(builder);
+ PreviewTransformResponse oldResponse = PreviewTransformResponse.fromXContent(parser);
- return new PreviewTransformResponse(docs, mappings);
+ assertThat(response.getDocs(), equalTo(oldResponse.getDocs()));
+ assertThat(response.getMappings(), equalTo(oldResponse.getMappings()));
+ assertTrue(oldResponse.getAliases().isEmpty());
+ assertThat(oldResponse.getSettings(), equalTo(Settings.EMPTY));
+ }
+
+ private PreviewTransformResponse createTestInstance() {
+ return randomPreviewResponse();
}
private void toXContent(PreviewTransformResponse response, XContentBuilder builder) throws IOException {
@@ -69,7 +97,63 @@ private void toXContent(PreviewTransformResponse response, XContentBuilder build
builder.map(doc);
}
builder.endArray();
- builder.field("mappings", response.getMappings());
+ builder.startObject("generated_dest_index");
+ builder.field("mappings", response.getGeneratedDestIndexSettings().getMappings());
+
+ builder.startObject("settings");
+ response.getGeneratedDestIndexSettings().getSettings().toXContent(builder, ToXContent.EMPTY_PARAMS);
+ builder.endObject();
+
+ builder.startObject("aliases");
+ for (Alias alias : response.getGeneratedDestIndexSettings().getAliases()) {
+ alias.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ }
+ builder.endObject();
builder.endObject();
+ builder.endObject();
+ }
+
+ private static PreviewTransformResponse randomPreviewResponse() {
+ int size = randomIntBetween(0, 10);
+ List<Map<String, Object>> data = new ArrayList<>(size);
+ for (int i = 0; i < size; i++) {
+ data.add(Map.of(randomAlphaOfLength(10), Map.of("value1", randomIntBetween(1, 100))));
+ }
+
+ return new PreviewTransformResponse(data, randomGeneratedDestIndexSettings());
+ }
+
+ private static PreviewTransformResponse.GeneratedDestIndexSettings randomGeneratedDestIndexSettings() {
+ int size = randomIntBetween(0, 10);
+
+ Map<String, Object> mappings = null;
+ if (randomBoolean()) {
+ mappings = new HashMap<>(size);
+
+ for (int i = 0; i < size; i++) {
+ mappings.put(randomAlphaOfLength(10), Map.of("type", randomAlphaOfLength(10)));
+ }
+ }
+
+ Settings settings = null;
+ if (randomBoolean()) {
+ Settings.Builder settingsBuilder = Settings.builder();
+ size = randomIntBetween(0, 10);
+ for (int i = 0; i < size; i++) {
+ settingsBuilder.put(randomAlphaOfLength(10), randomBoolean());
+ }
+ settings = settingsBuilder.build();
+ }
+
+ Set<Alias> aliases = null;
+ if (randomBoolean()) {
+ aliases = new HashSet<>();
+ size = randomIntBetween(0, 10);
+ for (int i = 0; i < size; i++) {
+ aliases.add(new Alias(randomAlphaOfLength(10)));
+ }
+ }
+
+ return new PreviewTransformResponse.GeneratedDestIndexSettings(mappings, settings, aliases);
}
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/hlrc/PreviewTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/hlrc/PreviewTransformResponseTests.java
new file mode 100644
index 0000000000000..1aed99f320247
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/hlrc/PreviewTransformResponseTests.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.transform.hlrc;
+
+import org.elasticsearch.action.admin.indices.alias.Alias;
+import org.elasticsearch.client.AbstractResponseTestCase;
+import org.elasticsearch.client.transform.PreviewTransformResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction;
+import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response;
+import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettings;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class PreviewTransformResponseTests extends AbstractResponseTestCase<
+ PreviewTransformAction.Response,
+ org.elasticsearch.client.transform.PreviewTransformResponse> {
+
+ public static Response randomPreviewResponse() {
+ int size = randomIntBetween(0, 10);
+ List<Map<String, Object>> data = new ArrayList<>(size);
+ for (int i = 0; i < size; i++) {
+ data.add(Map.of(randomAlphaOfLength(10), Map.of("value1", randomIntBetween(1, 100))));
+ }
+
+ return new Response(data, randomGeneratedDestIndexSettings());
+ }
+
+ private static TransformDestIndexSettings randomGeneratedDestIndexSettings() {
+ int size = randomIntBetween(0, 10);
+
+ Map<String, Object> mappings = null;
+
+ if (randomBoolean()) {
+ mappings = new HashMap<>(size);
+
+ for (int i = 0; i < size; i++) {
+ mappings.put(randomAlphaOfLength(10), Map.of("type", randomAlphaOfLength(10)));
+ }
+ }
+
+ Settings settings = null;
+ if (randomBoolean()) {
+ Settings.Builder settingsBuilder = Settings.builder();
+ size = randomIntBetween(0, 10);
+ for (int i = 0; i < size; i++) {
+ settingsBuilder.put(randomAlphaOfLength(10), randomBoolean());
+ }
+ settings = settingsBuilder.build();
+ }
+
+ Set<Alias> aliases = null;
+
+ if (randomBoolean()) {
+ aliases = new HashSet<>();
+ size = randomIntBetween(0, 10);
+ for (int i = 0; i < size; i++) {
+ aliases.add(new Alias(randomAlphaOfLength(10)));
+ }
+ }
+
+ return new TransformDestIndexSettings(mappings, settings, aliases);
+ }
+
+ @Override
+ protected Response createServerTestInstance(XContentType xContentType) {
+ return randomPreviewResponse();
+ }
+
+ @Override
+ protected PreviewTransformResponse doParseToClientInstance(XContentParser parser) throws IOException {
+ return org.elasticsearch.client.transform.PreviewTransformResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected void assertInstances(Response serverTestInstance, PreviewTransformResponse clientInstance) {
+ assertThat(serverTestInstance.getDocs(), equalTo(clientInstance.getDocs()));
+ assertThat(
+ serverTestInstance.getGeneratedDestIndexSettings().getAliases(),
+ equalTo(clientInstance.getGeneratedDestIndexSettings().getAliases())
+ );
+ assertThat(
+ serverTestInstance.getGeneratedDestIndexSettings().getMappings(),
+ equalTo(clientInstance.getGeneratedDestIndexSettings().getMappings())
+ );
+ assertThat(
+ serverTestInstance.getGeneratedDestIndexSettings().getSettings(),
+ equalTo(clientInstance.getGeneratedDestIndexSettings().getSettings())
+ );
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java
index 018cab89b0fc9..e06a7cddb93e9 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java
@@ -31,41 +31,103 @@ public class TransformIndexerStatsTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(
- this::createParser,
- TransformIndexerStatsTests::randomStats,
- TransformIndexerStatsTests::toXContent,
- TransformIndexerStats::fromXContent)
- .supportsUnknownFields(true)
- .test();
+ this::createParser,
+ TransformIndexerStatsTests::randomStats,
+ TransformIndexerStatsTests::toXContent,
+ TransformIndexerStats::fromXContent
+ ).supportsUnknownFields(true).test();
}
public static TransformIndexerStats randomStats() {
- return new TransformIndexerStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
- randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
- randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
- randomBoolean() ? null : randomDouble(),
- randomBoolean() ? null : randomDouble(),
- randomBoolean() ? null : randomDouble());
+ return new TransformIndexerStats(
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomNonNegativeLong(),
+ randomDouble(),
+ randomDouble(),
+ randomDouble()
+ );
}
public static void toXContent(TransformIndexerStats stats, XContentBuilder builder) throws IOException {
builder.startObject();
- builder.field(IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages());
- builder.field(IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
- builder.field(IndexerJobStats.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments());
- builder.field(IndexerJobStats.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations());
- builder.field(IndexerJobStats.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime());
- builder.field(IndexerJobStats.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal());
- builder.field(IndexerJobStats.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures());
- builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
- builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
- builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
- builder.field(TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
- stats.getExpAvgCheckpointDurationMs());
- builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(),
- stats.getExpAvgDocumentsIndexed());
- builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
- stats.getExpAvgDocumentsProcessed());
+ if (randomBoolean()) {
+ builder.field(IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages());
+ builder.field(IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
+ builder.field(IndexerJobStats.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments());
+ builder.field(IndexerJobStats.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations());
+ builder.field(IndexerJobStats.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime());
+ builder.field(IndexerJobStats.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal());
+ builder.field(IndexerJobStats.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures());
+ builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
+ builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
+ builder.field(IndexerJobStats.PROCESSING_TIME_IN_MS.getPreferredName(), stats.getProcessingTime());
+ builder.field(IndexerJobStats.PROCESSING_TOTAL.getPreferredName(), stats.getProcessingTotal());
+ builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
+ builder.field(
+ TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
+ stats.getExpAvgCheckpointDurationMs()
+ );
+ builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), stats.getExpAvgDocumentsIndexed());
+ builder.field(
+ TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
+ stats.getExpAvgDocumentsProcessed()
+ );
+ } else {
+ // a toXContent version which leaves out fields with value 0 (simulating the case that an older version misses a field)
+ xContentFieldIfNotZero(builder, IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages());
+ xContentFieldIfNotZero(builder, IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
+ xContentFieldIfNotZero(builder, IndexerJobStats.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments());
+ xContentFieldIfNotZero(builder, IndexerJobStats.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations());
+ xContentFieldIfNotZero(builder, IndexerJobStats.INDEX_TIME_IN_MS.getPreferredName(), stats.getIndexTime());
+ xContentFieldIfNotZero(builder, IndexerJobStats.INDEX_TOTAL.getPreferredName(), stats.getIndexTotal());
+ xContentFieldIfNotZero(builder, IndexerJobStats.INDEX_FAILURES.getPreferredName(), stats.getIndexFailures());
+ xContentFieldIfNotZero(builder, IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
+ xContentFieldIfNotZero(builder, IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
+ xContentFieldIfNotZero(builder, IndexerJobStats.PROCESSING_TIME_IN_MS.getPreferredName(), stats.getProcessingTime());
+ xContentFieldIfNotZero(builder, IndexerJobStats.PROCESSING_TOTAL.getPreferredName(), stats.getProcessingTotal());
+ xContentFieldIfNotZero(builder, IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
+ xContentFieldIfNotZero(
+ builder,
+ TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
+ stats.getExpAvgCheckpointDurationMs()
+ );
+ xContentFieldIfNotZero(
+ builder,
+ TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(),
+ stats.getExpAvgDocumentsIndexed()
+ );
+ xContentFieldIfNotZero(
+ builder,
+ TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
+ stats.getExpAvgDocumentsProcessed()
+ );
+ }
builder.endObject();
}
+
+ private static XContentBuilder xContentFieldIfNotZero(XContentBuilder builder, String name, long value) throws IOException {
+ if (value > 0) {
+ builder.field(name, value);
+ }
+
+ return builder;
+ }
+
+ private static XContentBuilder xContentFieldIfNotZero(XContentBuilder builder, String name, double value) throws IOException {
+ if (value > 0.0) {
+ builder.field(name, value);
+ }
+
+ return builder;
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java
index 50c98bcd8f0b6..eb74164be5364 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java
@@ -30,17 +30,31 @@
public class TransformIndexerStatsTests extends AbstractResponseTestCase<
org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats,
- TransformIndexerStats> {
+ TransformIndexerStats> {
+
+ public static org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats randomStats() {
+ return new org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats(
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomLongBetween(0L, 10000L),
+ randomDouble(),
+ randomDouble(),
+ randomDouble()
+ );
+ }
@Override
protected org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats createServerTestInstance(XContentType xContentType) {
- return new org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats(randomLongBetween(10L, 10000L),
- randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
- randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
- randomLongBetween(0L, 10000L),
- randomBoolean() ? null : randomDouble(),
- randomBoolean() ? null : randomDouble(),
- randomBoolean() ? null : randomDouble());
+ return randomStats();
}
@Override
@@ -49,8 +63,10 @@ protected TransformIndexerStats doParseToClientInstance(XContentParser parser) t
}
@Override
- protected void assertInstances(org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats serverTestInstance,
- TransformIndexerStats clientInstance) {
+ protected void assertInstances(
+ org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats serverTestInstance,
+ TransformIndexerStats clientInstance
+ ) {
assertTransformIndexerStats(serverTestInstance, clientInstance);
}
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java
index 63f6bffdf6321..c732c425fe427 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java
@@ -36,40 +36,36 @@
import static org.hamcrest.Matchers.equalTo;
-public class TransformStatsTests extends AbstractResponseTestCase<org.elasticsearch.xpack.core.transform.transforms.TransformStats, TransformStats> {
public static org.elasticsearch.xpack.core.transform.transforms.NodeAttributes randomNodeAttributes() {
int numberOfAttributes = randomIntBetween(1, 10);
 Map<String, String> attributes = new HashMap<>(numberOfAttributes);
- for(int i = 0; i < numberOfAttributes; i++) {
+ for (int i = 0; i < numberOfAttributes; i++) {
String val = randomAlphaOfLength(10);
- attributes.put("key-"+i, val);
+ attributes.put("key-" + i, val);
}
- return new org.elasticsearch.xpack.core.transform.transforms.NodeAttributes(randomAlphaOfLength(10),
+ return new org.elasticsearch.xpack.core.transform.transforms.NodeAttributes(
randomAlphaOfLength(10),
randomAlphaOfLength(10),
randomAlphaOfLength(10),
- attributes);
+ randomAlphaOfLength(10),
+ attributes
+ );
}
- public static org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats randomStats() {
- return new org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats(randomLongBetween(10L, 10000L),
- randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
- randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
- randomLongBetween(0L, 10000L),
- randomBoolean() ? null : randomDouble(),
- randomBoolean() ? null : randomDouble(),
- randomBoolean() ? null : randomDouble());
- }
@Override
protected org.elasticsearch.xpack.core.transform.transforms.TransformStats createServerTestInstance(XContentType xContentType) {
- return new org.elasticsearch.xpack.core.transform.transforms.TransformStats(randomAlphaOfLength(10),
+ return new org.elasticsearch.xpack.core.transform.transforms.TransformStats(
+ randomAlphaOfLength(10),
randomFrom(org.elasticsearch.xpack.core.transform.transforms.TransformStats.State.values()),
randomBoolean() ? null : randomAlphaOfLength(100),
randomBoolean() ? null : randomNodeAttributes(),
- randomStats(),
- TransformCheckpointingInfoTests.randomTransformCheckpointingInfo());
+ TransformIndexerStatsTests.randomStats(),
+ TransformCheckpointingInfoTests.randomTransformCheckpointingInfo()
+ );
}
@Override
@@ -78,8 +74,10 @@ protected TransformStats doParseToClientInstance(XContentParser parser) throws I
}
@Override
- protected void assertInstances(org.elasticsearch.xpack.core.transform.transforms.TransformStats serverTestInstance,
- TransformStats clientInstance) {
+ protected void assertInstances(
+ org.elasticsearch.xpack.core.transform.transforms.TransformStats serverTestInstance,
+ TransformStats clientInstance
+ ) {
assertThat(serverTestInstance.getId(), equalTo(clientInstance.getId()));
assertThat(serverTestInstance.getState().value(), equalTo(clientInstance.getState().value()));
assertTransformIndexerStats(serverTestInstance.getIndexerStats(), clientInstance.getIndexerStats());
@@ -88,8 +86,10 @@ protected void assertInstances(org.elasticsearch.xpack.core.transform.transforms
assertThat(serverTestInstance.getReason(), equalTo(clientInstance.getReason()));
}
- private void assertNodeAttributes(org.elasticsearch.xpack.core.transform.transforms.NodeAttributes serverTestInstance,
- NodeAttributes clientInstance) {
+ private void assertNodeAttributes(
+ org.elasticsearch.xpack.core.transform.transforms.NodeAttributes serverTestInstance,
+ NodeAttributes clientInstance
+ ) {
if (serverTestInstance == null || clientInstance == null) {
assertNull(serverTestInstance);
assertNull(clientInstance);
@@ -102,8 +102,10 @@ private void assertNodeAttributes(org.elasticsearch.xpack.core.transform.transfo
assertThat(serverTestInstance.getTransportAddress(), equalTo(clientInstance.getTransportAddress()));
}
- public static void assertTransformProgress(org.elasticsearch.xpack.core.transform.transforms.TransformProgress serverTestInstance,
- TransformProgress clientInstance) {
+ public static void assertTransformProgress(
+ org.elasticsearch.xpack.core.transform.transforms.TransformProgress serverTestInstance,
+ TransformProgress clientInstance
+ ) {
if (serverTestInstance == null || clientInstance == null) {
assertNull(serverTestInstance);
assertNull(clientInstance);
@@ -115,16 +117,18 @@ public static void assertTransformProgress(org.elasticsearch.xpack.core.transfor
assertThat(serverTestInstance.getDocumentsIndexed(), equalTo(clientInstance.getDocumentsIndexed()));
}
- public static void assertPosition(org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition serverTestInstance,
- TransformIndexerPosition clientInstance) {
+ public static void assertPosition(
+ org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition serverTestInstance,
+ TransformIndexerPosition clientInstance
+ ) {
assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition()));
assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition()));
}
-
public static void assertTransformCheckpointStats(
- org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats serverTestInstance,
- TransformCheckpointStats clientInstance) {
+ org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats serverTestInstance,
+ TransformCheckpointStats clientInstance
+ ) {
assertTransformProgress(serverTestInstance.getCheckpointProgress(), clientInstance.getCheckpointProgress());
assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint()));
assertPosition(serverTestInstance.getPosition(), clientInstance.getPosition());
@@ -133,8 +137,9 @@ public static void assertTransformCheckpointStats(
}
public static void assertTransformCheckpointInfo(
- org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo serverTestInstance,
- TransformCheckpointingInfo clientInstance) {
+ org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo serverTestInstance,
+ TransformCheckpointingInfo clientInstance
+ ) {
assertTransformCheckpointStats(serverTestInstance.getNext(), clientInstance.getNext());
assertTransformCheckpointStats(serverTestInstance.getLast(), clientInstance.getLast());
assertThat(serverTestInstance.getChangesLastDetectedAt(), equalTo(clientInstance.getChangesLastDetectedAt()));
@@ -142,8 +147,9 @@ public static void assertTransformCheckpointInfo(
}
public static void assertTransformIndexerStats(
- org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats serverTestInstance,
- TransformIndexerStats clientInstance) {
+ org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats serverTestInstance,
+ TransformIndexerStats clientInstance
+ ) {
assertThat(serverTestInstance.getExpAvgCheckpointDurationMs(), equalTo(clientInstance.getExpAvgCheckpointDurationMs()));
assertThat(serverTestInstance.getExpAvgDocumentsProcessed(), equalTo(clientInstance.getExpAvgDocumentsProcessed()));
assertThat(serverTestInstance.getExpAvgDocumentsIndexed(), equalTo(clientInstance.getExpAvgDocumentsIndexed()));
diff --git a/client/sniffer/licenses/jackson-core-2.10.3.jar.sha1 b/client/sniffer/licenses/jackson-core-2.10.3.jar.sha1
new file mode 100644
index 0000000000000..f23937b0d82a4
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.10.3.jar.sha1
@@ -0,0 +1 @@
+f7ee7b55c7d292ac72fbaa7648c089f069c938d2
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1
deleted file mode 100644
index e7ad1e74ed6b8..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-876ead1db19f0c9e79c9789273a3ef8c6fd6c29b
\ No newline at end of file
diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle
index 029eb0733023f..969e92fda28ce 100644
--- a/distribution/archives/build.gradle
+++ b/distribution/archives/build.gradle
@@ -50,7 +50,7 @@ task createJvmOptionsDir(type: EmptyDirTask) {
dirMode = 0750
}
-CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, boolean oss, boolean jdk) {
+CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean oss, boolean jdk) {
return copySpec {
into("elasticsearch-${version}") {
into('lib') {
@@ -70,7 +70,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
}
if (jdk) {
into("darwin".equals(platform) ? 'jdk.app' : 'jdk') {
- with jdkFiles(project, platform)
+ with jdkFiles(project, platform, architecture)
}
}
into('') {
@@ -116,31 +116,31 @@ Closure commonZipConfig = {
task buildIntegTestZip(type: Zip) {
configure(commonZipConfig)
- with archiveFiles(transportModulesFiles, 'zip', null, true, false)
+ with archiveFiles(transportModulesFiles, 'zip', null, 'x64', true, false)
}
task buildWindowsZip(type: Zip) {
configure(commonZipConfig)
archiveClassifier = 'windows-x86_64'
- with archiveFiles(modulesFiles(false, 'windows'), 'zip', 'windows', false, true)
+ with archiveFiles(modulesFiles(false, 'windows'), 'zip', 'windows', 'x64', false, true)
}
task buildOssWindowsZip(type: Zip) {
configure(commonZipConfig)
archiveClassifier = 'windows-x86_64'
- with archiveFiles(modulesFiles(true, 'windows'), 'zip', 'windows', true, true)
+ with archiveFiles(modulesFiles(true, 'windows'), 'zip', 'windows', 'x64', true, true)
}
task buildNoJdkWindowsZip(type: Zip) {
configure(commonZipConfig)
archiveClassifier = 'no-jdk-windows-x86_64'
- with archiveFiles(modulesFiles(false, 'windows'), 'zip', 'windows', false, false)
+ with archiveFiles(modulesFiles(false, 'windows'), 'zip', 'windows', 'x64', false, false)
}
task buildOssNoJdkWindowsZip(type: Zip) {
configure(commonZipConfig)
archiveClassifier = 'no-jdk-windows-x86_64'
- with archiveFiles(modulesFiles(true, 'windows'), 'zip', 'windows', true, false)
+ with archiveFiles(modulesFiles(true, 'windows'), 'zip', 'windows', 'x64', true, false)
}
Closure commonTarConfig = {
@@ -153,49 +153,61 @@ Closure commonTarConfig = {
task buildDarwinTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'darwin-x86_64'
- with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', false, true)
+ with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', 'x64', false, true)
}
task buildOssDarwinTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'darwin-x86_64'
- with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', true, true)
+ with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', 'x64', true, true)
}
task buildNoJdkDarwinTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-darwin-x86_64'
- with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', false, false)
+ with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', 'x64', false, false)
}
task buildOssNoJdkDarwinTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-darwin-x86_64'
- with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', true, false)
+ with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', 'x64', true, false)
+}
+
+task buildLinuxAarch64Tar(type: SymbolicLinkPreservingTar) {
+ configure(commonTarConfig)
+ archiveClassifier = 'linux-aarch64'
+ with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', 'aarch64', false, true)
}
task buildLinuxTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'linux-x86_64'
- with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', false, true)
+ with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', 'x64', false, true)
+}
+
+task buildOssLinuxAarch64Tar(type: SymbolicLinkPreservingTar) {
+ configure(commonTarConfig)
+ archiveClassifier = 'linux-aarch64'
+ with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', 'aarch64', true, true)
}
task buildOssLinuxTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'linux-x86_64'
- with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', true, true)
+ with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', 'x64', true, true)
}
task buildNoJdkLinuxTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-linux-x86_64'
- with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', false, false)
+ with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', 'x64', false, false)
}
task buildOssNoJdkLinuxTar(type: SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-linux-x86_64'
- with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', true, false)
+ with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', 'x64', true, false)
}
Closure tarExists = { it -> new File('/bin/tar').exists() || new File('/usr/bin/tar').exists() || new File('/usr/local/bin/tar').exists() }
diff --git a/distribution/archives/linux-aarch64-tar/build.gradle b/distribution/archives/linux-aarch64-tar/build.gradle
new file mode 100644
index 0000000000000..4a6dde5fc0c92
--- /dev/null
+++ b/distribution/archives/linux-aarch64-tar/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
diff --git a/distribution/archives/oss-linux-aarch64-tar/build.gradle b/distribution/archives/oss-linux-aarch64-tar/build.gradle
new file mode 100644
index 0000000000000..4a6dde5fc0c92
--- /dev/null
+++ b/distribution/archives/oss-linux-aarch64-tar/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
diff --git a/distribution/build.gradle b/distribution/build.gradle
index 584b80a8c67de..5fc350f5441e1 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -391,16 +391,17 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
}
- jdkFiles = { Project project, String platform ->
+ jdkFiles = { Project project, String platform, String architecture ->
project.jdks {
- "bundled_${platform}" {
+ "bundled_${platform}_${architecture}" {
it.platform = platform
it.version = VersionProperties.getBundledJdk(platform)
it.vendor = VersionProperties.bundledJdkVendor
+ it.architecture = architecture
}
}
return copySpec {
- from project.jdks."bundled_${platform}"
+ from project.jdks."bundled_${platform}_${architecture}"
exclude "demo/**"
eachFile { FileCopyDetails details ->
if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
@@ -607,10 +608,13 @@ subprojects {
['archives:windows-zip', 'archives:oss-windows-zip',
'archives:darwin-tar', 'archives:oss-darwin-tar',
+ 'archives:linux-aarch64-tar', 'archives:oss-linux-aarch64-tar',
'archives:linux-tar', 'archives:oss-linux-tar',
'archives:integ-test-zip',
'packages:rpm', 'packages:deb',
+ 'packages:aarch64-rpm', 'packages:aarch64-deb',
'packages:oss-rpm', 'packages:oss-deb',
+ 'packages:aarch64-oss-rpm', 'packages:aarch64-oss-deb'
].forEach { subName ->
Project subproject = project("${project.path}:${subName}")
Configuration configuration = configurations.create(subproject.name)
diff --git a/distribution/docker/aarch64-docker-build-context/build.gradle b/distribution/docker/aarch64-docker-build-context/build.gradle
new file mode 100644
index 0000000000000..19b0bc3646c60
--- /dev/null
+++ b/distribution/docker/aarch64-docker-build-context/build.gradle
@@ -0,0 +1,11 @@
+apply plugin: 'base'
+
+task buildDockerBuildContext(type: Tar) {
+ extension = 'tar.gz'
+ compression = Compression.GZIP
+ archiveClassifier = "docker-build-context"
+ archiveBaseName = "elasticsearch-aarch64"
+ with dockerBuildContext("aarch64", false, false)
+}
+
+assemble.dependsOn buildDockerBuildContext
diff --git a/distribution/docker/aarch64-docker-export/build.gradle b/distribution/docker/aarch64-docker-export/build.gradle
new file mode 100644
index 0000000000000..537b5a093683e
--- /dev/null
+++ b/distribution/docker/aarch64-docker-export/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// export is done in the parent project.
diff --git a/distribution/docker/aarch64-oss-docker-build-context/build.gradle b/distribution/docker/aarch64-oss-docker-build-context/build.gradle
new file mode 100644
index 0000000000000..bea7d156803fd
--- /dev/null
+++ b/distribution/docker/aarch64-oss-docker-build-context/build.gradle
@@ -0,0 +1,11 @@
+apply plugin: 'base'
+
+task buildOssDockerBuildContext(type: Tar) {
+ extension = 'tar.gz'
+ compression = Compression.GZIP
+ archiveClassifier = "docker-build-context"
+ archiveBaseName = "elasticsearch-aarch64-oss"
+ with dockerBuildContext("aarch64", true, false)
+}
+
+assemble.dependsOn buildOssDockerBuildContext
diff --git a/distribution/docker/aarch64-oss-docker-export/build.gradle b/distribution/docker/aarch64-oss-docker-export/build.gradle
new file mode 100644
index 0000000000000..537b5a093683e
--- /dev/null
+++ b/distribution/docker/aarch64-oss-docker-export/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// export is done in the parent project.
diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index c33f74501da95..f67e520ce76a4 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -1,3 +1,4 @@
+import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
@@ -13,38 +14,51 @@ testFixtures.useFixture()
configurations {
dockerPlugins
+ aarch64DockerSource
dockerSource
+ aarch64OssDockerSource
ossDockerSource
}
dependencies {
+ aarch64DockerSource project(path: ":distribution:archives:linux-aarch64-tar")
dockerSource project(path: ":distribution:archives:linux-tar")
+ aarch64OssDockerSource project(path: ":distribution:archives:oss-linux-aarch64-tar")
ossDockerSource project(path: ":distribution:archives:oss-linux-tar")
}
-ext.expansions = { oss, local ->
- final String classifier = 'linux-x86_64'
+ext.expansions = { architecture, oss, local ->
+ switch (architecture) {
+ case "aarch64":
+ case "x64":
+ break;
+ default:
+ throw new IllegalArgumentException("unrecognized architecture [" + architecture + "], must be one of (aarch64|x64)")
+ }
+ final String classifier = "aarch64".equals(architecture) ? "linux-aarch64" : "linux-x86_64"
final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
return [
+ 'base_image' : "aarch64".equals(architecture) ? "arm64v8/centos:7" : "centos:7",
'build_date' : BuildParams.buildDate,
'elasticsearch' : elasticsearch,
'git_revision' : BuildParams.gitRevision,
'license' : oss ? 'Apache-2.0' : 'Elastic-License',
'source_elasticsearch': local ? "COPY $elasticsearch /opt/" : "RUN cd /opt && curl --retry 8 -s -L -O https://artifacts.elastic.co/downloads/elasticsearch/${elasticsearch} && cd -",
+ 'tini_suffix' : "aarch64".equals(architecture) ? "-arm64" : "",
'version' : VersionProperties.elasticsearch
]
}
-private static String buildPath(final boolean oss) {
- return "build/${oss ? 'oss-' : ''}docker"
+private static String buildPath(final String architecture, final boolean oss) {
+ return "build/${"aarch64".equals(architecture) ? 'aarch64-' : ''}${oss ? 'oss-' : ''}docker"
}
-private static String taskName(final String prefix, final boolean oss, final String suffix) {
- return "${prefix}${oss ? 'Oss' : ''}${suffix}"
+private static String taskName(final String prefix, final String architecture, final boolean oss, final String suffix) {
+ return "${prefix}${"aarch64".equals(architecture) ? 'Aarch64' : ''}${oss ? 'Oss' : ''}${suffix}"
}
project.ext {
- dockerBuildContext = { boolean oss, boolean local ->
+ dockerBuildContext = { String architecture, boolean oss, boolean local ->
copySpec {
into('bin') {
from project.projectDir.toPath().resolve("src/docker/bin")
@@ -62,25 +76,33 @@ project.ext {
}
from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
- expand(expansions(oss, local))
+ expand(expansions(architecture, oss, local))
}
}
}
}
-void addCopyDockerContextTask(final boolean oss) {
- task(taskName("copy", oss, "DockerContext"), type: Sync) {
- expansions(oss, true).findAll { it.key != 'build_date' }.each { k, v ->
+void addCopyDockerContextTask(final String architecture, final boolean oss) {
+ task(taskName("copy", architecture, oss, "DockerContext"), type: Sync) {
+ expansions(architecture, oss, true).findAll { it.key != 'build_date' }.each { k, v ->
inputs.property(k, { v.toString() })
}
- into buildPath(oss)
+ into buildPath(architecture, oss)
- with dockerBuildContext(oss, true)
+ with dockerBuildContext(architecture, oss, true)
- if (oss) {
- from configurations.ossDockerSource
+ if ("aarch64".equals(architecture)) {
+ if (oss) {
+ from configurations.aarch64OssDockerSource
+ } else {
+ from configurations.aarch64DockerSource
+ }
} else {
- from configurations.dockerSource
+ if (oss) {
+ from configurations.ossDockerSource
+ } else {
+ from configurations.dockerSource
+ }
}
from configurations.dockerPlugins
@@ -149,9 +171,9 @@ task integTest(type: Test) {
check.dependsOn integTest
-void addBuildDockerImage(final boolean oss) {
- final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: DockerBuildTask) {
- TaskProvider copyContextTask = tasks.named(taskName("copy", oss, "DockerContext"))
+void addBuildDockerImage(final String architecture, final boolean oss) {
+ final Task buildDockerImageTask = task(taskName("build", architecture, oss, "DockerImage"), type: DockerBuildTask) {
+ TaskProvider copyContextTask = tasks.named(taskName("copy", architecture, oss, "DockerContext"))
dependsOn(copyContextTask)
dockerContext.fileProvider(copyContextTask.map { it.destinationDir })
@@ -169,12 +191,15 @@ void addBuildDockerImage(final boolean oss) {
]
}
}
+ buildDockerImageTask.onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
assemble.dependsOn(buildDockerImageTask)
}
-for (final boolean oss : [false, true]) {
- addCopyDockerContextTask(oss)
- addBuildDockerImage(oss)
+for (final String architecture : ["aarch64", "x64"]) {
+ for (final boolean oss : [false, true]) {
+ addCopyDockerContextTask(architecture, oss)
+ addBuildDockerImage(architecture, oss)
+ }
}
// We build the images used in compose locally, but the pull command insists on using a repository
@@ -192,11 +217,12 @@ subprojects { Project subProject ->
if (subProject.name.contains('docker-export')) {
apply plugin: 'distribution'
+ final String architecture = subProject.name.contains('aarch64-') ? 'aarch64' : 'x64'
final boolean oss = subProject.name.contains('oss-')
- def exportTaskName = taskName("export", oss, "DockerImage")
- def buildTaskName = taskName("build", oss, "DockerImage")
- def tarFile = "${parent.projectDir}/build/elasticsearch${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"
+ def exportTaskName = taskName("export", architecture, oss, "DockerImage")
+ def buildTaskName = taskName("build", architecture, oss, "DockerImage")
+ def tarFile = "${parent.projectDir}/build/elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"
final Task exportDockerImageTask = task(exportTaskName, type: LoggedExec) {
inputs.file("${parent.projectDir}/build/markers/${buildTaskName}.marker")
@@ -212,7 +238,7 @@ subprojects { Project subProject ->
artifacts.add('default', file(tarFile)) {
type 'tar'
- name "elasticsearch${oss ? '-oss' : ''}"
+ name "elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}"
builtBy exportTaskName
}
diff --git a/distribution/docker/docker-build-context/build.gradle b/distribution/docker/docker-build-context/build.gradle
index 50be407e566bc..2dd28329d7ba5 100644
--- a/distribution/docker/docker-build-context/build.gradle
+++ b/distribution/docker/docker-build-context/build.gradle
@@ -5,7 +5,7 @@ task buildDockerBuildContext(type: Tar) {
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch"
- with dockerBuildContext(false, false)
+ with dockerBuildContext("x64", false, false)
}
assemble.dependsOn buildDockerBuildContext
diff --git a/distribution/docker/oss-docker-build-context/build.gradle b/distribution/docker/oss-docker-build-context/build.gradle
index b69f7dc620f53..0a29c2a2b7274 100644
--- a/distribution/docker/oss-docker-build-context/build.gradle
+++ b/distribution/docker/oss-docker-build-context/build.gradle
@@ -5,7 +5,7 @@ task buildOssDockerBuildContext(type: Tar) {
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss"
- with dockerBuildContext(true, false)
+ with dockerBuildContext("x64", true, false)
}
assemble.dependsOn buildOssDockerBuildContext
diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile
index b12a3c7518265..7eb62b7d8ad1b 100644
--- a/distribution/docker/src/docker/Dockerfile
+++ b/distribution/docker/src/docker/Dockerfile
@@ -11,7 +11,7 @@
# Set gid=0 and make group perms==owner perms
################################################################################
-FROM centos:7 AS builder
+FROM ${base_image} AS builder
RUN for iter in {1..10}; do yum update --setopt=tsflags=nodocs -y && \
yum install --setopt=tsflags=nodocs -y gzip shadow-utils tar && \
@@ -42,8 +42,8 @@ RUN chmod 0660 config/elasticsearch.yml config/log4j2.properties
# gpg, but the keyservers are slow to return the key and this can fail the
# build. Instead, we check the binary against a checksum that we have
# computed.
-ADD https://github.com/krallin/tini/releases/download/v0.18.0/tini /tini
-COPY config/tini.sha512 /tini.sha512
+ADD https://github.com/krallin/tini/releases/download/v0.18.0/tini${tini_suffix} /tini
+COPY config/tini${tini_suffix}.sha512 /tini.sha512
RUN sha512sum -c /tini.sha512 && chmod +x /tini
################################################################################
@@ -52,7 +52,7 @@ RUN sha512sum -c /tini.sha512 && chmod +x /tini
# Add entrypoint
################################################################################
-FROM centos:7
+FROM ${base_image}
ENV ELASTIC_CONTAINER true
diff --git a/distribution/docker/src/docker/config/tini-arm64.sha512 b/distribution/docker/src/docker/config/tini-arm64.sha512
new file mode 100644
index 0000000000000..274eaa28cff08
--- /dev/null
+++ b/distribution/docker/src/docker/config/tini-arm64.sha512
@@ -0,0 +1 @@
+6ae5147e522e484b9d59b0caa04e6dadf0efe332b272039c7cf5951e39f5028e9852c3c4bcdd46b98977329108d555ee7ea55f9eca99765d05922ec7aff837d8 /tini
diff --git a/distribution/packages/aarch64-deb/build.gradle b/distribution/packages/aarch64-deb/build.gradle
new file mode 100644
index 0000000000000..4a6dde5fc0c92
--- /dev/null
+++ b/distribution/packages/aarch64-deb/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
diff --git a/distribution/packages/aarch64-oss-deb/build.gradle b/distribution/packages/aarch64-oss-deb/build.gradle
new file mode 100644
index 0000000000000..4a6dde5fc0c92
--- /dev/null
+++ b/distribution/packages/aarch64-oss-deb/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
diff --git a/distribution/packages/aarch64-oss-rpm/build.gradle b/distribution/packages/aarch64-oss-rpm/build.gradle
new file mode 100644
index 0000000000000..4a6dde5fc0c92
--- /dev/null
+++ b/distribution/packages/aarch64-oss-rpm/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
diff --git a/distribution/packages/aarch64-rpm/build.gradle b/distribution/packages/aarch64-rpm/build.gradle
new file mode 100644
index 0000000000000..4a6dde5fc0c92
--- /dev/null
+++ b/distribution/packages/aarch64-rpm/build.gradle
@@ -0,0 +1,2 @@
+// This file is intentionally blank. All configuration of the
+// distribution is done in the parent project.
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index 9b1efa3e43d7c..105f244e6bc5e 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -98,17 +98,17 @@ addProcessFilesTask('rpm', false, false)
// Common configuration that is package dependent. This can't go in ospackage
// since we have different templated files that need to be consumed, but the structure
// is the same
-Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
+Closure commonPackageConfig(String type, boolean oss, boolean jdk, String architecture) {
return {
onlyIf {
OS.current().equals(OS.WINDOWS) == false
}
dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
packageName "elasticsearch${oss ? '-oss' : ''}"
- arch(type == 'deb' ? 'amd64' : 'X86_64')
+ arch(architecture == 'aarch64' ? 'aarch64' : type == 'deb' ? 'amd64' : 'X86_64')
// Follow elasticsearch's file naming convention
String jdkString = jdk ? "" : "no-jdk-"
- String prefix = "${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}"
+ String prefix = "${architecture == 'aarch64' ? 'aarch64-' : ''}${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}"
destinationDir = file("${prefix}/build/distributions")
// SystemPackagingTask overrides default archive task convention mappings, but doesn't provide a setter so we have to override the convention mapping itself
@@ -143,7 +143,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
}
if (jdk) {
into('jdk') {
- with jdkFiles(project, 'linux')
+ with jdkFiles(project, 'linux', architecture)
}
}
// we need to specify every intermediate directory in these paths so the package managers know they are explicitly
@@ -306,9 +306,9 @@ ospackage {
into '/usr/share/elasticsearch'
}
-Closure commonDebConfig(boolean oss, boolean jdk) {
+Closure commonDebConfig(boolean oss, boolean jdk, String architecture) {
return {
- configure(commonPackageConfig('deb', oss, jdk))
+ configure(commonPackageConfig('deb', oss, jdk, architecture))
// jdeb does not provide a way to set the License control attribute, and ospackage
// silently ignores setting it. Instead, we set the license as "custom field"
@@ -336,25 +336,33 @@ Closure commonDebConfig(boolean oss, boolean jdk) {
}
}
+task buildAarch64Deb(type: Deb) {
+ configure(commonDebConfig(false, true, 'aarch64'))
+}
+
task buildDeb(type: Deb) {
- configure(commonDebConfig(false, true))
+ configure(commonDebConfig(false, true, 'x64'))
+}
+
+task buildAarch64OssDeb(type: Deb) {
+ configure(commonDebConfig(true, true, 'aarch64'))
}
task buildOssDeb(type: Deb) {
- configure(commonDebConfig(true, true))
+ configure(commonDebConfig(true, true, 'x64'))
}
task buildNoJdkDeb(type: Deb) {
- configure(commonDebConfig(false, false))
+ configure(commonDebConfig(false, false, 'x64'))
}
task buildOssNoJdkDeb(type: Deb) {
- configure(commonDebConfig(true, false))
+ configure(commonDebConfig(true, false, 'x64'))
}
-Closure commonRpmConfig(boolean oss, boolean jdk) {
+Closure commonRpmConfig(boolean oss, boolean jdk, String architecture) {
return {
- configure(commonPackageConfig('rpm', oss, jdk))
+ configure(commonPackageConfig('rpm', oss, jdk, architecture))
if (oss) {
license 'ASL 2.0'
@@ -381,20 +389,28 @@ Closure commonRpmConfig(boolean oss, boolean jdk) {
}
}
+task buildAarch64Rpm(type: Rpm) {
+ configure(commonRpmConfig(false, true, 'aarch64'))
+}
+
task buildRpm(type: Rpm) {
- configure(commonRpmConfig(false, true))
+ configure(commonRpmConfig(false, true, 'x64'))
+}
+
+task buildAarch64OssRpm(type: Rpm) {
+ configure(commonRpmConfig(true, true, 'aarch64'))
}
task buildOssRpm(type: Rpm) {
- configure(commonRpmConfig(true, true))
+ configure(commonRpmConfig(true, true, 'x64'))
}
task buildNoJdkRpm(type: Rpm) {
- configure(commonRpmConfig(false, false))
+ configure(commonRpmConfig(false, false, 'x64'))
}
task buildOssNoJdkRpm(type: Rpm) {
- configure(commonRpmConfig(true, false))
+ configure(commonRpmConfig(true, false, 'x64'))
}
Closure dpkgExists = { it -> new File('/bin/dpkg-deb').exists() || new File('/usr/bin/dpkg-deb').exists() || new File('/usr/local/bin/dpkg-deb').exists() }
@@ -446,6 +462,8 @@ subprojects {
final File rpmDatabase = new File(extractionDir, 'rpm-database')
commandLine 'rpm',
'--badreloc',
+ '--ignorearch',
+ '--ignoreos',
'--nodeps',
'--noscripts',
'--notriggers',
diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch
index 136aed6755c5e..e2c2288cb664c 100755
--- a/distribution/src/bin/elasticsearch
+++ b/distribution/src/bin/elasticsearch
@@ -29,7 +29,7 @@ for option in "$@"; do
done
if [ -z "$ES_TMPDIR" ]; then
- ES_TMPDIR=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.TempDirectory`
+ ES_TMPDIR=`"$JAVA" "$XSHARE" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.TempDirectory`
fi
# get keystore password before setting java options to avoid
@@ -52,12 +52,13 @@ fi
# - second, JVM options are read from jvm.options and jvm.options.d/*.options
# - third, JVM options from ES_JAVA_OPTS are applied
# - fourth, ergonomic JVM options are applied
-ES_JAVA_OPTS=`export ES_TMPDIR; "$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_PATH_CONF"`
+ES_JAVA_OPTS=`export ES_TMPDIR; "$JAVA" "$XSHARE" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_PATH_CONF"`
# manual parsing to find out, if process should be detached
if [[ $DAEMONIZE = false ]]; then
exec \
"$JAVA" \
+ "$XSHARE" \
$ES_JAVA_OPTS \
-Des.path.home="$ES_HOME" \
-Des.path.conf="$ES_PATH_CONF" \
@@ -70,6 +71,7 @@ if [[ $DAEMONIZE = false ]]; then
else
exec \
"$JAVA" \
+ "$XSHARE" \
$ES_JAVA_OPTS \
-Des.path.home="$ES_HOME" \
-Des.path.conf="$ES_PATH_CONF" \
diff --git a/distribution/src/bin/elasticsearch-cli b/distribution/src/bin/elasticsearch-cli
index 4af827b67caf9..6f03456eb0122 100644
--- a/distribution/src/bin/elasticsearch-cli
+++ b/distribution/src/bin/elasticsearch-cli
@@ -22,6 +22,7 @@ ES_JAVA_OPTS="-Xms4m -Xmx64m -XX:+UseSerialGC ${ES_JAVA_OPTS}"
exec \
"$JAVA" \
+ "$XSHARE" \
$ES_JAVA_OPTS \
-Des.path.home="$ES_HOME" \
-Des.path.conf="$ES_PATH_CONF" \
diff --git a/distribution/src/bin/elasticsearch-env b/distribution/src/bin/elasticsearch-env
index cbdfbf8facb5c..5a54ad58e0abd 100644
--- a/distribution/src/bin/elasticsearch-env
+++ b/distribution/src/bin/elasticsearch-env
@@ -67,8 +67,14 @@ if [ ! -z "$JAVA_OPTS" ]; then
echo "pass JVM parameters via ES_JAVA_OPTS"
fi
+if [[ "$("$JAVA" -version 2>/dev/null)" =~ "Unable to map CDS archive" ]]; then
+ XSHARE="-Xshare:off"
+else
+ XSHARE="-Xshare:auto"
+fi
+
# check the Java version
-"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.java_version_checker.JavaVersionChecker
+"$JAVA" "$XSHARE" -cp "$ES_CLASSPATH" org.elasticsearch.tools.java_version_checker.JavaVersionChecker
export HOSTNAME=$HOSTNAME
diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java
index 9ed0479c4d15b..0b2e9f9092a74 100644
--- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java
+++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java
@@ -87,6 +87,7 @@ private static List flagsFinal(final List userDefinedJvmOptions)
final List command = Stream.of(
Stream.of(java),
userDefinedJvmOptions.stream(),
+ Stream.of("-Xshare:off"),
Stream.of("-XX:+PrintFlagsFinal"),
Stream.of("-version")
).reduce(Stream::concat).get().collect(Collectors.toUnmodifiableList());
diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc
index 7cc04c9c3de75..1e5f998a72da4 100644
--- a/docs/plugins/analysis-nori.asciidoc
+++ b/docs/plugins/analysis-nori.asciidoc
@@ -54,6 +54,10 @@ It can be set to:
가곡역 => 가곡역, 가곡, 역
--
+`discard_punctuation`::
+
+ Whether punctuation should be discarded from the output. Defaults to `true`.
+
`user_dictionary`::
+
--
@@ -99,6 +103,7 @@ PUT nori_sample
"nori_user_dict": {
"type": "nori_tokenizer",
"decompound_mode": "mixed",
+ "discard_punctuation": "false",
"user_dictionary": "userdict_ko.txt"
}
},
@@ -434,3 +439,107 @@ Which responds with:
--------------------------------------------------
<1> The Hanja form is replaced by the Hangul translation.
+
+
+[[analysis-nori-number]]
+==== `nori_number` token filter
+
+The `nori_number` token filter normalizes Korean numbers
+to regular Arabic decimal numbers in half-width characters.
+
+Korean numbers are often written using a combination of Hangul and Arabic numbers with various kinds of punctuation.
+For example, 3.2천 means 3200.
+This filter does this kind of normalization and allows a search for 3200 to match 3.2천 in text,
+but can also be used to make range facets based on the normalized numbers and so on.
+
+[NOTE]
+====
+Notice that this analyzer uses a token composition scheme and relies on punctuation tokens
+being found in the token stream.
+Please make sure your `nori_tokenizer` has `discard_punctuation` set to false.
+In case punctuation characters, such as U+FF0E(.), are removed from the token stream,
+this filter would find input tokens 3 and 2천 and give outputs 3 and 2000 instead of 3200,
+which is likely not the intended result.
+
+If you want to remove punctuation characters from your index that are not part of normalized numbers,
+add a `stop` token filter with the punctuation you wish to remove after `nori_number` in your analyzer chain.
+====
+Below are some examples of normalizations this filter supports.
+The input is untokenized text and the result is the single term attribute emitted for the input.
+
+- 영영칠 -> 7
+- 일영영영 -> 1000
+- 삼천2백2십삼 -> 3223
+- 조육백만오천일 -> 1000006005001
+- 3.2천 -> 3200
+- 1.2만345.67 -> 12345.67
+- 4,647.100 -> 4647.1
+- 15,7 -> 157 (be aware of this weakness)
+
+For example:
+
+[source,console]
+--------------------------------------------------
+PUT nori_sample
+{
+ "settings": {
+ "index": {
+ "analysis": {
+ "analyzer": {
+ "my_analyzer": {
+ "tokenizer": "tokenizer_discard_punctuation_false",
+ "filter": [
+ "part_of_speech_stop_sp", "nori_number"
+ ]
+ }
+ },
+ "tokenizer": {
+ "tokenizer_discard_punctuation_false": {
+ "type": "nori_tokenizer",
+ "discard_punctuation": "false"
+ }
+ },
+ "filter": {
+ "part_of_speech_stop_sp": {
+ "type": "nori_part_of_speech",
+ "stoptags": ["SP"]
+ }
+ }
+ }
+ }
+ }
+}
+
+GET nori_sample/_analyze
+{
+ "analyzer": "my_analyzer",
+ "text": "십만이천오백과 3.2천"
+}
+--------------------------------------------------
+
+Which results in:
+
+[source,console-result]
+--------------------------------------------------
+{
+ "tokens" : [{
+ "token" : "102500",
+ "start_offset" : 0,
+ "end_offset" : 6,
+ "type" : "word",
+ "position" : 0
+ }, {
+ "token" : "과",
+ "start_offset" : 6,
+ "end_offset" : 7,
+ "type" : "word",
+ "position" : 1
+ }, {
+ "token" : "3200",
+ "start_offset" : 8,
+ "end_offset" : 12,
+ "type" : "word",
+ "position" : 2
+ }]
+}
+--------------------------------------------------
diff --git a/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc b/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc
index 6dbc8adfd3ff8..e0ab0afde2f3a 100644
--- a/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc
+++ b/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc
@@ -50,7 +50,7 @@ faster.
The `sort` field in the metric request functions exactly the same as the `sort` field in the
<> request except:
-* It can't be used on <>, <, <>,
+* It can't be used on <>, <>, <>,
<>, or <> fields.
* It only supports a single sort value so which document wins ties is not specified.
diff --git a/docs/reference/analysis/anatomy.asciidoc b/docs/reference/analysis/anatomy.asciidoc
index 1db14e787a54a..22e7ffda667d4 100644
--- a/docs/reference/analysis/anatomy.asciidoc
+++ b/docs/reference/analysis/anatomy.asciidoc
@@ -10,6 +10,7 @@ blocks into analyzers suitable for different languages and types of text.
Elasticsearch also exposes the individual building blocks so that they can be
combined to define new <> analyzers.
+[[analyzer-anatomy-character-filters]]
==== Character filters
A _character filter_ receives the original text as a stream of characters and
@@ -21,6 +22,7 @@ elements like `` from the stream.
An analyzer may have *zero or more* <>,
which are applied in order.
+[[analyzer-anatomy-tokenizer]]
==== Tokenizer
A _tokenizer_ receives a stream of characters, breaks it up into individual
@@ -35,6 +37,7 @@ the term represents.
An analyzer must have *exactly one* <>.
+[[analyzer-anatomy-token-filters]]
==== Token filters
A _token filter_ receives the token stream and may add, remove, or change
diff --git a/docs/reference/analysis/concepts.asciidoc b/docs/reference/analysis/concepts.asciidoc
index 2468286e3a719..2e431efcd5fec 100644
--- a/docs/reference/analysis/concepts.asciidoc
+++ b/docs/reference/analysis/concepts.asciidoc
@@ -8,6 +8,8 @@ This section explains the fundamental concepts of text analysis in {es}.
* <>
* <>
+* <>
include::anatomy.asciidoc[]
-include::index-search-time.asciidoc[]
\ No newline at end of file
+include::index-search-time.asciidoc[]
+include::token-graphs.asciidoc[]
\ No newline at end of file
diff --git a/docs/reference/analysis/testing.asciidoc b/docs/reference/analysis/testing.asciidoc
index ba3300802ac87..845f275455eb2 100644
--- a/docs/reference/analysis/testing.asciidoc
+++ b/docs/reference/analysis/testing.asciidoc
@@ -55,7 +55,7 @@ The API returns the following response:
You can also test combinations of:
* A tokenizer
-* Zero or token filters
+* Zero or more token filters
* Zero or more character filters
[source,console]
diff --git a/docs/reference/analysis/token-graphs.asciidoc b/docs/reference/analysis/token-graphs.asciidoc
new file mode 100644
index 0000000000000..ab1dc52f5131b
--- /dev/null
+++ b/docs/reference/analysis/token-graphs.asciidoc
@@ -0,0 +1,104 @@
+[[token-graphs]]
+=== Token graphs
+
+When a <> converts a text into a stream of
+tokens, it also records the following:
+
+* The `position` of each token in the stream
+* The `positionLength`, the number of positions that a token spans
+
+Using these, you can create a
+https://en.wikipedia.org/wiki/Directed_acyclic_graph[directed acyclic graph],
+called a _token graph_, for a stream. In a token graph, each position represents
+a node. Each token represents an edge or arc, pointing to the next position.
+
+image::images/analysis/token-graph-qbf-ex.svg[align="center"]
+
+[[token-graphs-synonyms]]
+==== Synonyms
+
+Some <> can add new tokens, like
+synonyms, to an existing token stream. These synonyms often span the same
+positions as existing tokens.
+
+In the following graph, `quick` and its synonym `fast` both have a position of
+`0`. They span the same positions.
+
+image::images/analysis/token-graph-qbf-synonym-ex.svg[align="center"]
+
+[[token-graphs-multi-position-tokens]]
+==== Multi-position tokens
+
+Some token filters can add tokens that span multiple positions. These can
+include tokens for multi-word synonyms, such as using "atm" as a synonym for
+"automatic teller machine."
+
+However, only some token filters, known as _graph token filters_, accurately
+record the `positionLength` for multi-position tokens. These filters include:
+
+* <>
+* <>
+
+In the following graph, `domain name system` and its synonym, `dns`, both have a
+position of `0`. However, `dns` has a `positionLength` of `3`. Other tokens in
+the graph have a default `positionLength` of `1`.
+
+image::images/analysis/token-graph-dns-synonym-ex.svg[align="center"]
+
+[[token-graphs-token-graphs-search]]
+===== Using token graphs for search
+
+<> ignores the `positionLength` attribute
+and does not support token graphs containing multi-position tokens.
+
+However, queries, such as the <> or
+<> query, can use these graphs to
+generate multiple sub-queries from a single query string.
+
+.*Example*
+[%collapsible]
+====
+
+A user runs a search for the following phrase using the `match_phrase` query:
+
+`domain name system is fragile`
+
+During <>, `dns`, a synonym for
+`domain name system`, is added to the query string's token stream. The `dns`
+token has a `positionLength` of `3`.
+
+image::images/analysis/token-graph-dns-synonym-ex.svg[align="center"]
+
+The `match_phrase` query uses this graph to generate sub-queries for the
+following phrases:
+
+[source,text]
+------
+dns is fragile
+domain name system is fragile
+------
+
+This means the query matches documents containing either `dns is fragile` _or_
+`domain name system is fragile`.
+====
+
+[[token-graphs-invalid-token-graphs]]
+===== Invalid token graphs
+
+The following token filters can add tokens that span multiple positions but
+only record a default `positionLength` of `1`:
+
+* <>
+* <>
+
+This means these filters will produce invalid token graphs for streams
+containing such tokens.
+
+In the following graph, `dns` is a multi-position synonym for `domain name
+system`. However, `dns` has the default `positionLength` value of `1`, resulting
+in an invalid graph.
+
+image::images/analysis/token-graph-dns-invalid-ex.svg[align="center"]
+
+Avoid using invalid token graphs for search. Invalid graphs can cause unexpected
+search results.
\ No newline at end of file
diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc
index 4e98e24d08ef0..957aad084619f 100644
--- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc
@@ -54,7 +54,6 @@ http://snowball.tartarus.org/algorithms/basque/stemmer.html[*`basque`*]
Bengali::
http://www.tandfonline.com/doi/abs/10.1080/02564602.1993.11437284[*`bengali`*]
-http://members.unine.ch/jacques.savoy/clef/BengaliStemmerLight.java.txt[*`light_bengali`*]
Brazilian Portuguese::
diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc
index e6bc76e408f23..582ce99b20bf7 100644
--- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc
@@ -8,8 +8,8 @@ The `synonym_graph` token filter allows to easily handle synonyms,
including multi-word synonyms correctly during the analysis process.
In order to properly handle multi-word synonyms this token filter
-creates a "graph token stream" during processing. For more information
-on this topic and its various complexities, please read the
+creates a <> during processing. For more
+information on this topic and its various complexities, please read the
http://blog.mikemccandless.com/2012/04/lucenes-tokenstreams-are-actually.html[Lucene's TokenStreams are actually graphs] blog post.
["NOTE",id="synonym-graph-index-note"]
diff --git a/docs/reference/analysis/tokenfilters/uppercase-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/uppercase-tokenfilter.asciidoc
index 780e09fa951cc..84c9ebd186595 100644
--- a/docs/reference/analysis/tokenfilters/uppercase-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/uppercase-tokenfilter.asciidoc
@@ -16,7 +16,8 @@ Depending on the language, an uppercase character can map to multiple
lowercase characters. Using the `uppercase` filter could result in the loss of
lowercase character information.
-To avoid this loss but still have a consistent lettercase, use the <> filter instead.
+To avoid this loss but still have a consistent letter case, use the
+<> filter instead.
====
[[analysis-uppercase-tokenfilter-analyze-ex]]
diff --git a/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc
index 8581d8cb7ec17..2fa9c41ad79b6 100644
--- a/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc
@@ -429,7 +429,7 @@ PUT /my_index
[[analysis-word-delimiter-graph-differences]]
==== Differences between `word_delimiter_graph` and `word_delimiter`
-Both the `word_delimiter_graph` and
+Both the `word_delimiter_graph` and
<> filters produce tokens
that span multiple positions when any of the following parameters are `true`:
@@ -440,8 +440,8 @@ that span multiple positions when any of the following parameters are `true`:
However, only the `word_delimiter_graph` filter assigns multi-position tokens a
`positionLength` attribute, which indicates the number of positions a token
-spans. This ensures the `word_delimiter_graph` filter always produces valid token
-https://en.wikipedia.org/wiki/Directed_acyclic_graph[graphs].
+spans. This ensures the `word_delimiter_graph` filter always produces valid
+<<token-graphs,token graphs>>.
The `word_delimiter` filter does not assign multi-position tokens a
`positionLength` attribute. This means it produces invalid graphs for streams
diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc
index 41a09c9a15ff6..3921aab0d1354 100644
--- a/docs/reference/api-conventions.asciidoc
+++ b/docs/reference/api-conventions.asciidoc
@@ -87,7 +87,7 @@ GET /%3Clogstash-%7Bnow%2Fd%7D%3E/_search
}
----------------------------------------------------------------------
// TEST[s/^/PUT logstash-2016.09.20\n/]
-// TEST[s/now/2016.09.20||/]
+// TEST[s/now/2016.09.20%7C%7C/]
[NOTE]
.Percent encoding of date math characters
@@ -141,7 +141,7 @@ GET /%3Clogstash-%7Bnow%2Fd-2d%7D%3E%2C%3Clogstash-%7Bnow%2Fd-1d%7D%3E%2C%3Clogs
}
----------------------------------------------------------------------
// TEST[s/^/PUT logstash-2016.09.20\nPUT logstash-2016.09.19\nPUT logstash-2016.09.18\n/]
-// TEST[s/now/2016.09.20||/]
+// TEST[s/now/2016.09.20%7C%7C/]
[[common-options]]
=== Common options
@@ -367,7 +367,7 @@ GET /_search?filter_path=hits.hits._source&_source=title&sort=rating:desc
[float]
==== Flat Settings
-The `flat_settings` flag affects rendering of the lists of settings. When the
+The `flat_settings` flag affects rendering of the lists of settings. When the
`flat_settings` flag is `true`, settings are returned in a flat format:
[source,console]
diff --git a/docs/reference/async-search.asciidoc b/docs/reference/async-search.asciidoc
new file mode 100644
index 0000000000000..d51e017b0f97c
--- /dev/null
+++ b/docs/reference/async-search.asciidoc
@@ -0,0 +1,22 @@
+[role="xpack"]
+[testenv="basic"]
+[[async-search-intro]]
+== Long-running searches
+
+{es} generally allows you to quickly search across big amounts of data. There are
situations where a search executes on many shards, possibly against
<<frozen-indices,frozen indices>> and spanning multiple
<<modules-cross-cluster-search,clusters>>, for which
+results are not expected to be returned in milliseconds. When you need to
+execute long-running searches, synchronously
waiting for their results to be returned is not ideal. Instead, async search lets
+you submit a search request that gets executed _asynchronously_,
+monitor the progress of the request, and retrieve results at a later stage.
+You can also retrieve partial results as they become available but
+before the search has completed.
+
+You can submit an async search request using the <<submit-async-search,submit async search>> API. The <<get-async-search,get async search>> API allows you to
+monitor the progress of an async search request and retrieve its results. An
+ongoing async search can be deleted through the <<delete-async-search,delete async search>> API.
diff --git a/docs/reference/autoscaling/apis/get-autoscaling-decision.asciidoc b/docs/reference/autoscaling/apis/get-autoscaling-decision.asciidoc
index aa66e3a0d034f..dfa14ac180636 100644
--- a/docs/reference/autoscaling/apis/get-autoscaling-decision.asciidoc
+++ b/docs/reference/autoscaling/apis/get-autoscaling-decision.asciidoc
@@ -47,6 +47,6 @@ The API returns the following result:
[source,console-result]
--------------------------------------------------
{
-
+ "decisions": []
}
--------------------------------------------------
diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc
index d557a8c930a20..2b303e84ab955 100644
--- a/docs/reference/cat.asciidoc
+++ b/docs/reference/cat.asciidoc
@@ -255,16 +255,18 @@ include::cat/recovery.asciidoc[]
include::cat/repositories.asciidoc[]
-include::cat/tasks.asciidoc[]
-
-include::cat/thread_pool.asciidoc[]
-
-include::cat/trainedmodel.asciidoc[]
-
include::cat/shards.asciidoc[]
include::cat/segments.asciidoc[]
include::cat/snapshots.asciidoc[]
+include::cat/tasks.asciidoc[]
+
include::cat/templates.asciidoc[]
+
+include::cat/thread_pool.asciidoc[]
+
+include::cat/trainedmodel.asciidoc[]
+
+include::cat/transforms.asciidoc[]
diff --git a/docs/reference/cat/nodes.asciidoc b/docs/reference/cat/nodes.asciidoc
index d6d67a35ef342..94b004a6f9f45 100644
--- a/docs/reference/cat/nodes.asciidoc
+++ b/docs/reference/cat/nodes.asciidoc
@@ -44,7 +44,7 @@ Valid columns are:
`node.role`, `r`, `role`, `nodeRole`::
(Default) Roles of the node. Returned values include `d` (data node), `i`
(ingest node), `m` (master-eligible node), `l` (machine learning node), `v`
-(voting-only node), and `-` (coordinating node only).
+(voting-only node), `t` ({transform} node), and `-` (coordinating node only).
+
For example, `dim` indicates a master-eligible data and ingest node. See
<>.
diff --git a/docs/reference/cat/transforms.asciidoc b/docs/reference/cat/transforms.asciidoc
new file mode 100644
index 0000000000000..78013e394ef24
--- /dev/null
+++ b/docs/reference/cat/transforms.asciidoc
@@ -0,0 +1,28 @@
+[[cat-transforms]]
+=== cat {transforms} API
+++++
+cat transforms
+++++
+
+Returns configuration and usage information about {transforms}.
+
+
+[[cat-transforms-api-request]]
+==== {api-request-title}
+
+`GET /_cat/transforms`
+
+
+//[[cat-transforms-api-desc]]
+//==== {api-description-title}
+
+
+//[[cat-transforms-api-query-params]]
+//==== {api-query-parms-title}
+
+
+//[[cat-transforms-api-response-codes]]
+//==== {api-response-codes-title}
+
+//[[cat-transforms-api-examples]]
+//==== {api-examples-title}
diff --git a/docs/reference/frozen-indices.asciidoc b/docs/reference/frozen-indices.asciidoc
index b6cafad30f5dc..5a2ef125a8cc3 100644
--- a/docs/reference/frozen-indices.asciidoc
+++ b/docs/reference/frozen-indices.asciidoc
@@ -74,8 +74,8 @@ POST /twitter/_forcemerge?max_num_segments=1
== Searching a frozen index
Frozen indices are throttled in order to limit memory consumptions per node. The number of concurrently loaded frozen indices per node is
-limited by the number of threads in the <> threadpool, which is `1` by default.
-Search requests will not be executed against frozen indices by default, even if a frozen index is named explicitly. This is
+limited by the number of threads in the <> threadpool, which is `1` by default.
+Search requests will not be executed against frozen indices by default, even if a frozen index is named explicitly. This is
to prevent accidental slowdowns by targeting a frozen index by mistake. To include frozen indices a search request must be executed with
the query parameter `ignore_throttled=false`.
@@ -85,15 +85,6 @@ GET /twitter/_search?q=user:kimchy&ignore_throttled=false
--------------------------------------------------
// TEST[setup:twitter]
-[IMPORTANT]
-================================
-While frozen indices are slow to search, they can be pre-filtered efficiently. The request parameter `pre_filter_shard_size` specifies
-a threshold that, when exceeded, will enforce a round-trip to pre-filter search shards that cannot possibly match.
-This filter phase can limit the number of shards significantly. For instance, if a date range filter is applied, then all indices (frozen or unfrozen) that do not contain documents within the date range can be skipped efficiently.
-The default value for `pre_filter_shard_size` is `128` but it's recommended to set it to `1` when searching frozen indices. There is no
-significant overhead associated with this pre-filter phase.
-================================
-
[role="xpack"]
[testenv="basic"]
[[monitoring_frozen_indices]]
diff --git a/docs/reference/images/analysis/token-graph-dns-ex.svg b/docs/reference/images/analysis/token-graph-dns-ex.svg
new file mode 100644
index 0000000000000..0eda4fa54bd20
--- /dev/null
+++ b/docs/reference/images/analysis/token-graph-dns-ex.svg
@@ -0,0 +1,65 @@
+
+
+
+ Slice 1
+ Created with Sketch.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/reference/images/analysis/token-graph-dns-invalid-ex.svg b/docs/reference/images/analysis/token-graph-dns-invalid-ex.svg
new file mode 100644
index 0000000000000..5614f39bfe35c
--- /dev/null
+++ b/docs/reference/images/analysis/token-graph-dns-invalid-ex.svg
@@ -0,0 +1,72 @@
+
+
+
+ Slice 1
+ Created with Sketch.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/reference/images/analysis/token-graph-dns-synonym-ex.svg b/docs/reference/images/analysis/token-graph-dns-synonym-ex.svg
new file mode 100644
index 0000000000000..cff5b1306b73b
--- /dev/null
+++ b/docs/reference/images/analysis/token-graph-dns-synonym-ex.svg
@@ -0,0 +1,72 @@
+
+
+
+ Slice 1
+ Created with Sketch.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/reference/images/analysis/token-graph-qbf-ex.svg b/docs/reference/images/analysis/token-graph-qbf-ex.svg
new file mode 100644
index 0000000000000..63970673092d4
--- /dev/null
+++ b/docs/reference/images/analysis/token-graph-qbf-ex.svg
@@ -0,0 +1,45 @@
+
+
+
+ Slice 1
+ Created with Sketch.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/reference/images/analysis/token-graph-qbf-synonym-ex.svg b/docs/reference/images/analysis/token-graph-qbf-synonym-ex.svg
new file mode 100644
index 0000000000000..2baa3d9e63cb5
--- /dev/null
+++ b/docs/reference/images/analysis/token-graph-qbf-synonym-ex.svg
@@ -0,0 +1,52 @@
+
+
+
+ Slice 1
+ Created with Sketch.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/reference/index-modules/history-retention.asciidoc b/docs/reference/index-modules/history-retention.asciidoc
index 6ace77c3533ff..fb4aa26ab9b07 100644
--- a/docs/reference/index-modules/history-retention.asciidoc
+++ b/docs/reference/index-modules/history-retention.asciidoc
@@ -54,12 +54,11 @@ reasonable recovery scenarios.
`index.soft_deletes.enabled`::
+ deprecated:[7.6.0, Creating indices with soft-deletes disabled is deprecated and will be removed in future Elasticsearch versions.]
Whether or not soft deletes are enabled on the index. Soft deletes can only be
configured at index creation and only on indices created on or after 6.5.0.
The default value is `true`.
- deprecated::[7.6, Creating indices with soft-deletes disabled is
- deprecated and will be removed in future Elasticsearch versions.]
`index.soft_deletes.retention_lease.period`::
diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc
index fcfc85d7184c5..bce1d615f264d 100644
--- a/docs/reference/index.asciidoc
+++ b/docs/reference/index.asciidoc
@@ -26,6 +26,8 @@ include::query-dsl.asciidoc[]
include::modules/cross-cluster-search.asciidoc[]
+include::async-search.asciidoc[]
+
include::scripting.asciidoc[]
include::mapping.asciidoc[]
diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc
index b75961a52559e..42c4d9a8a5b44 100644
--- a/docs/reference/indices/rollover-index.asciidoc
+++ b/docs/reference/indices/rollover-index.asciidoc
@@ -300,7 +300,7 @@ POST /logs_write/_rollover <2>
}
}
--------------------------------------------------
-// TEST[s/now/2016.10.31||/]
+// TEST[s/now/2016.10.31%7C%7C/]
<1> Creates an index named with today's date (e.g.) `logs-2016.10.31-1`
<2> Rolls over to a new index with today's date, e.g. `logs-2016.10.31-000002` if run immediately, or `logs-2016.11.01-000002` if run after 24 hours
@@ -339,7 +339,7 @@ over indices created in the last three days, you could do the following:
GET /%3Clogs-%7Bnow%2Fd%7D-*%3E%2C%3Clogs-%7Bnow%2Fd-1d%7D-*%3E%2C%3Clogs-%7Bnow%2Fd-2d%7D-*%3E/_search
--------------------------------------------------
// TEST[continued]
-// TEST[s/now/2016.10.31||/]
+// TEST[s/now/2016.10.31%7C%7C/]
[[rollover-index-api-dry-run-ex]]
diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc
index 995efe28eea74..eeea74b5a544a 100644
--- a/docs/reference/indices/templates.asciidoc
+++ b/docs/reference/indices/templates.asciidoc
@@ -93,8 +93,6 @@ Name of the index template to create.
If `true`, this request cannot replace or update existing index templates.
Defaults to `false`.
-include::{docdir}/rest-api/common-parms.asciidoc[tag=flat-settings]
-
`order`::
(Optional,integer)
Order in which {es} applies this template
@@ -104,7 +102,7 @@ Templates with lower `order` values are merged first.
Templates with higher `order` values are merged later,
overriding templates with lower values.
-include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms]
+include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout]
[[put-index-template-api-request-body]]
diff --git a/docs/reference/ingest/processors/bytes.asciidoc b/docs/reference/ingest/processors/bytes.asciidoc
index 76f054cac64c2..5a551f8a82eac 100644
--- a/docs/reference/ingest/processors/bytes.asciidoc
+++ b/docs/reference/ingest/processors/bytes.asciidoc
@@ -1,6 +1,6 @@
[[bytes-processor]]
=== Bytes Processor
-Converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024).
+Converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). If the field is an array of strings, all members of the array will be converted.
Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if
the field is not a supported format or resultant value exceeds 2^63.
diff --git a/docs/reference/ingest/processors/gsub.asciidoc b/docs/reference/ingest/processors/gsub.asciidoc
index f6919eb1e95f1..2defa6e7cd138 100644
--- a/docs/reference/ingest/processors/gsub.asciidoc
+++ b/docs/reference/ingest/processors/gsub.asciidoc
@@ -1,7 +1,7 @@
[[gsub-processor]]
=== Gsub Processor
Converts a string field by applying a regular expression and a replacement.
-If the field is not a string, the processor will throw an exception.
+If the field is an array of strings, all members of the array will be converted. If any non-string values are encountered, the processor will throw an exception.
[[gsub-options]]
.Gsub Options
diff --git a/docs/reference/ingest/processors/html_strip.asciidoc b/docs/reference/ingest/processors/html_strip.asciidoc
index 2fa3cd7bbb8ae..bd4e8e8ccd920 100644
--- a/docs/reference/ingest/processors/html_strip.asciidoc
+++ b/docs/reference/ingest/processors/html_strip.asciidoc
@@ -1,6 +1,6 @@
[[htmlstrip-processor]]
=== HTML Strip Processor
-Removes HTML from field.
+Removes HTML tags from the field. If the field is an array of strings, HTML tags will be removed from all members of the array.
NOTE: Each HTML tag is replaced with a `\n` character.
diff --git a/docs/reference/ingest/processors/lowercase.asciidoc b/docs/reference/ingest/processors/lowercase.asciidoc
index 878b74ed9ba24..903d69625352f 100644
--- a/docs/reference/ingest/processors/lowercase.asciidoc
+++ b/docs/reference/ingest/processors/lowercase.asciidoc
@@ -1,6 +1,6 @@
[[lowercase-processor]]
=== Lowercase Processor
-Converts a string to its lowercase equivalent.
+Converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted.
[[lowercase-options]]
.Lowercase Options
diff --git a/docs/reference/ingest/processors/trim.asciidoc b/docs/reference/ingest/processors/trim.asciidoc
index 7c28767076ecc..ef3611161e2e2 100644
--- a/docs/reference/ingest/processors/trim.asciidoc
+++ b/docs/reference/ingest/processors/trim.asciidoc
@@ -1,6 +1,6 @@
[[trim-processor]]
=== Trim Processor
-Trims whitespace from field.
+Trims whitespace from field. If the field is an array of strings, all members of the array will be trimmed.
NOTE: This only works on leading and trailing whitespace.
diff --git a/docs/reference/ingest/processors/uppercase.asciidoc b/docs/reference/ingest/processors/uppercase.asciidoc
index 7565be1c7c303..3e26cedcf9cce 100644
--- a/docs/reference/ingest/processors/uppercase.asciidoc
+++ b/docs/reference/ingest/processors/uppercase.asciidoc
@@ -1,6 +1,6 @@
[[uppercase-processor]]
=== Uppercase Processor
-Converts a string to its uppercase equivalent.
+Converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted.
[[uppercase-options]]
.Uppercase Options
diff --git a/docs/reference/ingest/processors/url-decode.asciidoc b/docs/reference/ingest/processors/url-decode.asciidoc
index 76fc00c80f679..268fce1c18c2a 100644
--- a/docs/reference/ingest/processors/url-decode.asciidoc
+++ b/docs/reference/ingest/processors/url-decode.asciidoc
@@ -1,6 +1,6 @@
[[urldecode-processor]]
=== URL Decode Processor
-URL-decodes a string
+URL-decodes a string. If the field is an array of strings, all members of the array will be decoded.
[[urldecode-options]]
.URL Decode Options
diff --git a/docs/reference/mapping.asciidoc b/docs/reference/mapping.asciidoc
index 3f89571ae5808..96153f5e2a355 100644
--- a/docs/reference/mapping.asciidoc
+++ b/docs/reference/mapping.asciidoc
@@ -13,29 +13,22 @@ are stored and indexed. For instance, use mappings to define:
* custom rules to control the mapping for
<>.
-[float]
-[[mapping-type]]
-== Mapping Type
-
-Each index has one _mapping type_ which determines how the document will be
-indexed.
-
-deprecated::[6.0.0,See <>]
-
-A mapping type has:
+A mapping definition has:
<>::
Meta-fields are used to customize how a document's metadata associated is
treated. Examples of meta-fields include the document's
-<>, <>,
-<>, and <> fields.
+<>, <>, and
+<> fields.
<> or _properties_::
-A mapping type contains a list of fields or `properties` pertinent to the
+A mapping contains a list of fields or `properties` pertinent to the
document.
+NOTE: Before 7.0.0, the 'mappings' definition used to include a type name.
+For more details, please see <>.
[float]
[[field-datatypes]]
diff --git a/docs/reference/mapping/fields.asciidoc b/docs/reference/mapping/fields.asciidoc
index f6d5f00a9b5e0..0ea4b77441c42 100644
--- a/docs/reference/mapping/fields.asciidoc
+++ b/docs/reference/mapping/fields.asciidoc
@@ -15,7 +15,7 @@ can be customised when a mapping type is created.
<>::
- The document's <>.
+ The document's mapping type.
<>::
diff --git a/docs/reference/mapping/fields/routing-field.asciidoc b/docs/reference/mapping/fields/routing-field.asciidoc
index 25c3571f08e8d..46a204ccddfde 100644
--- a/docs/reference/mapping/fields/routing-field.asciidoc
+++ b/docs/reference/mapping/fields/routing-field.asciidoc
@@ -92,7 +92,7 @@ PUT my_index2/_doc/1 <2>
------------------------------
// TEST[catch:bad_request]
-<1> Routing is required for `_doc` documents.
+<1> Routing is required for all documents.
<2> This index request throws a `routing_missing_exception`.
==== Unique IDs with custom routing
@@ -128,4 +128,4 @@ less than `index.number_of_shards`.
Once enabled, the partitioned index will have the following limitations:
* Mappings with <> relationships cannot be created within it.
-* All mappings within the index must have the `_routing` field marked as required.
\ No newline at end of file
+* All mappings within the index must have the `_routing` field marked as required.
diff --git a/docs/reference/mapping/fields/type-field.asciidoc b/docs/reference/mapping/fields/type-field.asciidoc
index 2c5dc7195d643..4e6126cde9c9d 100644
--- a/docs/reference/mapping/fields/type-field.asciidoc
+++ b/docs/reference/mapping/fields/type-field.asciidoc
@@ -3,9 +3,9 @@
deprecated[6.0.0,See <>]
-Each document indexed is associated with a <> (see
-<