diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
index 8a04c229de261..35898a8a8e4d8 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
@@ -26,14 +26,15 @@
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
+import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.client.ml.FlushJobRequest;
import java.io.IOException;
@@ -73,6 +74,23 @@ static Request getJob(GetJobRequest getJobRequest) {
return request;
}
+ static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
+ .addPathPartAsIs("_stats")
+ .build();
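+        // Builds an endpoint such as /_xpack/ml/anomaly_detectors/job-1,job-2/_stats (job IDs shown are illustrative)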
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ RequestConverters.Params params = new RequestConverters.Params(request);
+ if (getJobStatsRequest.isAllowNoJobs() != null) {
+ params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
+ }
+ return request;
+ }
+
static Request openJob(OpenJobRequest openJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@@ -114,6 +132,19 @@ static Request deleteJob(DeleteJobRequest deleteJobRequest) {
return request;
}
+ static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(flushJobRequest.getJobId())
+ .addPathPartAsIs("_flush")
+ .build();
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@@ -128,33 +159,17 @@ static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOExceptio
return request;
}
- static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
- String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
- .addPathPartAsIs("anomaly_detectors")
- .addPathPart(flushJobRequest.getJobId())
- .addPathPartAsIs("_flush")
- .build();
- Request request = new Request(HttpPost.METHOD_NAME, endpoint);
- request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
- return request;
- }
-
- static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
+ static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
- .addPathPartAsIs("anomaly_detectors")
- .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
- .addPathPartAsIs("_stats")
- .build();
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(Strings.collectionToCommaDelimitedString(getOverallBucketsRequest.getJobIds()))
+ .addPathPartAsIs("results")
+ .addPathPartAsIs("overall_buckets")
+ .build();
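+        // Builds an endpoint such as /_xpack/ml/anomaly_detectors/job-1,job-2/results/overall_buckets (job IDs shown are illustrative)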
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-
- RequestConverters.Params params = new RequestConverters.Params(request);
- if (getJobStatsRequest.isAllowNoJobs() != null) {
- params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
- }
+ request.setEntity(createEntity(getOverallBucketsRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
index ac44f16b80b16..4757ec3182b40 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
@@ -32,6 +32,8 @@
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
+import org.elasticsearch.client.ml.GetOverallBucketsRequest;
+import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
@@ -136,6 +138,47 @@ public void getJobAsync(GetJobRequest request, RequestOptions options, ActionLis
Collections.emptySet());
}
+    /**
+     * Gets usage statistics for one or more Machine Learning jobs
+     *
+     * For additional info
+     * see Get Job stats docs
+     *
+     * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return {@link GetJobStatsResponse} response object containing
+     * the {@link JobStats} objects and the number of jobs found
+     * @throws IOException when there is a serialization issue sending the request or receiving the response
+     */
+    public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request,
+                MLRequestConverters::getJobStats,
+                options,
+                GetJobStatsResponse::fromXContent,
+                Collections.emptySet());
+    }
+
+    /**
+     * Gets usage statistics for one or more Machine Learning jobs, asynchronously.
+     *
+     * For additional info
+     * see Get Job stats docs
+     *
+     * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
+     */
+    public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options,
+                                 ActionListener<GetJobStatsResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+                MLRequestConverters::getJobStats,
+                options,
+                GetJobStatsResponse::fromXContent,
+                listener,
+                Collections.emptySet());
+    }
+
/**
* Deletes the given Machine Learning Job
*
@@ -257,42 +300,6 @@ public void closeJobAsync(CloseJobRequest request, RequestOptions options, Actio
Collections.emptySet());
}
- /**
- * Gets the buckets for a Machine Learning Job.
- *
- * For additional info
- * see ML GET buckets documentation
- *
- * @param request The request
- * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
- */
- public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
- return restHighLevelClient.performRequestAndParseEntity(request,
- MLRequestConverters::getBuckets,
- options,
- GetBucketsResponse::fromXContent,
- Collections.emptySet());
- }
-
- /**
- * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
- *
- * For additional info
- * see ML GET buckets documentation
- *
- * @param request The request
- * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
- * @param listener Listener to be notified upon request completion
- */
- public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener<GetBucketsResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity(request,
- MLRequestConverters::getBuckets,
- options,
- GetBucketsResponse::fromXContent,
- listener,
- Collections.emptySet());
- }
-
/**
* Flushes internally buffered data for the given Machine Learning Job ensuring all data sent to the job has been processed.
* This may cause new results to be calculated depending on the contents of the buffer
@@ -311,13 +318,13 @@ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, A
* @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
*/
- public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
+ public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
- MLRequestConverters::flushJob,
- options,
- FlushJobResponse::fromXContent,
- Collections.emptySet());
- }
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ Collections.emptySet());
+ }
/**
* Flushes internally buffered data for the given Machine Learning Job asynchronously ensuring all data sent to the job has been processed.
@@ -338,54 +345,88 @@ public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
- public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity(request,
- MLRequestConverters::flushJob,
- options,
- FlushJobResponse::fromXContent,
- listener,
- Collections.emptySet());
- }
+ public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
- /**
- * Gets usage statistics for one or more Machine Learning jobs
- *
+ /**
+ * Gets the buckets for a Machine Learning Job.
*
- * For additional info
- * see Get Job stats docs
- *
- * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * For additional info
+ * see ML GET buckets documentation
+ *
+ * @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
- * @return {@link GetJobStatsResponse} response object containing
- * the {@link JobStats} objects and the number of jobs found
- * @throws IOException when there is a serialization issue sending the request or receiving the response
*/
- public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
+ public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
- MLRequestConverters::getJobStats,
- options,
- GetJobStatsResponse::fromXContent,
- Collections.emptySet());
+ MLRequestConverters::getBuckets,
+ options,
+ GetBucketsResponse::fromXContent,
+ Collections.emptySet());
}
/**
- * Gets one or more Machine Learning job configuration info, asynchronously.
+ * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
+ *
+ * For additional info
+ * see ML GET buckets documentation
+ *
+ * @param request The request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener<GetBucketsResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getBuckets,
+ options,
+ GetBucketsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets overall buckets for a set of Machine Learning Jobs.
+ *
+ * For additional info
+ * see
+ * ML GET overall buckets documentation
*
+ * @param request The request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public GetOverallBucketsResponse getOverallBuckets(GetOverallBucketsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getOverallBuckets,
+ options,
+ GetOverallBucketsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets overall buckets for a set of Machine Learning Jobs, notifies listener once the requested buckets are retrieved.
*
- * For additional info
- * see Get Job stats docs
- *
- * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * For additional info
+ * see
+ * ML GET overall buckets documentation
+ *
+ * @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
- * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
+ * @param listener Listener to be notified upon request completion
*/
- public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener<GetJobStatsResponse> listener) {
+ public void getOverallBucketsAsync(GetOverallBucketsRequest request, RequestOptions options,
+ ActionListener<GetOverallBucketsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
- MLRequestConverters::getJobStats,
- options,
- GetJobStatsResponse::fromXContent,
- listener,
- Collections.emptySet());
+ MLRequestConverters::getOverallBuckets,
+ options,
+ GetOverallBucketsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
}
/**
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java
new file mode 100644
index 0000000000000..d78ac5d44ba17
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java
@@ -0,0 +1,266 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A request to retrieve overall buckets of a set of jobs
+ */
+public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject {
+
+ public static final ParseField TOP_N = new ParseField("top_n");
+ public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
+ public static final ParseField OVERALL_SCORE = new ParseField("overall_score");
+ public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
+ public static final ParseField START = new ParseField("start");
+ public static final ParseField END = new ParseField("end");
+ public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
+
+ private static final String ALL_JOBS = "_all";
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetOverallBucketsRequest, Void> PARSER = new ConstructingObjectParser<>(
+ "get_overall_buckets_request", a -> new GetOverallBucketsRequest((String) a[0]));
+
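+    // The parser accepts the same fields that toXContent below writes; "job_id" may be a comma-delimited list of job IDs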
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+ PARSER.declareInt(GetOverallBucketsRequest::setTopN, TOP_N);
+ PARSER.declareString(GetOverallBucketsRequest::setBucketSpan, BUCKET_SPAN);
+ PARSER.declareBoolean(GetOverallBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM);
+ PARSER.declareDouble(GetOverallBucketsRequest::setOverallScore, OVERALL_SCORE);
+ PARSER.declareStringOrNull(GetOverallBucketsRequest::setStart, START);
+ PARSER.declareStringOrNull(GetOverallBucketsRequest::setEnd, END);
+ PARSER.declareBoolean(GetOverallBucketsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
+ }
+
+ private final List<String> jobIds;
+ private Integer topN;
+ private TimeValue bucketSpan;
+ private Boolean excludeInterim;
+ private Double overallScore;
+ private String start;
+ private String end;
+ private Boolean allowNoJobs;
+
+ private GetOverallBucketsRequest(String jobId) {
+ this(Strings.tokenizeToStringArray(jobId, ","));
+ }
+
+ /**
+ * Constructs a request to retrieve overall buckets for a set of jobs
+ * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression.
+ */
+ public GetOverallBucketsRequest(String... jobIds) {
+ this(Arrays.asList(jobIds));
+ }
+
+ /**
+ * Constructs a request to retrieve overall buckets for a set of jobs
+ * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression.
+ */
+ public GetOverallBucketsRequest(List<String> jobIds) {
+ if (jobIds.stream().anyMatch(Objects::isNull)) {
+ throw new NullPointerException("jobIds must not contain null values");
+ }
+ if (jobIds.isEmpty()) {
+ this.jobIds = Collections.singletonList(ALL_JOBS);
+ } else {
+ this.jobIds = Collections.unmodifiableList(jobIds);
+ }
+ }
+
+ public List<String> getJobIds() {
+ return jobIds;
+ }
+
+ public Integer getTopN() {
+ return topN;
+ }
+
+ /**
+ * Sets the value of `top_n`.
+ * @param topN The number of top job bucket scores to be used in the overall_score calculation. Defaults to 1.
+ */
+ public void setTopN(Integer topN) {
+ this.topN = topN;
+ }
+
+ public TimeValue getBucketSpan() {
+ return bucketSpan;
+ }
+
+ /**
+ * Sets the value of `bucket_span`.
+ * @param bucketSpan The span of the overall buckets. Must be greater than or equal to the largest job’s bucket_span.
+ * Defaults to the largest job’s bucket_span.
+ */
+ public void setBucketSpan(TimeValue bucketSpan) {
+ this.bucketSpan = bucketSpan;
+ }
+
+ private void setBucketSpan(String bucketSpan) {
+ this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName());
+ }
+
+ public Boolean isExcludeInterim() {
+ return excludeInterim;
+ }
+
+ /**
+ * Sets the value of "exclude_interim".
+ * When {@code true}, interim overall buckets will be filtered out.
+ * Overall buckets are interim if any of the job buckets within the overall bucket interval are interim.
+ * @param excludeInterim value of "exclude_interim" to be set
+ */
+ public void setExcludeInterim(Boolean excludeInterim) {
+ this.excludeInterim = excludeInterim;
+ }
+
+ public String getStart() {
+ return start;
+ }
+
+ /**
+ * Sets the value of "start" which is a timestamp.
+ * Only overall buckets whose timestamp is on or after the "start" value will be returned.
+ * @param start value of "start" to be set
+ */
+ public void setStart(String start) {
+ this.start = start;
+ }
+
+ public String getEnd() {
+ return end;
+ }
+
+ /**
+ * Sets the value of "end" which is a timestamp.
+ * Only overall buckets whose timestamp is before the "end" value will be returned.
+ * @param end value of "end" to be set
+ */
+ public void setEnd(String end) {
+ this.end = end;
+ }
+
+ public Double getOverallScore() {
+ return overallScore;
+ }
+
+ /**
+ * Sets the value of "overall_score".
+ * Only buckets with an "overall_score" greater than or equal to this value will be returned.
+ * @param overallScore value of "overall_score" to be set
+ */
+ public void setOverallScore(double overallScore) {
+ this.overallScore = overallScore;
+ }
+
+ /**
+ * See {@link GetJobRequest#isAllowNoJobs()}
+ * @param allowNoJobs value of "allow_no_jobs" to be set
+ */
+ public void setAllowNoJobs(boolean allowNoJobs) {
+ this.allowNoJobs = allowNoJobs;
+ }
+
+ /**
+ * Whether to ignore if a wildcard expression matches no jobs.
+ *
+ * If this is `false`, then an error is returned when a wildcard (or `_all`) does not match any jobs
+ */
+ public Boolean isAllowNoJobs() {
+ return allowNoJobs;
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
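+        // For illustration only: a request for jobs "job-1,job-2" with top_n=2 and overall_score=75.0
+        // (hypothetical values) serializes to {"job_id":"job-1,job-2","top_n":2,"overall_score":75.0}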
+ builder.startObject();
+
+ if (jobIds.isEmpty() == false) {
+ builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
+ }
+ if (topN != null) {
+ builder.field(TOP_N.getPreferredName(), topN);
+ }
+ if (bucketSpan != null) {
+ builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep());
+ }
+ if (excludeInterim != null) {
+ builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
+ }
+ if (start != null) {
+ builder.field(START.getPreferredName(), start);
+ }
+ if (end != null) {
+ builder.field(END.getPreferredName(), end);
+ }
+ if (overallScore != null) {
+ builder.field(OVERALL_SCORE.getPreferredName(), overallScore);
+ }
+ if (allowNoJobs != null) {
+ builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobIds, topN, bucketSpan, excludeInterim, overallScore, start, end, allowNoJobs);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ GetOverallBucketsRequest other = (GetOverallBucketsRequest) obj;
+ return Objects.equals(jobIds, other.jobIds) &&
+ Objects.equals(topN, other.topN) &&
+ Objects.equals(bucketSpan, other.bucketSpan) &&
+ Objects.equals(excludeInterim, other.excludeInterim) &&
+ Objects.equals(overallScore, other.overallScore) &&
+ Objects.equals(start, other.start) &&
+ Objects.equals(end, other.end) &&
+ Objects.equals(allowNoJobs, other.allowNoJobs);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java
new file mode 100644
index 0000000000000..8c9b7e29cb670
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.results.OverallBucket;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A response containing the requested overall buckets
+ */
+public class GetOverallBucketsResponse extends AbstractResultResponse<OverallBucket> {
+
+ public static final ParseField OVERALL_BUCKETS = new ParseField("overall_buckets");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetOverallBucketsResponse, Void> PARSER = new ConstructingObjectParser<>(
+ "get_overall_buckets_response", true, a -> new GetOverallBucketsResponse((List<OverallBucket>) a[0], (long) a[1]));
+
+ static {
+ PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), OverallBucket.PARSER, OVERALL_BUCKETS);
+ PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
+ }
+
+ public static GetOverallBucketsResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ GetOverallBucketsResponse(List<OverallBucket> overallBuckets, long count) {
+ super(OVERALL_BUCKETS, overallBuckets, count);
+ }
+
+ /**
+ * The retrieved overall buckets
+ * @return the retrieved overall buckets
+ */
+ public List<OverallBucket> overallBuckets() {
+ return results;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(count, results);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ GetOverallBucketsResponse other = (GetOverallBucketsResponse) obj;
+ return count == other.count && Objects.equals(results, other.results);
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
index d84099d9a3c40..bd997224bebda 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
@@ -25,8 +25,12 @@
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
+import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetOverallBucketsRequest;
+import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
@@ -36,8 +40,6 @@
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.client.ml.FlushJobRequest;
-import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
@@ -79,6 +81,24 @@ public void testGetJob() {
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
}
+ public void testGetJobStats() {
+ GetJobStatsRequest getJobStatsRequestRequest = new GetJobStatsRequest();
+
+ Request request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
+ assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+ getJobStatsRequestRequest = new GetJobStatsRequest("job1", "jobs*");
+ getJobStatsRequestRequest.setAllowNoJobs(true);
+ request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
+ assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ }
+
+
public void testOpenJob() throws Exception {
String jobId = "some-job-id";
OpenJobRequest openJobRequest = new OpenJobRequest(jobId);
@@ -124,23 +144,6 @@ public void testDeleteJob() {
assertEquals(Boolean.toString(true), request.getParameters().get("force"));
}
- public void testGetBuckets() throws IOException {
- String jobId = randomAlphaOfLength(10);
- GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId);
- getBucketsRequest.setPageParams(new PageParams(100, 300));
- getBucketsRequest.setAnomalyScore(75.0);
- getBucketsRequest.setSort("anomaly_score");
- getBucketsRequest.setDescending(true);
-
- Request request = MLRequestConverters.getBuckets(getBucketsRequest);
- assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint());
- try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
- GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null);
- assertThat(parsedRequest, equalTo(getBucketsRequest));
- }
- }
-
public void testFlushJob() throws Exception {
String jobId = randomAlphaOfLength(10);
FlushJobRequest flushJobRequest = new FlushJobRequest(jobId);
@@ -157,26 +160,64 @@ public void testFlushJob() throws Exception {
flushJobRequest.setCalcInterim(true);
request = MLRequestConverters.flushJob(flushJobRequest);
assertEquals(
- "{\"job_id\":\"" + jobId + "\",\"calc_interim\":true,\"start\":\"105\"," +
- "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
- requestEntityToString(request));
+ "{\"job_id\":\"" + jobId + "\",\"calc_interim\":true,\"start\":\"105\"," +
+ "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
+ requestEntityToString(request));
}
- public void testGetJobStats() {
- GetJobStatsRequest getJobStatsRequestRequest = new GetJobStatsRequest();
-
- Request request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+ public void testGetBuckets() throws IOException {
+ String jobId = randomAlphaOfLength(10);
+ GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId);
+ getBucketsRequest.setPageParams(new PageParams(100, 300));
+ getBucketsRequest.setAnomalyScore(75.0);
+ getBucketsRequest.setSort("anomaly_score");
+ getBucketsRequest.setDescending(true);
+ Request request = MLRequestConverters.getBuckets(getBucketsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
- assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint());
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+ GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null);
+ assertThat(parsedRequest, equalTo(getBucketsRequest));
+ }
+ }
- getJobStatsRequestRequest = new GetJobStatsRequest("job1", "jobs*");
- getJobStatsRequestRequest.setAllowNoJobs(true);
- request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+ public void testGetOverallBuckets() throws IOException {
+ String jobId = randomAlphaOfLength(10);
+ GetOverallBucketsRequest getOverallBucketsRequest = new GetOverallBucketsRequest(jobId);
+ getOverallBucketsRequest.setBucketSpan(TimeValue.timeValueHours(3));
+ getOverallBucketsRequest.setTopN(3);
+ getOverallBucketsRequest.setStart("2018-08-08T00:00:00Z");
+ getOverallBucketsRequest.setEnd("2018-09-08T00:00:00Z");
+ getOverallBucketsRequest.setExcludeInterim(true);
+
+ Request request = MLRequestConverters.getOverallBuckets(getOverallBucketsRequest);
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/overall_buckets", request.getEndpoint());
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+ GetOverallBucketsRequest parsedRequest = GetOverallBucketsRequest.PARSER.apply(parser, null);
+ assertThat(parsedRequest, equalTo(getOverallBucketsRequest));
+ }
+ }
- assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
- assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ public void testGetRecords() throws IOException {
+ String jobId = randomAlphaOfLength(10);
+ GetRecordsRequest getRecordsRequest = new GetRecordsRequest(jobId);
+ getRecordsRequest.setStart("2018-08-08T00:00:00Z");
+ getRecordsRequest.setEnd("2018-09-08T00:00:00Z");
+ getRecordsRequest.setPageParams(new PageParams(100, 300));
+ getRecordsRequest.setRecordScore(75.0);
+ getRecordsRequest.setSort("anomaly_score");
+ getRecordsRequest.setDescending(true);
+ getRecordsRequest.setExcludeInterim(true);
+
+ Request request = MLRequestConverters.getRecords(getRecordsRequest);
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/records", request.getEndpoint());
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+ GetRecordsRequest parsedRequest = GetRecordsRequest.PARSER.apply(parser, null);
+ assertThat(parsedRequest, equalTo(getRecordsRequest));
+ }
}
private static Job createValidJob(String jobId) {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
index 6c8ca81cea224..b1c743098db4a 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
@@ -23,19 +23,29 @@
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
+import org.elasticsearch.client.ml.GetOverallBucketsRequest;
+import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.PutJobRequest;
+import org.elasticsearch.client.ml.job.config.AnalysisConfig;
+import org.elasticsearch.client.ml.job.config.DataDescription;
+import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.results.AnomalyRecord;
import org.elasticsearch.client.ml.job.results.Bucket;
+import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -59,7 +69,7 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
@Before
public void createJobAndIndexResults() throws IOException {
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
- Job job = MachineLearningIT.buildJob(JOB_ID);
+ Job job = buildJob(JOB_ID);
machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
BulkRequest bulkRequest = new BulkRequest();
@@ -206,6 +216,111 @@ public void testGetBuckets() throws IOException {
}
}
+ public void testGetOverallBuckets() throws IOException {
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+
+ GetBucketsRequest getBucketsRequest = new GetBucketsRequest(JOB_ID);
+ getBucketsRequest.setPageParams(new PageParams(0, 3));
+ List<Bucket> firstBuckets = machineLearningClient.getBuckets(getBucketsRequest, RequestOptions.DEFAULT).buckets();
+
+ String anotherJobId = "test-get-overall-buckets-job";
+ Job anotherJob = buildJob(anotherJobId);
+ machineLearningClient.putJob(new PutJobRequest(anotherJob), RequestOptions.DEFAULT);
+
+ // Let's index matching buckets with the score being 10.0 higher
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+ for (Bucket bucket : firstBuckets) {
+ IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC);
+ indexRequest.source("{\"job_id\":\"" + anotherJobId + "\", \"result_type\":\"bucket\", \"timestamp\": " +
+ bucket.getTimestamp().getTime() + "," + "\"bucket_span\": 3600,\"is_interim\": " + bucket.isInterim()
+ + ", \"anomaly_score\": " + String.valueOf(bucket.getAnomalyScore() + 10.0) + "}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+ highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
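+        // Overall buckets span the first job's 241 hourly buckets (240 finalized plus a trailing interim one)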
+
+ {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID, anotherJobId);
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(241L));
+ List<OverallBucket> overallBuckets = response.overallBuckets();
+ assertThat(overallBuckets.size(), equalTo(241));
+ assertThat(overallBuckets.stream().allMatch(b -> b.getBucketSpan() == 3600L), is(true));
+ assertThat(overallBuckets.get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ assertThat(overallBuckets.get(240).isInterim(), is(true));
+ }
+ {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID, anotherJobId);
+ request.setBucketSpan(TimeValue.timeValueHours(2));
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(121L));
+ }
+ {
+ long end = START_TIME_EPOCH_MS + 10 * 3600000L;
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID, anotherJobId);
+ request.setEnd(String.valueOf(end));
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(10L));
+ assertThat(response.overallBuckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ assertThat(response.overallBuckets().get(9).getTimestamp().getTime(), equalTo(end - 3600000L));
+ }
+ {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID, anotherJobId);
+ request.setExcludeInterim(true);
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(240L));
+ assertThat(response.overallBuckets().stream().allMatch(b -> b.isInterim() == false), is(true));
+ }
+ {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID);
+ request.setOverallScore(75.0);
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(bucketStats.criticalCount));
+ assertThat(response.overallBuckets().stream().allMatch(b -> b.getOverallScore() >= 75.0), is(true));
+ }
+ {
+ long start = START_TIME_EPOCH_MS + 10 * 3600000L;
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID, anotherJobId);
+ request.setStart(String.valueOf(start));
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(231L));
+ assertThat(response.overallBuckets().get(0).getTimestamp().getTime(), equalTo(start));
+ }
+ {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(JOB_ID, anotherJobId);
+ request.setEnd(String.valueOf(START_TIME_EPOCH_MS + 3 * 3600000L));
+ request.setTopN(2);
+
+ GetOverallBucketsResponse response = execute(request, machineLearningClient::getOverallBuckets,
+ machineLearningClient::getOverallBucketsAsync);
+
+ assertThat(response.count(), equalTo(3L));
+ List<OverallBucket> overallBuckets = response.overallBuckets();
+ for (int i = 0; i < overallBuckets.size(); ++i) {
+ // As the second job's scores are 10.0 higher than the first job's, the overall buckets should be +5.0 from the first job's buckets
+ assertThat(overallBuckets.get(i).getOverallScore(), is(closeTo(firstBuckets.get(i).getAnomalyScore() + 5.0, 0.0001)));
+ }
+ }
+ }
+
public void testGetRecords() throws IOException {
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
@@ -272,6 +387,19 @@ public void testGetRecords() throws IOException {
}
}
+ public static Job buildJob(String jobId) {
+ Job.Builder builder = new Job.Builder(jobId);
+
+ Detector detector = new Detector.Builder("count", null).build();
+ AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector));
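+        // A 1-hour bucket span, matching the bucket_span of the results these tests index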
+ configBuilder.setBucketSpan(TimeValue.timeValueHours(1));
+ builder.setAnalysisConfig(configBuilder);
+
+ DataDescription.Builder dataDescription = new DataDescription.Builder();
+ builder.setDataDescription(dataDescription);
+ return builder.build();
+ }
+
private static class Stats {
// score < 50.0
private long minorCount;
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
index f92f01f6bad19..427f75a80d029 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
@@ -20,9 +20,11 @@
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
+import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.MachineLearningGetResultsIT;
import org.elasticsearch.client.MachineLearningIT;
import org.elasticsearch.client.MlRestTestStateCleaner;
import org.elasticsearch.client.RequestOptions;
@@ -31,12 +33,16 @@
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.GetOverallBucketsRequest;
+import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
@@ -49,12 +55,11 @@
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.results.AnomalyRecord;
import org.elasticsearch.client.ml.job.results.Bucket;
+import org.elasticsearch.client.ml.job.results.OverallBucket;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.client.ml.FlushJobRequest;
-import org.elasticsearch.client.ml.FlushJobResponse;
-import org.elasticsearch.client.ml.job.stats.JobStats;
import org.junit.After;
import java.io.IOException;
@@ -65,9 +70,11 @@
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
+import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.core.Is.is;
public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
@@ -584,6 +591,107 @@ public void onFailure(Exception e) {
}
}
+ public void testGetOverallBuckets() throws IOException, InterruptedException {
+ RestHighLevelClient client = highLevelClient();
+
+ String jobId1 = "test-get-overall-buckets-1";
+ String jobId2 = "test-get-overall-buckets-2";
+ Job job1 = MachineLearningGetResultsIT.buildJob(jobId1);
+ Job job2 = MachineLearningGetResultsIT.buildJob(jobId2);
+ client.machineLearning().putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+ client.machineLearning().putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+ // Let us index some buckets
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+
+ {
+ IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc");
+ indexRequest.source("{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
+ "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc");
+ indexRequest.source("{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
+ "\"bucket_span\": 3600,\"is_interim\": false, \"anomaly_score\": 100.0}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+
+ client.bulk(bulkRequest, RequestOptions.DEFAULT);
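+        // The two indexed buckets share a timestamp and score 60.0 and 100.0, so with top_n=2
+        // the expected overall_score is their mean: 80.0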
+
+ {
+ // tag::x-pack-ml-get-overall-buckets-request
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // <1>
+ // end::x-pack-ml-get-overall-buckets-request
+
+ // tag::x-pack-ml-get-overall-buckets-bucket-span
+ request.setBucketSpan(TimeValue.timeValueHours(24)); // <1>
+ // end::x-pack-ml-get-overall-buckets-bucket-span
+
+ // tag::x-pack-ml-get-overall-buckets-end
+ request.setEnd("2018-08-21T00:00:00Z"); // <1>
+ // end::x-pack-ml-get-overall-buckets-end
+
+ // tag::x-pack-ml-get-overall-buckets-exclude-interim
+ request.setExcludeInterim(true); // <1>
+ // end::x-pack-ml-get-overall-buckets-exclude-interim
+
+ // tag::x-pack-ml-get-overall-buckets-overall-score
+ request.setOverallScore(75.0); // <1>
+ // end::x-pack-ml-get-overall-buckets-overall-score
+
+ // tag::x-pack-ml-get-overall-buckets-start
+ request.setStart("2018-08-01T00:00:00Z"); // <1>
+ // end::x-pack-ml-get-overall-buckets-start
+
+ // tag::x-pack-ml-get-overall-buckets-top-n
+ request.setTopN(2); // <1>
+ // end::x-pack-ml-get-overall-buckets-top-n
+
+ // tag::x-pack-ml-get-overall-buckets-execute
+ GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
+ // end::x-pack-ml-get-overall-buckets-execute
+
+ // tag::x-pack-ml-get-overall-buckets-response
+ long count = response.count(); // <1>
+ List<OverallBucket> overallBuckets = response.overallBuckets(); // <2>
+ // end::x-pack-ml-get-overall-buckets-response
+
+ assertEquals(1, overallBuckets.size());
+ assertThat(overallBuckets.get(0).getOverallScore(), is(closeTo(80.0, 0.001)));
+
+ }
+ {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2);
+
+ // tag::x-pack-ml-get-overall-buckets-listener
+ ActionListener<GetOverallBucketsResponse> listener =
+ new ActionListener<GetOverallBucketsResponse>() {
+ @Override
+ public void onResponse(GetOverallBucketsResponse getOverallBucketsResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::x-pack-ml-get-overall-buckets-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::x-pack-ml-get-overall-buckets-execute-async
+ client.machineLearning().getOverallBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::x-pack-ml-get-overall-buckets-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
public void testGetRecords() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetOverallBucketsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetOverallBucketsRequestTests.java
new file mode 100644
index 0000000000000..e50278fabbd14
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetOverallBucketsRequestTests.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class GetOverallBucketsRequestTests extends AbstractXContentTestCase<GetOverallBucketsRequest> {
+
+ @Override
+ protected GetOverallBucketsRequest createTestInstance() {
+ GetOverallBucketsRequest request = new GetOverallBucketsRequest(randomAlphaOfLengthBetween(1, 20));
+
+ if (randomBoolean()) {
+ request.setTopN(randomIntBetween(1, 10));
+ }
+
+ if (randomBoolean()) {
+ request.setBucketSpan(TimeValue.timeValueSeconds(randomIntBetween(1, 1_000_000)));
+ }
+ if (randomBoolean()) {
+ request.setStart(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setEnd(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setExcludeInterim(randomBoolean());
+ }
+ if (randomBoolean()) {
+ request.setOverallScore(randomDouble());
+ }
+ if (randomBoolean()) {
+ request.setAllowNoJobs(randomBoolean());
+ }
+ return request;
+ }
+
+ @Override
+ protected GetOverallBucketsRequest doParseInstance(XContentParser parser) throws IOException {
+ return GetOverallBucketsRequest.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetOverallBucketsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetOverallBucketsResponseTests.java
new file mode 100644
index 0000000000000..2c67dad4aa44d
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetOverallBucketsResponseTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.results.OverallBucket;
+import org.elasticsearch.client.ml.job.results.OverallBucketTests;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetOverallBucketsResponseTests extends AbstractXContentTestCase<GetOverallBucketsResponse> {
+
+ @Override
+ protected GetOverallBucketsResponse createTestInstance() {
+ int listSize = randomInt(10);
+ List<OverallBucket> overallBuckets = new ArrayList<>(listSize);
+ for (int j = 0; j < listSize; j++) {
+ OverallBucket overallBucket = OverallBucketTests.createRandom();
+ overallBuckets.add(overallBucket);
+ }
+ return new GetOverallBucketsResponse(overallBuckets, listSize);
+ }
+
+ @Override
+ protected GetOverallBucketsResponse doParseInstance(XContentParser parser) throws IOException {
+ return GetOverallBucketsResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/OverallBucketTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/OverallBucketTests.java
index 9ee6a2025b692..7f1af91d4dfa6 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/OverallBucketTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/OverallBucketTests.java
@@ -32,6 +32,10 @@ public class OverallBucketTests extends AbstractXContentTestCase
@Override
protected OverallBucket createTestInstance() {
+ return createRandom();
+ }
+
+ public static OverallBucket createRandom() {
int jobCount = randomIntBetween(0, 10);
List<OverallBucket.JobInfo> jobs = new ArrayList<>(jobCount);
for (int i = 0; i < jobCount; ++i) {
diff --git a/docs/java-rest/high-level/ml/get-buckets.asciidoc b/docs/java-rest/high-level/ml/get-buckets.asciidoc
index 81a21d3d18ac1..33a3059166c3f 100644
--- a/docs/java-rest/high-level/ml/get-buckets.asciidoc
+++ b/docs/java-rest/high-level/ml/get-buckets.asciidoc
@@ -70,7 +70,7 @@ include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-s
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-end]
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-start]
--------------------------------------------------
<1> Buckets with timestamps on or after this time will be returned.
diff --git a/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc b/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc
new file mode 100644
index 0000000000000..832eb8f251481
--- /dev/null
+++ b/docs/java-rest/high-level/ml/get-overall-buckets.asciidoc
@@ -0,0 +1,107 @@
+[[java-rest-high-x-pack-ml-get-overall-buckets]]
+=== Get Overall Buckets API
+
+The Get Overall Buckets API retrieves overall bucket results that
+summarize the bucket results of multiple jobs.
+It accepts a `GetOverallBucketsRequest` object and responds
+with a `GetOverallBucketsResponse` object.
+
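+For orientation, the snippet below is a condensed sketch of the full round trip. It is not
+taken from the test suite: it assumes an existing `RestHighLevelClient` named `client` and
+uses placeholder job IDs.
+
+["source","java"]
+--------------------------------------------------
+GetOverallBucketsRequest request = new GetOverallBucketsRequest("job-1", "job-2");
+request.setTopN(2);            // combine the top 2 job bucket scores per overall bucket
+request.setOverallScore(75.0); // only return overall buckets scoring at least 75.0
+GetOverallBucketsResponse response =
+    client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
+long count = response.count();
+List<OverallBucket> overallBuckets = response.overallBuckets();
+--------------------------------------------------
+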
+[[java-rest-high-x-pack-ml-get-overall-buckets-request]]
+==== Get Overall Buckets Request
+
+A `GetOverallBucketsRequest` object gets created with one or more `jobId`s.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-request]
+--------------------------------------------------
+<1> Constructing a new request referencing job IDs `jobId1` and `jobId2`.
+
+==== Optional Arguments
+The following arguments are optional:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-bucket-span]
+--------------------------------------------------
+<1> The span of the overall buckets. Must be greater than or equal to the jobs' largest `bucket_span`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-end]
+--------------------------------------------------
+<1> Overall buckets with timestamps earlier than this time will be returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-exclude-interim]
+--------------------------------------------------
+<1> If `true`, interim results will be excluded. Overall buckets are interim if any of the job buckets
+within the overall bucket interval are interim. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-overall-score]
+--------------------------------------------------
+<1> Overall buckets with an overall score greater than or equal to this value will be returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-start]
+--------------------------------------------------
+<1> Overall buckets with timestamps on or after this time will be returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-top-n]
+--------------------------------------------------
+<1> The number of top job bucket scores to be used in the `overall_score` calculation. Defaults to `1`.
+
+[[java-rest-high-x-pack-ml-get-overall-buckets-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-get-overall-buckets-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-execute-async]
+--------------------------------------------------
+<1> The `GetOverallBucketsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back with the `onResponse` method
+if the execution is successful or the `onFailure` method if the execution
+failed.
+
+A typical listener for `GetOverallBucketsResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
+
+[[java-rest-high-x-pack-ml-get-overall-buckets-response]]
+==== Get Overall Buckets Response
+
+The returned `GetOverallBucketsResponse` contains the requested buckets:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-overall-buckets-response]
+--------------------------------------------------
+<1> The count of overall buckets that were matched
+<2> The overall buckets retrieved
\ No newline at end of file
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index b791dbc0f8cfc..b39c83b691318 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -216,6 +216,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
* <<java-rest-high-x-pack-ml-flush-job>>
* <<java-rest-high-x-pack-ml-get-job-stats>>
* <<java-rest-high-x-pack-ml-get-buckets>>
+* <<java-rest-high-x-pack-ml-get-overall-buckets>>
* <<java-rest-high-x-pack-ml-get-records>>
include::ml/put-job.asciidoc[]
@@ -226,6 +227,7 @@ include::ml/close-job.asciidoc[]
include::ml/flush-job.asciidoc[]
include::ml/get-job-stats.asciidoc[]
include::ml/get-buckets.asciidoc[]
+include::ml/get-overall-buckets.asciidoc[]
include::ml/get-records.asciidoc[]
== Migration APIs