diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 9a4e825be7c66..587f2cee38f69 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -28,6 +28,7 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; @@ -69,7 +70,7 @@ static Request getJob(GetJobRequest getJobRequest) { return request; } - static Request openJob(OpenJobRequest openJobRequest) throws IOException { + static Request openJob(OpenJobRequest openJobRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") .addPathPartAsIs("ml") @@ -120,4 +121,18 @@ static Request deleteJob(DeleteJobRequest deleteJobRequest) { return request; } + + static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(getBucketsRequest.getJobId()) + .addPathPartAsIs("results") + .addPathPartAsIs("buckets") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 90acabfbdd8a4..c4dcc1eaffc5a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -23,6 +23,8 @@ import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; @@ -54,7 +56,7 @@ public final class MachineLearningClient { * For additional info * see ML PUT job documentation * - * @param request the PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings + * @param request The PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings * @param options Additional request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return PutJobResponse with enclosed {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} object * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -73,7 +75,7 @@ public PutJobResponse putJob(PutJobRequest request, RequestOptions options) thro * For additional info * see ML PUT job documentation * - * @param request the request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings + * @param request The request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -93,7 +95,7 @@ public void putJobAsync(PutJobRequest request, RequestOptions options, ActionLis * For additional info * see *

- * @param request {@link GetJobRequest} request containing a list of jobId(s) and additional options + * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return {@link GetJobResponse} response object containing * the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} objects and the number of jobs found @@ -114,7 +116,7 @@ public GetJobResponse getJob(GetJobRequest request, RequestOptions options) thro * For additional info * see *

- * @param request {@link GetJobRequest} request containing a list of jobId(s) and additional options + * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified with {@link GetJobResponse} upon request completion */ @@ -133,7 +135,7 @@ public void getJobAsync(GetJobRequest request, RequestOptions options, ActionLis * For additional info * see ML Delete Job documentation *

- * @param request the request to delete the job + * @param request The request to delete the job * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return action acknowledgement * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -152,7 +154,7 @@ public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions opti * For additional info * see ML Delete Job documentation *

- * @param request the request to delete the job + * @param request The request to delete the job * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -176,7 +178,7 @@ public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, Act * For additional info * see *

- * @param request request containing job_id and additional optional options + * @param request Request containing job_id and additional optional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return response containing if the job was successfully opened or not. * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -199,7 +201,7 @@ public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) t * For additional info * see *

- * @param request request containing job_id and additional optional options + * @param request Request containing job_id and additional optional options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -217,7 +219,7 @@ public void openJobAsync(OpenJobRequest request, RequestOptions options, ActionL * * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. * - * @param request request containing job_ids and additional options. See {@link CloseJobRequest} + * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return response containing if the job was successfully closed or not. * @throws IOException when there is a serialization issue sending the request or receiving the response @@ -235,7 +237,7 @@ public CloseJobResponse closeJob(CloseJobRequest request, RequestOptions options * * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. * - * @param request request containing job_ids and additional options. See {@link CloseJobRequest} + * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ @@ -247,4 +249,40 @@ public void closeJobAsync(CloseJobRequest request, RequestOptions options, Actio listener, Collections.emptySet()); } + + /** + * Gets the buckets for a Machine Learning Job. + *

+ * For additional info + * see ML GET buckets documentation + * + * @param request The request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + */ + public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::getBuckets, + options, + GetBucketsResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved. + *

+ * For additional info + * see ML GET buckets documentation + * + * @param request The request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::getBuckets, + options, + GetBucketsResponse::fromXContent, + listener, + Collections.emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 9ed09d06b72f1..ae78ad717dee7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -22,17 +22,20 @@ import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; import org.elasticsearch.protocol.xpack.ml.PutJobRequest; import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.protocol.xpack.ml.job.config.Detector; import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayOutputStream; @@ -49,6 +52,7 @@ public void testPutJob() throws IOException { Request request = MLRequestConverters.putJob(putJobRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo")); try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { Job parsedJob = Job.PARSER.apply(parser, null).build(); @@ -123,6 +127,23 @@ public void testDeleteJob() { assertEquals(Boolean.toString(true), request.getParameters().get("force")); } + public void testGetBuckets() throws IOException { + String jobId = randomAlphaOfLength(10); + GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId); + getBucketsRequest.setPageParams(new PageParams(100, 300)); + getBucketsRequest.setAnomalyScore(75.0); + getBucketsRequest.setSort("anomaly_score"); + getBucketsRequest.setDescending(true); + + Request request = MLRequestConverters.getBuckets(getBucketsRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null); + assertThat(parsedRequest, equalTo(getBucketsRequest)); + } + } + private static Job createValidJob(String 
jobId) { AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList( Detector.builder().setFunction("count").build())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java new file mode 100644 index 0000000000000..a4f83c347ad13 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -0,0 +1,217 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase { + + private static final String RESULTS_INDEX = ".ml-anomalies-shared"; + private static final String DOC = "doc"; + + private static final String JOB_ID = "get-results-it-job"; + + // 2018-08-01T00:00:00Z + private static final long START_TIME_EPOCH_MS = 1533081600000L; + + private BucketStats bucketStats = new BucketStats(); + + @Before + public void createJobAndIndexResults() throws IOException { + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + Job job = MachineLearningIT.buildJob(JOB_ID); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + long time = START_TIME_EPOCH_MS; + long endTime = time + 3600000L * 24 * 10; // 10 days of hourly buckets + while (time < endTime) { + addBucketIndexRequest(time, false, bulkRequest); + addRecordIndexRequests(time, false, bulkRequest); + time += 3600000L; + } + + // Also index an interim bucket + addBucketIndexRequest(time, true, bulkRequest); + addRecordIndexRequests(time, true, bulkRequest); + + highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT); + } + + private void addBucketIndexRequest(long timestamp, boolean 
isInterim, BulkRequest bulkRequest) { + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + double bucketScore = randomDoubleBetween(0.0, 100.0, true); + bucketStats.report(bucketScore); + indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"bucket\", \"timestamp\": " + timestamp + "," + + "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"anomaly_score\": " + bucketScore + + ", \"bucket_influencers\":[{\"job_id\": \"" + JOB_ID + "\", \"result_type\":\"bucket_influencer\", " + + "\"influencer_field_name\": \"bucket_time\", \"timestamp\": " + timestamp + ", \"bucket_span\": 3600, " + + "\"is_interim\": " + isInterim + "}]}", XContentType.JSON); + bulkRequest.add(indexRequest); + } + + private void addRecordIndexRequests(long timestamp, boolean isInterim, BulkRequest bulkRequest) { + if (randomBoolean()) { + return; + } + int recordCount = randomIntBetween(1, 3); + for (int i = 0; i < recordCount; ++i) { + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + double recordScore = randomDoubleBetween(0.0, 100.0, true); + double p = randomDoubleBetween(0.0, 0.05, false); + indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"record\", \"timestamp\": " + timestamp + "," + + "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"record_score\": " + recordScore + ", \"probability\": " + + p + "}", XContentType.JSON); + bulkRequest.add(indexRequest); + } + } + + @After + public void deleteJob() throws IOException { + new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + } + + public void testGetBuckets() throws IOException { + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(241L)); + assertThat(response.buckets().size(), equalTo(100)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setTimestamp("1533081600000"); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(1L)); + assertThat(response.buckets().size(), equalTo(1)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setAnomalyScore(75.0); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(bucketStats.criticalCount)); + assertThat(response.buckets().size(), equalTo((int) Math.min(100, bucketStats.criticalCount))); + assertThat(response.buckets().stream().anyMatch(b -> b.getAnomalyScore() < 75.0), is(false)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setExcludeInterim(true); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(240L)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setStart("1533081600000"); + request.setEnd("1533092400000"); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, 
machineLearningClient::getBucketsAsync); + + assertThat(response.count(), equalTo(3L)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS)); + assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3600000L)); + assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 2 * + 3600000L)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setPageParams(new PageParams(3, 3)); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.buckets().size(), equalTo(3)); + assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3 * 3600000L)); + assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 4 * 3600000L)); + assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 5 * 3600000L)); + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + request.setSort("anomaly_score"); + request.setDescending(true); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + double previousScore = 100.0; + for (Bucket bucket : response.buckets()) { + assertThat(bucket.getAnomalyScore(), lessThanOrEqualTo(previousScore)); + previousScore = bucket.getAnomalyScore(); + } + } + { + GetBucketsRequest request = new GetBucketsRequest(JOB_ID); + // Make sure we get all buckets + request.setPageParams(new PageParams(0, 10000)); + request.setExpand(true); + + GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync); + + assertThat(response.buckets().stream().anyMatch(b -> b.getRecords().size() > 0), is(true)); + } + } + + private static class BucketStats { + // score < 50.0 + private long minorCount; + + // score < 75.0 + private long majorCount; + + // score > 75.0 + private long criticalCount; + + private void report(double anomalyScore) { + if (anomalyScore < 50.0) { + minorCount++; + } else if (anomalyScore < 75.0) { + majorCount++; + } else { + criticalCount++; + } + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 73531bae5532f..683f91dae2eb1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -20,16 +20,21 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningIT; import org.elasticsearch.client.MlRestTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.protocol.xpack.ml.CloseJobRequest; import org.elasticsearch.protocol.xpack.ml.CloseJobResponse; import 
org.elasticsearch.protocol.xpack.ml.DeleteJobRequest; import org.elasticsearch.protocol.xpack.ml.DeleteJobResponse; +import org.elasticsearch.protocol.xpack.ml.GetBucketsRequest; +import org.elasticsearch.protocol.xpack.ml.GetBucketsResponse; import org.elasticsearch.protocol.xpack.ml.GetJobRequest; import org.elasticsearch.protocol.xpack.ml.GetJobResponse; import org.elasticsearch.protocol.xpack.ml.OpenJobRequest; @@ -40,6 +45,8 @@ import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription; import org.elasticsearch.protocol.xpack.ml.job.config.Detector; import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; import org.junit.After; import java.io.IOException; @@ -293,7 +300,7 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } - + public void testCloseJob() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -334,6 +341,7 @@ public void onFailure(Exception e) { }; //end::x-pack-ml-close-job-listener CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job"); + // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); @@ -345,4 +353,105 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testGetBuckets() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + String jobId = "test-get-buckets"; + Job job = MachineLearningIT.buildJob(jobId); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + // Let us index a bucket + IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc"); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + indexRequest.source("{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," + + "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}", XContentType.JSON); + client.index(indexRequest, RequestOptions.DEFAULT); + + { + // tag::x-pack-ml-get-buckets-request + GetBucketsRequest request = new GetBucketsRequest(jobId); // <1> + // end::x-pack-ml-get-buckets-request + + // tag::x-pack-ml-get-buckets-timestamp + request.setTimestamp("2018-08-17T00:00:00Z"); // <1> + // end::x-pack-ml-get-buckets-timestamp + + // Set timestamp to null as it is incompatible with other args + request.setTimestamp(null); + + // tag::x-pack-ml-get-buckets-anomaly-score + request.setAnomalyScore(75.0); // <1> + // end::x-pack-ml-get-buckets-anomaly-score + + // tag::x-pack-ml-get-buckets-desc + request.setDescending(true); // <1> + // end::x-pack-ml-get-buckets-desc + + // tag::x-pack-ml-get-buckets-end + request.setEnd("2018-08-21T00:00:00Z"); // <1> + // end::x-pack-ml-get-buckets-end + + // tag::x-pack-ml-get-buckets-exclude-interim + request.setExcludeInterim(true); // <1> + // end::x-pack-ml-get-buckets-exclude-interim + + // tag::x-pack-ml-get-buckets-expand + request.setExpand(true); // <1> + // end::x-pack-ml-get-buckets-expand + + // tag::x-pack-ml-get-buckets-page + request.setPageParams(new PageParams(100, 200)); // <1> + // end::x-pack-ml-get-buckets-page + + // Set page params back to null so the response contains the bucket we indexed + request.setPageParams(null); + + // tag::x-pack-ml-get-buckets-sort + 
request.setSort("anomaly_score"); // <1>
+            // end::x-pack-ml-get-buckets-sort
+
+            // tag::x-pack-ml-get-buckets-start
+            request.setStart("2018-08-01T00:00:00Z"); // <1>
+            // end::x-pack-ml-get-buckets-start
+
+            // tag::x-pack-ml-get-buckets-execute
+            GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
+            // end::x-pack-ml-get-buckets-execute
+
+            // tag::x-pack-ml-get-buckets-response
+            long count = response.count(); // <1>
+            List<Bucket> buckets = response.buckets(); // <2>
+            // end::x-pack-ml-get-buckets-response
+            assertEquals(1, buckets.size());
+        }
+        {
+            GetBucketsRequest request = new GetBucketsRequest(jobId);
+
+            // tag::x-pack-ml-get-buckets-listener
+            ActionListener<GetBucketsResponse> listener =
+                    new ActionListener<GetBucketsResponse>() {
+                        @Override
+                        public void onResponse(GetBucketsResponse getBucketsResponse) {
+                            // <1>
+                        }
+
+                        @Override
+                        public void onFailure(Exception e) {
+                            // <2>
+                        }
+                    };
+            // end::x-pack-ml-get-buckets-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::x-pack-ml-get-buckets-execute-async
+            client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::x-pack-ml-get-buckets-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
 }
diff --git a/docs/java-rest/high-level/ml/get-buckets.asciidoc b/docs/java-rest/high-level/ml/get-buckets.asciidoc
new file mode 100644
index 0000000000000..81a21d3d18ac1
--- /dev/null
+++ b/docs/java-rest/high-level/ml/get-buckets.asciidoc
@@ -0,0 +1,125 @@
+[[java-rest-high-x-pack-ml-get-buckets]]
+=== Get Buckets API
+
+The Get Buckets API retrieves one or more bucket results.
+It accepts a `GetBucketsRequest` object and responds
+with a `GetBucketsResponse` object.
+
+[[java-rest-high-x-pack-ml-get-buckets-request]]
+==== Get Buckets Request
+
+A `GetBucketsRequest` object is created with an existing, non-null `jobId`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-request]
+--------------------------------------------------
+<1> Constructing a new request referencing an existing `jobId`
+
+==== Optional Arguments
+The following arguments are optional:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-timestamp]
+--------------------------------------------------
+<1> The timestamp of the bucket to get. If not set, all buckets are returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-anomaly-score]
+--------------------------------------------------
+<1> Buckets with anomaly scores greater than or equal to this value will be returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-desc]
+--------------------------------------------------
+<1> If `true`, the buckets are sorted in descending order. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-end]
+--------------------------------------------------
+<1> Buckets with timestamps earlier than this time will be returned.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-exclude-interim]
+--------------------------------------------------
+<1> If `true`, interim results will be excluded. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-expand]
+--------------------------------------------------
+<1> If `true`, buckets will include their anomaly records. Defaults to `false`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-page]
+--------------------------------------------------
+<1> The page parameters `from` and `size`. `from` specifies the number of buckets to skip.
+`size` specifies the maximum number of buckets to get. Defaults to `0` and `100` respectively.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-sort]
+--------------------------------------------------
+<1> The field to sort buckets on. Defaults to `timestamp`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-start]
+--------------------------------------------------
+<1> Buckets with timestamps on or after this time will be returned.
+
+[[java-rest-high-x-pack-ml-get-buckets-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute]
+--------------------------------------------------
+
+
+[[java-rest-high-x-pack-ml-get-buckets-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-execute-async]
+--------------------------------------------------
+<1> The `GetBucketsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed, the `ActionListener` is called back with the `onResponse` method
+if the execution is successful or the `onFailure` method if the execution
+failed.
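+
+For example, a caller that needs to wait for the asynchronous call can wrap its listener
+in a `LatchedActionListener`, which is how the documentation test backing these snippets
+blocks until the response arrives. The following is an illustrative sketch rather than one
+of the tagged snippets above; it assumes that `client` and `request` are in scope and that
+`ActionListener`, `LatchedActionListener`, `CountDownLatch` and `TimeUnit` are imported:
+
+["source","java"]
+--------------------------------------------------
+ActionListener<GetBucketsResponse> listener = ActionListener.wrap(
+    response -> { /* inspect response.buckets() */ },
+    e -> { /* handle the failure */ });
+
+// The latched listener counts the latch down once the wrapped listener has been notified
+CountDownLatch latch = new CountDownLatch(1);
+client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT,
+    new LatchedActionListener<>(listener, latch));
+
+// Wait for up to 30 seconds for the asynchronous call to complete
+latch.await(30L, TimeUnit.SECONDS);
+--------------------------------------------------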
+ +A typical listener for `GetBucketsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs + +[[java-rest-high-snapshot-ml-get-buckets-response]] +==== Get Buckets Response + +The returned `GetBucketsResponse` contains the requested buckets: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-buckets-response] +-------------------------------------------------- +<1> The count of buckets that were matched +<2> The buckets retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index c7b46b399622f..e04e391f3e0b7 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -209,12 +209,14 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> include::ml/put-job.asciidoc[] include::ml/get-job.asciidoc[] include::ml/delete-job.asciidoc[] include::ml/open-job.asciidoc[] include::ml/close-job.asciidoc[] +include::ml/get-buckets.asciidoc[] == Migration APIs diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java new file mode 100644 index 0000000000000..4957f9b6ff6e7 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequest.java @@ -0,0 +1,268 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Result; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; + +import java.io.IOException; +import java.util.Objects; + +/** + * A request to retrieve buckets of a given job + */ +public class GetBucketsRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField EXPAND = new ParseField("expand"); + public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); + public static final ParseField START = new ParseField("start"); + public static final ParseField END = new ParseField("end"); + public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); + public static final ParseField TIMESTAMP = new ParseField("timestamp"); + public static final ParseField SORT = new ParseField("sort"); + public static final ParseField DESCENDING = new ParseField("desc"); + + public static final ObjectParser PARSER = new ObjectParser<>("get_buckets_request", GetBucketsRequest::new); + + static { + PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); + PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP); + PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND); + PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); + PARSER.declareStringOrNull(GetBucketsRequest::setStart, START); + PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END); + PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); + PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE); + PARSER.declareString(GetBucketsRequest::setSort, SORT); + PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING); + } + + private String jobId; + private String timestamp; + private Boolean expand; + private Boolean excludeInterim; + private String start; + private String end; + private PageParams pageParams; + private Double anomalyScore; + private String sort; + private Boolean descending; + + private GetBucketsRequest() {} + + /** + * Constructs a request to retrieve buckets of a given job + * @param jobId id of the job to retrieve buckets of + */ + public GetBucketsRequest(String jobId) { + this.jobId = Objects.requireNonNull(jobId); + } + + public String getJobId() { + return jobId; + } + + /** + * Sets the timestamp of a specific bucket to be retrieved. + * @param timestamp the timestamp of a specific bucket to be retrieved + */ + public void setTimestamp(String timestamp) { + this.timestamp = timestamp; + } + + public String getTimestamp() { + return timestamp; + } + + public boolean isExpand() { + return expand; + } + + /** + * Sets the value of "expand". + * When {@code true}, buckets will be expanded to include their records. + * @param expand value of "expand" to be set + */ + public void setExpand(boolean expand) { + this.expand = expand; + } + + public boolean isExcludeInterim() { + return excludeInterim; + } + + /** + * Sets the value of "exclude_interim". + * When {@code true}, interim buckets will be filtered out. 
+ * @param excludeInterim value of "exclude_interim" to be set + */ + public void setExcludeInterim(boolean excludeInterim) { + this.excludeInterim = excludeInterim; + } + + public String getStart() { + return start; + } + + /** + * Sets the value of "start" which is a timestamp. + * Only buckets whose timestamp is on or after the "start" value will be returned. + * @param start value of "start" to be set + */ + public void setStart(String start) { + this.start = start; + } + + public String getEnd() { + return end; + } + + /** + * Sets the value of "end" which is a timestamp. + * Only buckets whose timestamp is before the "end" value will be returned. + * @param end value of "end" to be set + */ + public void setEnd(String end) { + this.end = end; + } + + public PageParams getPageParams() { + return pageParams; + } + + /** + * Sets the paging parameters + * @param pageParams the paging parameters + */ + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + public Double getAnomalyScore() { + return anomalyScore; + } + + /** + * Sets the value of "anomaly_score". + * Only buckets with "anomaly_score" equal or greater will be returned. + * @param anomalyScore value of "anomaly_score". + */ + public void setAnomalyScore(double anomalyScore) { + this.anomalyScore = anomalyScore; + } + + public String getSort() { + return sort; + } + + /** + * Sets the value of "sort". + * Specifies the bucket field to sort on. + * @param sort value of "sort". + */ + public void setSort(String sort) { + this.sort = sort; + } + + public boolean isDescending() { + return descending; + } + + /** + * Sets the value of "desc". + * Specifies the sorting order. + * @param descending value of "desc" + */ + public void setDescending(boolean descending) { + this.descending = descending; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (timestamp != null) { + builder.field(Result.TIMESTAMP.getPreferredName(), timestamp); + } + if (expand != null) { + builder.field(EXPAND.getPreferredName(), expand); + } + if (excludeInterim != null) { + builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); + } + if (start != null) { + builder.field(START.getPreferredName(), start); + } + if (end != null) { + builder.field(END.getPreferredName(), end); + } + if (pageParams != null) { + builder.field(PageParams.PAGE.getPreferredName(), pageParams); + } + if (anomalyScore != null) { + builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); + } + if (sort != null) { + builder.field(SORT.getPreferredName(), sort); + } + if (descending != null) { + builder.field(DESCENDING.getPreferredName(), descending); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetBucketsRequest other = (GetBucketsRequest) obj; + return Objects.equals(jobId, other.jobId) && + Objects.equals(timestamp, other.timestamp) && + Objects.equals(expand, other.expand) && + Objects.equals(excludeInterim, other.excludeInterim) && + Objects.equals(anomalyScore, other.anomalyScore) 
&& + Objects.equals(pageParams, other.pageParams) && + Objects.equals(start, other.start) && + Objects.equals(end, other.end) && + Objects.equals(sort, other.sort) && + Objects.equals(descending, other.descending); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java new file mode 100644 index 0000000000000..4350661f68b33 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponse.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * A response containing the requested buckets + */ +public class GetBucketsResponse extends AbstractResultResponse { + + public static final ParseField BUCKETS = new ParseField("buckets"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("get_buckets_response", + true, a -> new GetBucketsResponse((List) a[0], (long) a[1])); + + static { + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); + } + + public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + GetBucketsResponse(List buckets, long count) { + super(BUCKETS, buckets, count); + } + + /** + * The retrieved buckets + * @return the retrieved buckets + */ + public List buckets() { + return results; + } + + @Override + public int hashCode() { + return Objects.hash(count, results); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetBucketsResponse other = (GetBucketsResponse) obj; + return count == other.count && Objects.equals(results, other.results); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java index b37bd35d6b17f..1bd9e87f6544c 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java @@ -39,9 +39,6 @@ public PutJobResponse(Job job) { this.job = job; 
} - public PutJobResponse() { - } - public Job getResponse() { return job; } diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java new file mode 100644 index 0000000000000..2e20e84d7b81b --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/PageParams.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.util; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Paging parameters for GET requests + */ +public class PageParams implements ToXContentObject { + + public static final ParseField PAGE = new ParseField("page"); + public static final ParseField FROM = new ParseField("from"); + public static final ParseField SIZE = new ParseField("size"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(PAGE.getPreferredName(), + a -> new PageParams((Integer) a[0], (Integer) a[1])); + + static { + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), FROM); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), SIZE); + } + + private final Integer from; + private final Integer size; + + /** + * Constructs paging parameters + * @param from skips the specified number of items. When {@code null} the default value will be used. + * @param size specifies the maximum number of items to obtain. When {@code null} the default value will be used. 
+ */ + public PageParams(@Nullable Integer from, @Nullable Integer size) { + this.from = from; + this.size = size; + } + + public int getFrom() { + return from; + } + + public int getSize() { + return size; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (from != null) { + builder.field(FROM.getPreferredName(), from); + } + if (size != null) { + builder.field(SIZE.getPreferredName(), size); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(from, size); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + PageParams other = (PageParams) obj; + return Objects.equals(from, other.from) && + Objects.equals(size, other.size); + } + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java new file mode 100644 index 0000000000000..6364ad339b120 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsRequestTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class GetBucketsRequestTests extends AbstractXContentTestCase { + + @Override + protected GetBucketsRequest createTestInstance() { + GetBucketsRequest request = new GetBucketsRequest(randomAlphaOfLengthBetween(1, 20)); + + if (randomBoolean()) { + request.setTimestamp(String.valueOf(randomLong())); + } else { + if (randomBoolean()) { + request.setStart(String.valueOf(randomLong())); + } + if (randomBoolean()) { + request.setEnd(String.valueOf(randomLong())); + } + if (randomBoolean()) { + request.setExcludeInterim(randomBoolean()); + } + if (randomBoolean()) { + request.setAnomalyScore(randomDouble()); + } + if (randomBoolean()) { + int from = randomInt(10000); + int size = randomInt(10000); + request.setPageParams(new PageParams(from, size)); + } + if (randomBoolean()) { + request.setSort("anomaly_score"); + } + if (randomBoolean()) { + request.setDescending(randomBoolean()); + } + } + if (randomBoolean()) { + request.setExpand(randomBoolean()); + } + if (randomBoolean()) { + request.setExcludeInterim(randomBoolean()); + } + return request; + } + + @Override + protected GetBucketsRequest doParseInstance(XContentParser parser) throws IOException { + return GetBucketsRequest.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java new file mode 100644 index 0000000000000..889c3e93bc708 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/GetBucketsResponseTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.results.Bucket; +import org.elasticsearch.protocol.xpack.ml.job.results.BucketTests; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetBucketsResponseTests extends AbstractXContentTestCase { + + @Override + protected GetBucketsResponse createTestInstance() { + String jobId = randomAlphaOfLength(20); + int listSize = randomInt(10); + List buckets = new ArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + Bucket bucket = BucketTests.createTestInstance(jobId); + buckets.add(bucket); + } + return new GetBucketsResponse(buckets, listSize); + } + + @Override + protected GetBucketsResponse doParseInstance(XContentParser parser) throws IOException { + return GetBucketsResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java index 28b1893afe18b..0eb988d8eb82b 100644 --- a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketTests.java @@ -35,7 +35,7 @@ public Bucket createTestInstance() { return createTestInstance("foo"); } - public Bucket createTestInstance(String jobId) { + public static Bucket createTestInstance(String jobId) { Bucket bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong()); if (randomBoolean()) { bucket.setAnomalyScore(randomDouble()); diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java new file mode 100644 index 0000000000000..6bd51e93c6f37 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/util/PageParamsTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml.util; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.util.PageParams; +import org.elasticsearch.test.AbstractXContentTestCase; + +public class PageParamsTests extends AbstractXContentTestCase { + + @Override + protected PageParams doParseInstance(XContentParser parser) { + return PageParams.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected PageParams createTestInstance() { + Integer from = randomBoolean() ? randomInt() : null; + Integer size = randomBoolean() ? randomInt() : null; + return new PageParams(from, size); + } +}
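
Taken together, the new client API added in this change can be exercised end to end roughly
as follows. This is an illustrative sketch rather than part of the change itself: the
`client` variable is assumed to be an already configured `RestHighLevelClient`, "my-job" is
a placeholder for the id of an existing anomaly detection job that has produced results, and
the concrete filter values are arbitrary.

["source","java"]
--------------------------------------------------
GetBucketsRequest request = new GetBucketsRequest("my-job");
request.setStart("2018-08-01T00:00:00Z");      // only buckets on or after this time
request.setEnd("2018-08-11T00:00:00Z");        // and strictly before this time
request.setExcludeInterim(true);               // skip interim buckets
request.setAnomalyScore(75.0);                 // only buckets scoring 75.0 or higher
request.setSort("anomaly_score");              // sort on the anomaly score...
request.setDescending(true);                   // ...highest scores first
request.setPageParams(new PageParams(0, 25));  // first page of at most 25 buckets

GetBucketsResponse response =
    client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);

System.out.println("matching buckets: " + response.count());
for (Bucket bucket : response.buckets()) {
    System.out.println(bucket.getTimestamp() + " -> " + bucket.getAnomalyScore());
}
--------------------------------------------------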