From 91ff7b5056592a644f82579a05fab1b2d3425b86 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Thu, 24 Oct 2019 16:12:30 -0400 Subject: [PATCH 1/6] [ML][Inference] Adding _stats endpoint for inference --- .../org/elasticsearch/ingest/IngestStats.java | 61 ++++ .../xpack/core/XPackClientPlugin.java | 6 + .../action/GetTrainedModelsStatsAction.java | 193 ++++++++++ ...TrainedModelsStatsActionResponseTests.java | 60 ++++ .../xpack/ml/MachineLearning.java | 7 +- .../TransportGetTrainedModelsStatsAction.java | 336 ++++++++++++++++++ .../RestGetTrainedModelsStatsAction.java | 52 +++ ...sportGetTrainedModelsStatsActionTests.java | 282 +++++++++++++++ .../api/ml.get_trained_models_stats.json | 48 +++ .../test/ml/inference_stats_crud.yml | 228 ++++++++++++ 10 files changed, 1272 insertions(+), 1 deletion(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/ml.get_trained_models_stats.json create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index f140c5f155563..29b088745ee37 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ 
b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -32,6 +32,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; public class IngestStats implements Writeable, ToXContentFragment { @@ -135,6 +136,21 @@ public Map> getProcessorStats() { return processorStats; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestStats that = (IngestStats) o; + return Objects.equals(totalStats, that.totalStats) + && Objects.equals(pipelineStats, that.pipelineStats) + && Objects.equals(processorStats, that.processorStats); + } + + @Override + public int hashCode() { + return Objects.hash(totalStats, pipelineStats, processorStats); + } + public static class Stats implements Writeable, ToXContentFragment { private final long ingestCount; @@ -203,6 +219,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("failed", ingestFailedCount); return builder; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestStats.Stats that = (IngestStats.Stats) o; + return Objects.equals(ingestCount, that.ingestCount) + && Objects.equals(ingestTimeInMillis, that.ingestTimeInMillis) + && Objects.equals(ingestFailedCount, that.ingestFailedCount) + && Objects.equals(ingestCurrent, that.ingestCurrent); + } + + @Override + public int hashCode() { + return Objects.hash(ingestCount, ingestTimeInMillis, ingestFailedCount, ingestCurrent); + } } /** @@ -255,6 +287,20 @@ public String getPipelineId() { public Stats getStats() { return stats; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestStats.PipelineStat that = (IngestStats.PipelineStat) o; + return 
Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(stats, that.stats); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId, stats); + } } /** @@ -276,5 +322,20 @@ public String getName() { public Stats getStats() { return stats; } + + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IngestStats.ProcessorStat that = (IngestStats.ProcessorStat) o; + return Objects.equals(name, that.name) + && Objects.equals(stats, that.stats); + } + + @Override + public int hashCode() { + return Objects.hash(name, stats); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 08e595dc9e057..ec4d1cdce0fed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -77,6 +77,7 @@ import org.elasticsearch.xpack.core.ml.action.DeleteForecastAction; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; +import org.elasticsearch.xpack.core.ml.action.DeleteTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.EstimateMemoryUsageAction; import org.elasticsearch.xpack.core.ml.action.EvaluateDataFrameAction; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; @@ -98,6 +99,8 @@ import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import 
org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; @@ -344,6 +347,9 @@ public List> getClientActions() { EvaluateDataFrameAction.INSTANCE, EstimateMemoryUsageAction.INSTANCE, InferModelAction.INSTANCE, + GetTrainedModelsAction.INSTANCE, + DeleteTrainedModelAction.INSTANCE, + GetTrainedModelsStatsAction.INSTANCE, // security ClearRealmCacheAction.INSTANCE, ClearRolesCacheAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java new file mode 100644 index 0000000000000..b8ed168aea0d4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -0,0 +1,193 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.ingest.IngestStats; +import org.elasticsearch.xpack.core.action.AbstractGetResourcesRequest; +import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse; +import org.elasticsearch.xpack.core.action.util.QueryPage; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +public class GetTrainedModelsStatsAction extends ActionType { + + public static final GetTrainedModelsStatsAction INSTANCE = new GetTrainedModelsStatsAction(); + public static final String NAME = "cluster:monitor/xpack/ml/inference/stats/get"; + + public static final ParseField MODEL_ID = new ParseField("model_id"); + public static final ParseField INGEST_STATS = new ParseField("ingest_stats"); + public static final ParseField PIPELINE_COUNT = new ParseField("pipeline_count"); + + private GetTrainedModelsStatsAction() { + super(NAME, GetTrainedModelsStatsAction.Response::new); + } + + public static class Request extends AbstractGetResourcesRequest { + + public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); + + public Request() { + setAllowNoResources(true); + } + + public Request(String id) { + setResourceId(id); + setAllowNoResources(true); + } + + public Request(StreamInput in) throws IOException { + 
super(in); + } + + @Override + public String getResourceIdField() { + return TrainedModelConfig.MODEL_ID.getPreferredName(); + } + + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, GetTrainedModelsStatsAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AbstractGetResourcesResponse { + + public static class TrainedModelStats implements ToXContentObject, Writeable { + private final String modelId; + private final IngestStats ingestStats; + private final int pipelineCount; + + private static final IngestStats EMPTY_INGEST_STATS = new IngestStats(new IngestStats.Stats(0, 0, 0, 0), + Collections.emptyList(), + Collections.emptyMap()); + + public TrainedModelStats(String modelId, IngestStats ingestStats, int pipelineCount) { + this.modelId = Objects.requireNonNull(modelId); + this.ingestStats = ingestStats == null ? EMPTY_INGEST_STATS : ingestStats; + this.pipelineCount = pipelineCount; + } + + public TrainedModelStats(StreamInput in) throws IOException { + modelId = in.readString(); + ingestStats = new IngestStats(in); + pipelineCount = in.readVInt(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MODEL_ID.getPreferredName(), modelId); + builder.field(PIPELINE_COUNT.getPreferredName(), pipelineCount); + if (pipelineCount > 0) { + builder.startObject(INGEST_STATS.getPreferredName()); + // Ingest stats is a fragment + builder.value(ingestStats); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(modelId); + ingestStats.writeTo(out); + out.writeVInt(pipelineCount); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, ingestStats, pipelineCount); + } + + @Override + public boolean equals(Object obj) { + 
if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TrainedModelStats other = (TrainedModelStats) obj; + return Objects.equals(this.modelId, other.modelId) + && Objects.equals(this.ingestStats, other.ingestStats) + && Objects.equals(this.pipelineCount, other.pipelineCount); + } + } + + public static final ParseField RESULTS_FIELD = new ParseField("trained_model_stats"); + + public Response(StreamInput in) throws IOException { + super(in); + } + + public Response(QueryPage trainedModels) { + super(trainedModels); + } + + @Override + protected Reader getReader() { + return Response.TrainedModelStats::new; + } + + public static class Builder { + + private long totalModelCount; + private Set expandedIds; + private Map ingestStatsMap; + + public Builder setTotalModelCount(long totalModelCount) { + this.totalModelCount = totalModelCount; + return this; + } + + public Builder setExpandedIds(Set expandedIds) { + this.expandedIds = expandedIds; + return this; + } + + public Set getExpandedIds() { + return this.expandedIds; + } + + public Builder setIngestStatsByModelId(Map ingestStatsByModelId) { + this.ingestStatsMap = ingestStatsByModelId; + return this; + } + + public Response build() { + List trainedModelStats = new ArrayList<>(expandedIds.size()); + expandedIds.forEach(id -> { + IngestStats ingestStats = ingestStatsMap.get(id); + trainedModelStats.add(new TrainedModelStats(id, ingestStats, ingestStats == null ? 
+ 0 : + ingestStats.getPipelineStats().size())); + }); + return new Response(new QueryPage<>(trainedModelStats, totalModelCount, RESULTS_FIELD)); + } + } + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java new file mode 100644 index 0000000000000..3e49f73917a44 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsActionResponseTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.ingest.IngestStats; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.action.util.QueryPage; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction.Response; + +import java.util.List; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + + +public class GetTrainedModelsStatsActionResponseTests extends AbstractWireSerializingTestCase { + + @Override + protected Response createTestInstance() { + int listSize = randomInt(10); + List trainedModelStats = Stream.generate(() -> randomAlphaOfLength(10)) + .limit(listSize).map(id -> + new Response.TrainedModelStats(id, + randomBoolean() ? 
randomIngestStats() : null, + randomIntBetween(0, 10)) + ) + .collect(Collectors.toList()); + return new Response(new QueryPage<>(trainedModelStats, randomLongBetween(listSize, 1000), Response.RESULTS_FIELD)); + } + + private IngestStats randomIngestStats() { + List pipelineIds = Stream.generate(()-> randomAlphaOfLength(10)) + .limit(randomIntBetween(0, 10)) + .collect(Collectors.toList()); + return new IngestStats( + new IngestStats.Stats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + pipelineIds.stream().map(id -> new IngestStats.PipelineStat(id, randomStats())).collect(Collectors.toList()), + pipelineIds.stream().collect(Collectors.toMap(Function.identity(), (v) -> randomProcessorStats()))); + } + + private IngestStats.Stats randomStats(){ + return new IngestStats.Stats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); + } + + private List randomProcessorStats() { + return Stream.generate(() -> randomAlphaOfLength(10)) + .limit(randomIntBetween(0, 10)) + .map(name -> new IngestStats.ProcessorStat(name, randomStats())) + .collect(Collectors.toList()); + } + + @Override + protected Writeable.Reader instanceReader() { + return Response::new; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 72e903d79baf5..2a23f9e786df2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -98,6 +98,7 @@ import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import 
org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; @@ -167,6 +168,7 @@ import org.elasticsearch.xpack.ml.action.TransportGetModelSnapshotsAction; import org.elasticsearch.xpack.ml.action.TransportGetOverallBucketsAction; import org.elasticsearch.xpack.ml.action.TransportGetRecordsAction; +import org.elasticsearch.xpack.ml.action.TransportGetTrainedModelsStatsAction; import org.elasticsearch.xpack.ml.action.TransportInferModelAction; import org.elasticsearch.xpack.ml.action.TransportGetTrainedModelsAction; import org.elasticsearch.xpack.ml.action.TransportIsolateDatafeedAction; @@ -274,6 +276,7 @@ import org.elasticsearch.xpack.ml.rest.filter.RestUpdateFilterAction; import org.elasticsearch.xpack.ml.rest.inference.RestDeleteTrainedModelAction; import org.elasticsearch.xpack.ml.rest.inference.RestGetTrainedModelsAction; +import org.elasticsearch.xpack.ml.rest.inference.RestGetTrainedModelsStatsAction; import org.elasticsearch.xpack.ml.rest.job.RestCloseJobAction; import org.elasticsearch.xpack.ml.rest.job.RestDeleteForecastAction; import org.elasticsearch.xpack.ml.rest.job.RestDeleteJobAction; @@ -736,7 +739,8 @@ public List getRestHandlers(Settings settings, RestController restC new RestEvaluateDataFrameAction(restController), new RestEstimateMemoryUsageAction(restController), new RestGetTrainedModelsAction(restController), - new RestDeleteTrainedModelAction(restController) + new RestDeleteTrainedModelAction(restController), + new RestGetTrainedModelsStatsAction(restController) ); } @@ -811,6 +815,7 @@ public List getRestHandlers(Settings settings, RestController restC new ActionHandler<>(InferModelAction.INSTANCE, TransportInferModelAction.class), new ActionHandler<>(GetTrainedModelsAction.INSTANCE, TransportGetTrainedModelsAction.class), new ActionHandler<>(DeleteTrainedModelAction.INSTANCE, 
TransportDeleteTrainedModelAction.class), + new ActionHandler<>(GetTrainedModelsStatsAction.INSTANCE, TransportGetTrainedModelsStatsAction.class), usageAction, infoAction); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java new file mode 100644 index 0000000000000..150e8609712a7 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -0,0 +1,336 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.regex.Regex; +import 
org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.ingest.IngestMetadata; +import org.elasticsearch.ingest.IngestService; +import org.elasticsearch.ingest.IngestStats; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + + +public class TransportGetTrainedModelsStatsAction extends HandledTransportAction { + + private final Client client; + private final ClusterService clusterService; + private final IngestService ingestService; + + @Inject + public TransportGetTrainedModelsStatsAction(TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + IngestService ingestService, + Client client) { + super(GetTrainedModelsStatsAction.NAME, transportService, actionFilters, GetTrainedModelsStatsAction.Request::new); + this.client = client; 
+ this.clusterService = clusterService; + this.ingestService = ingestService; + } + + @Override + protected void doExecute(Task task, + GetTrainedModelsStatsAction.Request request, + ActionListener listener) { + + GetTrainedModelsStatsAction.Response.Builder responseBuilder = new GetTrainedModelsStatsAction.Response.Builder(); + + ActionListener nodesStatsListener = ActionListener.wrap( + nodesStatsResponse -> { + Map modelIdIngestStats = inferenceIngestStatsByPipelineId(nodesStatsResponse, + pipelineIdsByModelIds(clusterService.state(), + ingestService, + responseBuilder.getExpandedIds())); + listener.onResponse(responseBuilder.setIngestStatsByModelId(modelIdIngestStats).build()); + }, + listener::onFailure + ); + + ActionListener>> idsListener = ActionListener.wrap( + tuple -> { + responseBuilder.setExpandedIds(tuple.v2()) + .setTotalModelCount(tuple.v1()); + String[] ingestNodes = ingestNodes(clusterService.state()); + NodesStatsRequest nodesStatsRequest = new NodesStatsRequest(ingestNodes).clear().ingest(true); + client.execute(NodesStatsAction.INSTANCE, nodesStatsRequest, nodesStatsListener); + }, + listener::onFailure + ); + + expandIds(request, idsListener); + } + + static Map inferenceIngestStatsByPipelineId(NodesStatsResponse response, + Map> modelIdToPipelineId) { + + Map ingestStatsMap = new HashMap<>(); + + modelIdToPipelineId.forEach((modelId, pipelineIds) -> { + List collectedStats = response.getNodes() + .stream() + .map(nodeStats -> ingestStatsForPipelineIds(nodeStats, pipelineIds)) + .collect(Collectors.toList()); + ingestStatsMap.put(modelId, mergeStats(collectedStats)); + }); + + return ingestStatsMap; + } + + + private void expandIds(GetTrainedModelsStatsAction.Request request, ActionListener>> idsListener) { + String[] tokens = Strings.tokenizeToStringArray(request.getResourceId(), ","); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder() + .sort(SortBuilders.fieldSort(request.getResourceIdField()) + // If there are no resources, 
there might be no mapping for the id field. + // This makes sure we don't get an error if that happens. + .unmappedType("long")) + .query(buildQuery(tokens, request.getResourceIdField())); + if (request.getPageParams() != null) { + sourceBuilder.from(request.getPageParams().getFrom()) + .size(request.getPageParams().getSize()); + } + sourceBuilder.trackTotalHits(true) + // we only care about the item IDS, there is no need to load large model definitions. + .fetchSource(TrainedModelConfig.MODEL_ID.getPreferredName(), null); + + IndicesOptions indicesOptions = SearchRequest.DEFAULT_INDICES_OPTIONS; + SearchRequest searchRequest = new SearchRequest(InferenceIndexConstants.INDEX_PATTERN) + .indicesOptions(IndicesOptions.fromOptions(true, + indicesOptions.allowNoIndices(), + indicesOptions.expandWildcardsOpen(), + indicesOptions.expandWildcardsClosed(), + indicesOptions)) + .source(sourceBuilder); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + searchRequest, + ActionListener.wrap( + response -> { + Set foundResourceIds = new LinkedHashSet<>(); + long totalHitCount = response.getHits().getTotalHits().value; + for (SearchHit hit : response.getHits().getHits()) { + Map docSource = hit.getSourceAsMap(); + if (docSource == null) { + continue; + } + Object idValue = docSource.get(TrainedModelConfig.MODEL_ID.getPreferredName()); + if (idValue instanceof String) { + foundResourceIds.add(idValue.toString()); + } + } + ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(tokens, request.isAllowNoResources()); + requiredMatches.filterMatchedIds(foundResourceIds); + if (requiredMatches.hasUnmatchedIds()) { + idsListener.onFailure(ExceptionsHelper.missingTrainedModel(requiredMatches.unmatchedIdsString())); + } else { + idsListener.onResponse(Tuple.tuple(totalHitCount, foundResourceIds)); + } + }, + idsListener::onFailure + ), + client::search); + + } + + private QueryBuilder buildQuery(String[] tokens, String resourceIdField) { + 
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelConfig.NAME)); + + if (Strings.isAllOrWildcard(tokens)) { + return boolQuery; + } + // If the resourceId is not _all or *, we should see if it is a comma delimited string with wild-cards + // e.g. id1,id2*,id3 + BoolQueryBuilder shouldQueries = new BoolQueryBuilder(); + List terms = new ArrayList<>(); + for (String token : tokens) { + if (Regex.isSimpleMatchPattern(token)) { + shouldQueries.should(QueryBuilders.wildcardQuery(resourceIdField, token)); + } else { + terms.add(token); + } + } + if (terms.isEmpty() == false) { + shouldQueries.should(QueryBuilders.termsQuery(resourceIdField, terms)); + } + + if (shouldQueries.should().isEmpty() == false) { + boolQuery.filter(shouldQueries); + } + return boolQuery; + } + + static String[] ingestNodes(final ClusterState clusterState) { + String[] ingestNodes = new String[clusterState.nodes().getIngestNodes().size()]; + Iterator nodeIterator = clusterState.nodes().getIngestNodes().keysIt(); + int i = 0; + while(nodeIterator.hasNext()) { + ingestNodes[i++] = nodeIterator.next(); + } + return ingestNodes; + } + + static Map> pipelineIdsByModelIds(ClusterState state, IngestService ingestService, Set modelIds) { + IngestMetadata ingestMetadata = state.metaData().custom(IngestMetadata.TYPE); + Map> pipelineIdsByModelIds = new HashMap<>(); + if (ingestMetadata == null) { + return pipelineIdsByModelIds; + } + + ingestMetadata.getPipelines().forEach((pipelineId, pipelineConfiguration) -> { + try { + Pipeline pipeline = Pipeline.create(pipelineId, + pipelineConfiguration.getConfigAsMap(), + ingestService.getProcessorFactories(), + ingestService.getScriptService()); + pipeline.getProcessors().forEach(processor -> { + if (processor instanceof InferenceProcessor) { + InferenceProcessor inferenceProcessor = (InferenceProcessor) processor; + if 
(modelIds.contains(inferenceProcessor.getModelId())) { + pipelineIdsByModelIds.computeIfAbsent(inferenceProcessor.getModelId(), + m -> new LinkedHashSet<>()).add(pipelineId); + } + } + }); + } catch (Exception ex) { + throw new ElasticsearchException("unexpected failure gathering pipeline information", ex); + } + }); + + return pipelineIdsByModelIds; + } + + static IngestStats ingestStatsForPipelineIds(NodeStats nodeStats, Set pipelineIds) { + IngestStats fullNodeStats = nodeStats.getIngestStats(); + Map> filteredProcessorStats = new HashMap<>(fullNodeStats.getProcessorStats()); + filteredProcessorStats.keySet().retainAll(pipelineIds); + List filteredPipelineStats = fullNodeStats.getPipelineStats() + .stream() + .filter(pipelineStat -> pipelineIds.contains(pipelineStat.getPipelineId())) + .collect(Collectors.toList()); + CounterMetric ingestCount = new CounterMetric(); + CounterMetric ingestTimeInMillis = new CounterMetric(); + CounterMetric ingestCurrent = new CounterMetric(); + CounterMetric ingestFailedCount = new CounterMetric(); + + filteredPipelineStats.forEach(pipelineStat -> { + IngestStats.Stats stats = pipelineStat.getStats(); + ingestCount.inc(stats.getIngestCount()); + ingestTimeInMillis.inc(stats.getIngestTimeInMillis()); + ingestCurrent.inc(stats.getIngestCurrent()); + ingestFailedCount.inc(stats.getIngestFailedCount()); + }); + + return new IngestStats( + new IngestStats.Stats(ingestCount.count(), ingestTimeInMillis.count(), ingestCurrent.count(), ingestFailedCount.count()), + filteredPipelineStats, + filteredProcessorStats); + } + + private static IngestStats mergeStats(List ingestStatsList) { + + Map pipelineStatsAcc = new LinkedHashMap<>(ingestStatsList.size()); + Map> processorStatsAcc = new LinkedHashMap<>(ingestStatsList.size()); + IngestStatsAccumulator totalStats = new IngestStatsAccumulator(); + ingestStatsList.forEach(ingestStats -> { + + ingestStats.getPipelineStats() + .forEach(pipelineStat -> + 
pipelineStatsAcc.computeIfAbsent(pipelineStat.getPipelineId(), + p -> new IngestStatsAccumulator()).inc(pipelineStat.getStats())); + + ingestStats.getProcessorStats() + .forEach((pipelineId, processorStat) -> { + Map processorAcc = processorStatsAcc.computeIfAbsent(pipelineId, + k -> new LinkedHashMap<>()); + processorStat.forEach(p -> + processorAcc.computeIfAbsent(p.getName(), + k -> new IngestStatsAccumulator()).inc(p.getStats())); + }); + + totalStats.inc(ingestStats.getTotalStats()); + }); + + List pipelineStatList = new ArrayList<>(pipelineStatsAcc.size()); + pipelineStatsAcc.forEach((pipelineId, accumulator) -> + pipelineStatList.add(new IngestStats.PipelineStat(pipelineId, accumulator.build()))); + + Map> processorStatList = new LinkedHashMap<>(processorStatsAcc.size()); + processorStatsAcc.forEach((pipelineId, accumulatorMap) -> { + List processorStats = new ArrayList<>(accumulatorMap.size()); + accumulatorMap.forEach((processorName, acc) -> processorStats.add(new IngestStats.ProcessorStat(processorName, acc.build()))); + processorStatList.put(pipelineId, processorStats); + }); + + return new IngestStats(totalStats.build(), pipelineStatList, processorStatList); + } + + private static class IngestStatsAccumulator { + CounterMetric ingestCount = new CounterMetric(); + CounterMetric ingestTimeInMillis = new CounterMetric(); + CounterMetric ingestCurrent = new CounterMetric(); + CounterMetric ingestFailedCount = new CounterMetric(); + + IngestStatsAccumulator inc(IngestStats.Stats s) { + ingestCount.inc(s.getIngestCount()); + ingestTimeInMillis.inc(s.getIngestTimeInMillis()); + ingestCurrent.inc(s.getIngestCurrent()); + ingestFailedCount.inc(s.getIngestFailedCount()); + return this; + } + + IngestStats.Stats build() { + return new IngestStats.Stats(ingestCount.count(), ingestTimeInMillis.count(), ingestCurrent.count(), ingestFailedCount.count()); + } + } + +} diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java new file mode 100644 index 0000000000000..100c8cfa2f922 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.inference; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Request.ALLOW_NO_MATCH; + +public class RestGetTrainedModelsStatsAction extends BaseRestHandler { + + public RestGetTrainedModelsStatsAction(RestController controller) { + controller.registerHandler( + GET, MachineLearning.BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID.getPreferredName() + "}/_stats", this); + controller.registerHandler(GET, MachineLearning.BASE_PATH + "inference/_stats", this); + } + + @Override + public String getName() { + return 
"ml_get_trained_models_stats_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String modelId = restRequest.param(TrainedModelConfig.MODEL_ID.getPreferredName()); + if (Strings.isNullOrEmpty(modelId)) { + modelId = MetaData.ALL; + } + GetTrainedModelsStatsAction.Request request = new GetTrainedModelsStatsAction.Request(modelId); + if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) { + request.setPageParams(new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), + restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); + } + request.setAllowNoResources(restRequest.paramAsBoolean(ALLOW_NO_MATCH.getPreferredName(), request.isAllowNoResources())); + return channel -> client.execute(GetTrainedModelsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java new file mode 100644 index 0000000000000..d00c5a8cab5f3 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -0,0 +1,282 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.IngestMetadata; +import org.elasticsearch.ingest.IngestService; +import org.elasticsearch.ingest.IngestStats; +import org.elasticsearch.ingest.PipelineConfiguration; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.plugins.IngestPlugin; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; +import org.junit.Before; + +import java.io.IOException; +import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportGetTrainedModelsStatsActionTests extends ESTestCase { + + private static class NotInferenceProcessor implements 
Processor { + + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + return ingestDocument; + } + + @Override + public String getType() { + return "not_inference"; + } + + @Override + public String getTag() { + return null; + } + + static class Factory implements Processor.Factory { + + @Override + public Processor create(Map processorFactories, String tag, Map config) { + return new NotInferenceProcessor(); + } + } + } + + private static final IngestPlugin SKINNY_INGEST_PLUGIN = new IngestPlugin() { + @Override + public Map getProcessors(Processor.Parameters parameters) { + Map factoryMap = new HashMap<>(); + factoryMap.put(InferenceProcessor.TYPE, + new InferenceProcessor.Factory(parameters.client, + parameters.ingestService.getClusterService(), + Settings.EMPTY, + parameters.ingestService)); + + factoryMap.put("not_inference", new NotInferenceProcessor.Factory()); + + return factoryMap; + } + }; + + private ClusterService clusterService; + private IngestService ingestService; + private Client client; + + @Before + public void setUpVariables() { + ThreadPool tp = mock(ThreadPool.class); + client = mock(Client.class); + clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, + Collections.singleton(InferenceProcessor.MAX_INFERENCE_PROCESSORS)); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + ingestService = new IngestService(clusterService, tp, null, null, + null, Collections.singletonList(SKINNY_INGEST_PLUGIN), client); + } + + + public void testInferenceIngestStatsByPipelineId() throws IOException { + List nodeStatsList = Arrays.asList( + buildNodeStats( + new IngestStats.Stats(2, 2, 3, 4), + Arrays.asList( + new IngestStats.PipelineStat( + "pipeline1", + new IngestStats.Stats(0, 0, 3, 1)), + new IngestStats.PipelineStat( + "pipeline2", + new IngestStats.Stats(1, 1, 0, 1)), + new IngestStats.PipelineStat( + "pipeline3", + new 
IngestStats.Stats(2, 1, 1, 1)) + ), + Arrays.asList( + Arrays.asList( + new IngestStats.ProcessorStat(InferenceProcessor.TYPE, new IngestStats.Stats(10, 1, 0, 0)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)), + new IngestStats.ProcessorStat(InferenceProcessor.TYPE, new IngestStats.Stats(100, 10, 0, 1)) + ), + Arrays.asList( + new IngestStats.ProcessorStat(InferenceProcessor.TYPE, new IngestStats.Stats(5, 1, 0, 0)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)) + ), + Arrays.asList( + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)) + ) + )), + buildNodeStats( + new IngestStats.Stats(15, 5, 3, 4), + Arrays.asList( + new IngestStats.PipelineStat( + "pipeline1", + new IngestStats.Stats(10, 1, 3, 1)), + new IngestStats.PipelineStat( + "pipeline2", + new IngestStats.Stats(1, 1, 0, 1)), + new IngestStats.PipelineStat( + "pipeline3", + new IngestStats.Stats(2, 1, 1, 1)) + ), + Arrays.asList( + Arrays.asList( + new IngestStats.ProcessorStat(InferenceProcessor.TYPE, new IngestStats.Stats(0, 0, 0, 0)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(0, 0, 0, 0)), + new IngestStats.ProcessorStat(InferenceProcessor.TYPE, new IngestStats.Stats(10, 1, 0, 0)) + ), + Arrays.asList( + new IngestStats.ProcessorStat(InferenceProcessor.TYPE, new IngestStats.Stats(5, 1, 0, 0)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)) + ), + Arrays.asList( + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)) + ) + )) + ); + + NodesStatsResponse response = new NodesStatsResponse(new ClusterName("_name"), nodeStatsList, Collections.emptyList()); + + Map> pipelineIdsByModelIds = new HashMap<>(){{ + put("trained_model_1", Collections.singleton("pipeline1")); + put("trained_model_2", new HashSet<>(Arrays.asList("pipeline1", "pipeline2"))); + }}; + Map ingestStatsMap = TransportGetTrainedModelsStatsAction.inferenceIngestStatsByPipelineId(response, + 
pipelineIdsByModelIds); + + assertThat(ingestStatsMap.keySet(), equalTo(new HashSet<>(Arrays.asList("trained_model_1", "trained_model_2")))); + + IngestStats expectedStatsModel1 = new IngestStats( + new IngestStats.Stats(10, 1, 6, 2), + Collections.singletonList(new IngestStats.PipelineStat("pipeline1", new IngestStats.Stats(10, 1, 6, 2))), + Collections.singletonMap("pipeline1", Arrays.asList( + new IngestStats.ProcessorStat("inference", new IngestStats.Stats(120, 12, 0, 1)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)))) + ); + + IngestStats expectedStatsModel2 = new IngestStats( + new IngestStats.Stats(12, 3, 6, 4), + Arrays.asList( + new IngestStats.PipelineStat("pipeline1", new IngestStats.Stats(10, 1, 6, 2)), + new IngestStats.PipelineStat("pipeline2", new IngestStats.Stats(2, 2, 0, 2))), + new HashMap<>() {{ + put("pipeline2", Arrays.asList( + new IngestStats.ProcessorStat("inference", new IngestStats.Stats(10, 2, 0, 0)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(20, 2, 0, 0)))); + put("pipeline1", Arrays.asList( + new IngestStats.ProcessorStat("inference", new IngestStats.Stats(120, 12, 0, 1)), + new IngestStats.ProcessorStat("grok", new IngestStats.Stats(10, 1, 0, 0)))); + }} + ); + + assertThat(ingestStatsMap, hasEntry("trained_model_1", expectedStatsModel1)); + assertThat(ingestStatsMap, hasEntry("trained_model_2", expectedStatsModel2)); + } + + public void testPipelineIdsByModelIds() throws IOException { + String modelId1 = "trained_model_1"; + String modelId2 = "trained_model_2"; + String modelId3 = "trained_model_3"; + Set modelIds = new HashSet<>(Arrays.asList(modelId1, modelId2, modelId3)); + + ClusterState clusterState = buildClusterStateWithModelReferences(modelId1, modelId2, modelId3); + + Map> pipelineIdsByModelIds = + TransportGetTrainedModelsStatsAction.pipelineIdsByModelIds(clusterState, ingestService, modelIds); + + assertThat(pipelineIdsByModelIds.keySet(), equalTo(modelIds)); + 
assertThat(pipelineIdsByModelIds, + hasEntry(modelId1, new HashSet<>(Arrays.asList("pipeline_with_model_" + modelId1 + 0, "pipeline_with_model_" + modelId1 + 1)))); + assertThat(pipelineIdsByModelIds, + hasEntry(modelId2, new HashSet<>(Arrays.asList("pipeline_with_model_" + modelId2 + 0, "pipeline_with_model_" + modelId2 + 1)))); + assertThat(pipelineIdsByModelIds, + hasEntry(modelId3, new HashSet<>(Arrays.asList("pipeline_with_model_" + modelId3 + 0, "pipeline_with_model_" + modelId3 + 1)))); + + } + + private static ClusterState buildClusterStateWithModelReferences(String... modelId) throws IOException { + Map configurations = new HashMap<>(modelId.length); + for (String id : modelId) { + configurations.put("pipeline_with_model_" + id + 0, newConfigurationWithInferenceProcessor(id, 0)); + configurations.put("pipeline_with_model_" + id + 1, newConfigurationWithInferenceProcessor(id, 1)); + } + for (int i = 0; i < 3; i++) { + configurations.put("pipeline_without_model_" + i, newConfigurationWithOutInferenceProcessor(i)); + } + IngestMetadata ingestMetadata = new IngestMetadata(configurations); + + return ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().putCustom(IngestMetadata.TYPE, ingestMetadata)) + .build(); + } + + private static PipelineConfiguration newConfigurationWithInferenceProcessor(String modelId, int num) throws IOException { + try(XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(Collections.singletonMap("processors", + Collections.singletonList( + Collections.singletonMap(InferenceProcessor.TYPE, + new HashMap() {{ + put(InferenceProcessor.MODEL_ID, modelId); + put("inference_config", Collections.singletonMap("regression", Collections.emptyMap())); + put("field_mappings", Collections.emptyMap()); + put("target_field", randomAlphaOfLength(10)); + }}))))) { + return new PipelineConfiguration("pipeline_with_model_" + modelId + num, + BytesReference.bytes(xContentBuilder), + XContentType.JSON); + } + } + + 
private static PipelineConfiguration newConfigurationWithOutInferenceProcessor(int i) throws IOException { + try(XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(Collections.singletonMap("processors", + Collections.singletonList(Collections.singletonMap("not_inference", Collections.emptyMap()))))) { + return new PipelineConfiguration("pipeline_without_model_" + i, BytesReference.bytes(xContentBuilder), XContentType.JSON); + } + } + + private static NodeStats buildNodeStats(IngestStats.Stats overallStats, + List pipelineNames, + List> processorStats) { + List pipelineids = pipelineNames.stream().map(IngestStats.PipelineStat::getPipelineId).collect(Collectors.toList()); + IngestStats ingestStats = new IngestStats( + overallStats, + pipelineNames, + IntStream.range(0, pipelineids.size()).boxed().collect(Collectors.toMap(pipelineids::get, processorStats::get))); + return new NodeStats(mock(DiscoveryNode.class), + Instant.now().toEpochMilli(), null, null, null, null, null, null, null, null, + null, null, null, ingestStats, null); + + } + +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.get_trained_models_stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.get_trained_models_stats.json new file mode 100644 index 0000000000000..703380c708703 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.get_trained_models_stats.json @@ -0,0 +1,48 @@ +{ + "ml.get_trained_models_stats":{ + "documentation":{ + "url":"TODO" + }, + "stability":"experimental", + "url":{ + "paths":[ + { + "path":"/_ml/inference/{model_id}/_stats", + "methods":[ + "GET" + ], + "parts":{ + "model_id":{ + "type":"string", + "description":"The ID of the trained models stats to fetch" + } + } + }, + { + "path":"/_ml/inference/_stats", + "methods":[ + "GET" + ] + } + ] + }, + "params":{ + "allow_no_match":{ + "type":"boolean", + "required":false, + "description":"Whether to ignore if a wildcard expression matches no trained models. 
(This includes `_all` string or when no trained models have been specified)", + "default":true + }, + "from":{ + "type":"int", + "description":"skips a number of trained models", + "default":0 + }, + "size":{ + "type":"int", + "description":"specifies a max number of trained models to get", + "default":100 + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml new file mode 100644 index 0000000000000..294907768ce69 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml @@ -0,0 +1,228 @@ +--- +"Test get-all stats given no trained models exist": + + - do: + ml.get_trained_models_stats: + model_id: "_all" + - match: { count: 0 } + - match: { trained_model_stats: [] } + + - do: + ml.get_trained_models_stats: + model_id: "*" + - match: { count: 0 } + - match: { trained_model_stats: [] } + +--- +"Test get stats given missing trained model": + + - do: + catch: missing + ml.get_trained_models_stats: + model_id: "missing-trained-model" +--- +"Test get stats given expression without matches and allow_no_match is false": + + - do: + catch: missing + ml.get_trained_models_stats: + model_id: "missing-trained-model*" + allow_no_match: false + +--- +"Test get stats given expression without matches and allow_no_match is true": + + - do: + ml.get_trained_models_stats: + model_id: "missing-trained-model*" + allow_no_match: true + - match: { count: 0 } + - match: { trained_model_stats: [] } +--- +"Test get stats given unused trained models": + + - do: + index: + id: trained_model_config-unused-regression-model-0 + index: .ml-inference-000001 + body: > + { + "model_id": "unused-regression-model", + "created_by": "ml_tests", + "version": "8.0.0", + "description": "empty model for tests", + "create_time": 0, + "model_version": 0, + "model_type": "local", + "doc_type": "trained_model_config" + } + - do: + 
index: + id: trained_model_config-unused-regression-model1-0 + index: .ml-inference-000001 + body: > + { + "model_id": "unused-regression-model1", + "created_by": "ml_tests", + "version": "8.0.0", + "description": "empty model for tests", + "create_time": 0, + "model_version": 0, + "model_type": "local", + "doc_type": "trained_model_config" + } + - do: + indices.refresh: {} + + - do: + ml.get_trained_models_stats: + model_id: "unused-regression-model" + + - match: { count: 1 } + + - do: + ml.get_trained_models_stats: + model_id: "_all" + - match: { count: 2 } + - match: { trained_model_stats.0.model_id: unused-regression-model } + - match: { trained_model_stats.0.pipeline_count: 0 } + - is_false: trained_model_stats.0.ingest_stats + - match: { trained_model_stats.1.model_id: unused-regression-model1 } + - match: { trained_model_stats.1.pipeline_count: 0 } + - is_false: trained_model_stats.1.ingest_stats + + - do: + ml.get_trained_models_stats: + model_id: "*" + - match: { count: 2 } + - match: { trained_model_stats.0.model_id: unused-regression-model } + - match: { trained_model_stats.0.pipeline_count: 0 } + - is_false: trained_model_stats.0.ingest_stats + - match: { trained_model_stats.1.model_id: unused-regression-model1 } + - match: { trained_model_stats.1.pipeline_count: 0 } + - is_false: trained_model_stats.1.ingest_stats + + - do: + ml.get_trained_models_stats: + model_id: "unused-regression-model*" + - match: { count: 2 } + - match: { trained_model_stats.0.model_id: unused-regression-model } + - match: { trained_model_stats.0.pipeline_count: 0 } + - is_false: trained_model_stats.0.ingest_stats + - match: { trained_model_stats.1.model_id: unused-regression-model1 } + - match: { trained_model_stats.1.pipeline_count: 0 } + - is_false: trained_model_stats.1.ingest_stats + + - do: + ml.get_trained_models_stats: + model_id: "unused-regression-model*" + size: 1 + - match: { count: 2 } + - match: { trained_model_stats.0.model_id: unused-regression-model } + - match: 
{ trained_model_stats.0.pipeline_count: 0 } + - is_false: trained_model_stats.0.ingest_stats + + - do: + ml.get_trained_models_stats: + model_id: "unused-regression-model*" + from: 1 + size: 1 + - match: { count: 2 } + - match: { trained_model_stats.0.model_id: unused-regression-model1 } + - match: { trained_model_stats.0.pipeline_count: 0 } + - is_false: trained_model_stats.0.ingest_stats +--- +"Test get stats given used trained model": + - do: + index: + id: trained_model_config-used-regression-model-0 + index: .ml-inference-000001 + body: > + { + "model_id": "used-regression-model", + "created_by": "ml_tests", + "version": "8.0.0", + "description": "empty model for tests", + "create_time": 0, + "model_version": 0, + "model_type": "local", + "doc_type": "trained_model_config" + } + - do: + indices.refresh: {} + + - do: + ingest.put_pipeline: + id: "regression-model-pipeline" + body: > + { + "processors": [ + { + "inference" : { + "model_id" : "used-regression-model", + "inference_config": {"regression": {}}, + "target_field": "regression_field", + "field_mappings": {} + } + } + ] + } + - match: { acknowledged: true } + + - do: + ingest.put_pipeline: + id: "regression-model-pipeline-1" + body: > + { + "processors": [ + { + "inference" : { + "model_id" : "used-regression-model", + "inference_config": {"regression": {}}, + "target_field": "regression_field", + "field_mappings": {} + } + } + ] + } + - match: { acknowledged: true } + + - do: + ml.get_trained_models_stats: + model_id: "used-regression-model" + + - match: { count: 1 } + - match: { trained_model_stats.0.model_id: used-regression-model } + - match: { trained_model_stats.0.pipeline_count: 2 } + - match: + trained_model_stats.0.ingest_stats.ingest.total: + count: 0 + time_in_millis: 0 + current: 0 + failed: 0 + + - match: + trained_model_stats.0.ingest_stats.ingest.pipelines.regression-model-pipeline: + count: 0 + time_in_millis: 0 + current: 0 + failed: 0 + processors: + - inference: + count: 0 + 
time_in_millis: 0 + current: 0 + failed: 0 + + - match: + trained_model_stats.0.ingest_stats.ingest.pipelines.regression-model-pipeline-1: + count: 0 + time_in_millis: 0 + current: 0 + failed: 0 + processors: + - inference: + count: 0 + time_in_millis: 0 + current: 0 + failed: 0 From 73a57e3a0a4ec68229e31d55f430f44c3eafa020 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Fri, 25 Oct 2019 12:55:35 -0400 Subject: [PATCH 2/6] updating tests and returned format --- .../action/GetTrainedModelsStatsAction.java | 3 - .../ml/qa/ml-with-security/build.gradle | 2 + .../test/ml/inference_stats_crud.yml | 220 +++++++++--------- 3 files changed, 111 insertions(+), 114 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java index b8ed168aea0d4..03bad58be3e2d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -34,7 +34,6 @@ public class GetTrainedModelsStatsAction extends ActionType 0) { - builder.startObject(INGEST_STATS.getPreferredName()); // Ingest stats is a fragment builder.value(ingestStats); - builder.endObject(); } builder.endObject(); return builder; diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 2dd63883b523a..a8481a9dee966 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -130,6 +130,8 @@ integTest.runner { 'ml/inference_crud/Test delete with missing model', 'ml/inference_crud/Test get given missing trained model', 'ml/inference_crud/Test get given expression without matches and allow_no_match is false', + 
'ml/inference_stats_crud/Test get stats given missing trained model', + 'ml/inference_stats_crud/Test get stats given expression without matches and allow_no_match is false', 'ml/jobs_crud/Test cannot create job with existing categorizer state document', 'ml/jobs_crud/Test cannot create job with existing quantiles document', 'ml/jobs_crud/Test cannot create job with existing result document', diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml index 294907768ce69..efc6b784dbeac 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_stats_crud.yml @@ -1,47 +1,27 @@ ---- -"Test get-all stats given no trained models exist": - - - do: - ml.get_trained_models_stats: - model_id: "_all" - - match: { count: 0 } - - match: { trained_model_stats: [] } - - - do: - ml.get_trained_models_stats: - model_id: "*" - - match: { count: 0 } - - match: { trained_model_stats: [] } - ---- -"Test get stats given missing trained model": - +setup: + - skip: + features: headers - do: - catch: missing - ml.get_trained_models_stats: - model_id: "missing-trained-model" ---- -"Test get stats given expression without matches and allow_no_match is false": - - - do: - catch: missing - ml.get_trained_models_stats: - model_id: "missing-trained-model*" - allow_no_match: false - ---- -"Test get stats given expression without matches and allow_no_match is true": - - - do: - ml.get_trained_models_stats: - model_id: "missing-trained-model*" - allow_no_match: true - - match: { count: 0 } - - match: { trained_model_stats: [] } ---- -"Test get stats given unused trained models": + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + index: + id: trained_model_config-unused-regression-model1-0 + index: .ml-inference-000001 + body: > + { + "model_id": "unused-regression-model1", + "created_by": "ml_tests", + "version": "8.0.0", + "description": "empty model for tests", + "create_time": 0, + "model_version": 0, + "model_type": "local", + "doc_type": "trained_model_config" + } - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser index: id: trained_model_config-unused-regression-model-0 index: .ml-inference-000001 @@ -57,12 +37,14 @@ "doc_type": "trained_model_config" } - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser index: - id: trained_model_config-unused-regression-model1-0 + id: trained_model_config-used-regression-model-0 index: .ml-inference-000001 body: > { - "model_id": "unused-regression-model1", + "model_id": "used-regression-model", "created_by": "ml_tests", "version": "8.0.0", "description": "empty model for tests", @@ -71,9 +53,76 @@ "model_type": "local", "doc_type": "trained_model_config" } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser indices.refresh: {} + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ingest.put_pipeline: + id: "regression-model-pipeline" + body: > + { + "processors": [ + { + "inference" : { + "model_id" : "used-regression-model", + "inference_config": {"regression": {}}, + "target_field": "regression_field", + "field_mappings": {} + } + } + ] + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + ingest.put_pipeline: + id: "regression-model-pipeline-1" + body: > + { + "processors": [ + { + "inference" : { + "model_id" : "used-regression-model", + "inference_config": {"regression": {}}, + "target_field": "regression_field", + "field_mappings": {} + } + } + ] + } +--- +"Test get stats given missing trained model": + + - do: + catch: missing + ml.get_trained_models_stats: + model_id: "missing-trained-model" +--- +"Test get stats given expression without matches and allow_no_match is false": + + - do: + catch: missing + ml.get_trained_models_stats: + model_id: "missing-trained-model*" + allow_no_match: false + +--- +"Test get stats given expression without matches and allow_no_match is true": + + - do: + ml.get_trained_models_stats: + model_id: "missing-trained-model*" + allow_no_match: true + - match: { count: 0 } + - match: { trained_model_stats: [] } +--- +"Test get stats given trained models": + - do: ml.get_trained_models_stats: model_id: "unused-regression-model" @@ -83,24 +132,28 @@ - do: ml.get_trained_models_stats: model_id: "_all" - - match: { count: 2 } + - match: { count: 3 } - match: { trained_model_stats.0.model_id: unused-regression-model } - match: { trained_model_stats.0.pipeline_count: 0 } - - is_false: trained_model_stats.0.ingest_stats + - is_false: trained_model_stats.0.ingest - match: { trained_model_stats.1.model_id: unused-regression-model1 } - match: { trained_model_stats.1.pipeline_count: 0 } - - is_false: trained_model_stats.1.ingest_stats + - is_false: trained_model_stats.1.ingest + - match: { trained_model_stats.2.pipeline_count: 2 } + - is_true: trained_model_stats.2.ingest - do: ml.get_trained_models_stats: model_id: "*" - - match: { count: 2 } + - match: { count: 3 } - match: { trained_model_stats.0.model_id: unused-regression-model } - match: { trained_model_stats.0.pipeline_count: 0 } - - is_false: trained_model_stats.0.ingest_stats + - is_false: trained_model_stats.0.ingest - match: { 
trained_model_stats.1.model_id: unused-regression-model1 } - match: { trained_model_stats.1.pipeline_count: 0 } - - is_false: trained_model_stats.1.ingest_stats + - is_false: trained_model_stats.1.ingest + - match: { trained_model_stats.2.pipeline_count: 2 } + - is_true: trained_model_stats.2.ingest - do: ml.get_trained_models_stats: @@ -108,10 +161,10 @@ - match: { count: 2 } - match: { trained_model_stats.0.model_id: unused-regression-model } - match: { trained_model_stats.0.pipeline_count: 0 } - - is_false: trained_model_stats.0.ingest_stats + - is_false: trained_model_stats.0.ingest - match: { trained_model_stats.1.model_id: unused-regression-model1 } - match: { trained_model_stats.1.pipeline_count: 0 } - - is_false: trained_model_stats.1.ingest_stats + - is_false: trained_model_stats.1.ingest - do: ml.get_trained_models_stats: @@ -120,7 +173,7 @@ - match: { count: 2 } - match: { trained_model_stats.0.model_id: unused-regression-model } - match: { trained_model_stats.0.pipeline_count: 0 } - - is_false: trained_model_stats.0.ingest_stats + - is_false: trained_model_stats.0.ingest - do: ml.get_trained_models_stats: @@ -130,62 +183,7 @@ - match: { count: 2 } - match: { trained_model_stats.0.model_id: unused-regression-model1 } - match: { trained_model_stats.0.pipeline_count: 0 } - - is_false: trained_model_stats.0.ingest_stats ---- -"Test get stats given used trained model": - - do: - index: - id: trained_model_config-used-regression-model-0 - index: .ml-inference-000001 - body: > - { - "model_id": "used-regression-model", - "created_by": "ml_tests", - "version": "8.0.0", - "description": "empty model for tests", - "create_time": 0, - "model_version": 0, - "model_type": "local", - "doc_type": "trained_model_config" - } - - do: - indices.refresh: {} - - - do: - ingest.put_pipeline: - id: "regression-model-pipeline" - body: > - { - "processors": [ - { - "inference" : { - "model_id" : "used-regression-model", - "inference_config": {"regression": {}}, - 
"target_field": "regression_field", - "field_mappings": {} - } - } - ] - } - - match: { acknowledged: true } - - - do: - ingest.put_pipeline: - id: "regression-model-pipeline-1" - body: > - { - "processors": [ - { - "inference" : { - "model_id" : "used-regression-model", - "inference_config": {"regression": {}}, - "target_field": "regression_field", - "field_mappings": {} - } - } - ] - } - - match: { acknowledged: true } + - is_false: trained_model_stats.0.ingest - do: ml.get_trained_models_stats: @@ -195,14 +193,14 @@ - match: { trained_model_stats.0.model_id: used-regression-model } - match: { trained_model_stats.0.pipeline_count: 2 } - match: - trained_model_stats.0.ingest_stats.ingest.total: + trained_model_stats.0.ingest.total: count: 0 time_in_millis: 0 current: 0 failed: 0 - match: - trained_model_stats.0.ingest_stats.ingest.pipelines.regression-model-pipeline: + trained_model_stats.0.ingest.pipelines.regression-model-pipeline: count: 0 time_in_millis: 0 current: 0 @@ -215,7 +213,7 @@ failed: 0 - match: - trained_model_stats.0.ingest_stats.ingest.pipelines.regression-model-pipeline-1: + trained_model_stats.0.ingest.pipelines.regression-model-pipeline-1: count: 0 time_in_millis: 0 current: 0 From 660439d1cd6a36fe5797fb98046a03443c18a29e Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Fri, 25 Oct 2019 13:35:05 -0400 Subject: [PATCH 3/6] setting origin of stats request --- .../xpack/ml/action/TransportGetTrainedModelsStatsAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index 150e8609712a7..a0b26a0e2c1da 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -100,7 +100,7 @@ protected void doExecute(Task task, .setTotalModelCount(tuple.v1()); String[] ingestNodes = ingestNodes(clusterService.state()); NodesStatsRequest nodesStatsRequest = new NodesStatsRequest(ingestNodes).clear().ingest(true); - client.execute(NodesStatsAction.INSTANCE, nodesStatsRequest, nodesStatsListener); + executeAsyncWithOrigin(client, ML_ORIGIN, NodesStatsAction.INSTANCE, nodesStatsRequest, nodesStatsListener); }, listener::onFailure ); From 367033db40db115cf7b141a85d3f837edd5b3d6d Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Tue, 29 Oct 2019 07:27:41 -0400 Subject: [PATCH 4/6] addressing PR comments --- .../xpack/core/ml/action/GetTrainedModelsStatsAction.java | 7 ++++++- .../ml/action/TransportGetTrainedModelsStatsAction.java | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java index 03bad58be3e2d..f7b1ad13a807b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionType; import org.elasticsearch.client.ElasticsearchClient; @@ -19,6 +20,7 @@ import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse; import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import 
org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.ArrayList; @@ -85,6 +87,9 @@ public static class TrainedModelStats implements ToXContentObject, Writeable { public TrainedModelStats(String modelId, IngestStats ingestStats, int pipelineCount) { this.modelId = Objects.requireNonNull(modelId); this.ingestStats = ingestStats == null ? EMPTY_INGEST_STATS : ingestStats; + if (pipelineCount < 0) { + throw new ElasticsearchException("[{}] must be greater than or equal to 0", PIPELINE_COUNT.getPreferredName()); + } this.pipelineCount = pipelineCount; } @@ -101,7 +106,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(PIPELINE_COUNT.getPreferredName(), pipelineCount); if (pipelineCount > 0) { // Ingest stats is a fragment - builder.value(ingestStats); + ingestStats.toXContent(builder, params); } builder.endObject(); return builder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index a0b26a0e2c1da..773577d92d1a0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -138,7 +138,7 @@ private void expandIds(GetTrainedModelsStatsAction.Request request, ActionListen .size(request.getPageParams().getSize()); } sourceBuilder.trackTotalHits(true) - // we only care about the item IDS, there is no need to load large model definitions. + // we only care about the item IDs, there is no need to load large model definitions.
.fetchSource(TrainedModelConfig.MODEL_ID.getPreferredName(), null); IndicesOptions indicesOptions = SearchRequest.DEFAULT_INDICES_OPTIONS; From efb4062e355537c1b1ba4fefc7309808bbdbc1aa Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Tue, 29 Oct 2019 07:41:22 -0400 Subject: [PATCH 5/6] removing unused import --- .../xpack/core/ml/action/GetTrainedModelsStatsAction.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java index f7b1ad13a807b..f3cb43e8ef790 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetTrainedModelsStatsAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse; import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.ArrayList; From 9044262dab4ed201f9c14c6729d711046f4ea1b1 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Tue, 29 Oct 2019 08:49:47 -0400 Subject: [PATCH 6/6] fixing method call --- .../xpack/ml/integration/InferenceIngestIT.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java index 3533afe947535..1099c70ee238a 100644 --- 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java @@ -31,15 +31,13 @@ public class InferenceIngestIT extends MlNativeAutodetectIntegTestCase { @Before public void createBothModels() { - assertThat(client().prepareIndex(InferenceIndexConstants.LATEST_INDEX_NAME, - "_doc", - "test_classification") + assertThat(client().prepareIndex(InferenceIndexConstants.LATEST_INDEX_NAME) + .setId("test_classification") .setSource(CLASSIFICATION_MODEL, XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get().status(), equalTo(RestStatus.CREATED)); - assertThat(client().prepareIndex(InferenceIndexConstants.LATEST_INDEX_NAME, - "_doc", - "test_regression") + assertThat(client().prepareIndex(InferenceIndexConstants.LATEST_INDEX_NAME) + .setId("test_regression") .setSource(REGRESSION_MODEL, XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get().status(), equalTo(RestStatus.CREATED));