Merged
23 commits
ee48686
[ML] Gathering inference stats in localModel and loading service
benwtrent Feb 24, 2020
0112705
Merge branch 'master' into feature/ml-inference-stats-collection
benwtrent Mar 12, 2020
6b45143
making stats loading failure fail the listeners
benwtrent Mar 12, 2020
3afa69e
Merge branch 'master' into feature/ml-inference-stats-collection
benwtrent Mar 12, 2020
c406e8e
allowing missing stats index
benwtrent Mar 12, 2020
b7bda48
addressing pr comments
benwtrent Mar 20, 2020
6bf2263
Merge branch 'master' into feature/ml-inference-stats-collection
benwtrent Mar 20, 2020
bf4051a
Merge branch 'master' into feature/ml-inference-stats-collection
benwtrent Mar 31, 2020
0d6ba92
addressing pr comments
benwtrent Mar 31, 2020
d2aecfa
Merge branch 'master' into feature/ml-inference-stats-collection
elasticmachine Mar 31, 2020
9faca62
fixing style checks
benwtrent Mar 31, 2020
5b0cf39
Merge branch 'feature/ml-inference-stats-collection' of github.com:be…
benwtrent Mar 31, 2020
f5f2821
Merge branch 'master' into feature/ml-inference-stats-collection
elasticmachine Mar 31, 2020
fe44488
Update InferenceIngestIT.java
benwtrent Apr 1, 2020
650947b
Merge branch 'master' into feature/ml-inference-stats-collection
elasticmachine Apr 1, 2020
c2c1522
Merge branch 'master' into feature/ml-inference-stats-collection
benwtrent Apr 2, 2020
5f86f95
addressing PR comments
benwtrent Apr 2, 2020
0360c3b
incrementally updating stats instead of overwriting
benwtrent Apr 3, 2020
8802fc4
Merge remote-tracking branch 'upstream/master' into feature/ml-infere…
benwtrent Apr 3, 2020
0cca932
fixing bwc serialization versions
benwtrent Apr 3, 2020
ff15e97
minor fixes
benwtrent Apr 3, 2020
74b95d5
handling situation where aggs are null
benwtrent Apr 3, 2020
1d7abd5
fixing stats queueing
benwtrent Apr 3, 2020
GetTrainedModelsStatsAction.java
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.ml.action;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.client.ElasticsearchClient;
@@ -20,6 +21,7 @@
import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats;

import java.io.IOException;
import java.util.ArrayList;
@@ -37,6 +39,7 @@ public class GetTrainedModelsStatsAction extends ActionType<GetTrainedModelsStatsAction.Response>

public static final ParseField MODEL_ID = new ParseField("model_id");
public static final ParseField PIPELINE_COUNT = new ParseField("pipeline_count");
public static final ParseField INFERENCE_STATS = new ParseField("inference_stats");

private GetTrainedModelsStatsAction() {
super(NAME, GetTrainedModelsStatsAction.Response::new);
@@ -78,25 +81,32 @@ public static class Response extends AbstractGetResourcesResponse<Response.TrainedModelStats>
public static class TrainedModelStats implements ToXContentObject, Writeable {
private final String modelId;
private final IngestStats ingestStats;
private final InferenceStats inferenceStats;
private final int pipelineCount;

private static final IngestStats EMPTY_INGEST_STATS = new IngestStats(new IngestStats.Stats(0, 0, 0, 0),
Collections.emptyList(),
Collections.emptyMap());

public TrainedModelStats(String modelId, IngestStats ingestStats, int pipelineCount) {
public TrainedModelStats(String modelId, IngestStats ingestStats, int pipelineCount, InferenceStats inferenceStats) {
this.modelId = Objects.requireNonNull(modelId);
this.ingestStats = ingestStats == null ? EMPTY_INGEST_STATS : ingestStats;
if (pipelineCount < 0) {
throw new ElasticsearchException("[{}] must be greater than or equal to 0", PIPELINE_COUNT.getPreferredName());
}
this.pipelineCount = pipelineCount;
this.inferenceStats = inferenceStats;
}

public TrainedModelStats(StreamInput in) throws IOException {
modelId = in.readString();
ingestStats = new IngestStats(in);
pipelineCount = in.readVInt();
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
this.inferenceStats = in.readOptionalWriteable(InferenceStats::new);
} else {
this.inferenceStats = null;
}
}

public String getModelId() {
@@ -120,6 +130,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
// Ingest stats is a fragment
ingestStats.toXContent(builder, params);
}
if (this.inferenceStats != null) {
builder.field(INFERENCE_STATS.getPreferredName(), this.inferenceStats);
}
builder.endObject();
return builder;
}
@@ -129,11 +142,14 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeString(modelId);
ingestStats.writeTo(out);
out.writeVInt(pipelineCount);
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
out.writeOptionalWriteable(this.inferenceStats);
}
}

@Override
public int hashCode() {
return Objects.hash(modelId, ingestStats, pipelineCount);
return Objects.hash(modelId, ingestStats, pipelineCount, inferenceStats);
}

@Override
@@ -147,7 +163,8 @@ public boolean equals(Object obj) {
TrainedModelStats other = (TrainedModelStats) obj;
return Objects.equals(this.modelId, other.modelId)
&& Objects.equals(this.ingestStats, other.ingestStats)
&& Objects.equals(this.pipelineCount, other.pipelineCount);
&& Objects.equals(this.pipelineCount, other.pipelineCount)
&& Objects.equals(this.inferenceStats, other.inferenceStats);
}
}

@@ -171,6 +188,7 @@ public static class Builder {
private long totalModelCount;
private Set<String> expandedIds;
private Map<String, IngestStats> ingestStatsMap;
private Map<String, InferenceStats> inferenceStatsMap;

public Builder setTotalModelCount(long totalModelCount) {
this.totalModelCount = totalModelCount;
@@ -191,13 +209,23 @@ public Builder setIngestStatsByModelId(Map<String, IngestStats> ingestStatsByModelId) {
return this;
}

public Builder setInferenceStatsByModelId(Map<String, InferenceStats> inferenceStatsByModelId) {
this.inferenceStatsMap = inferenceStatsByModelId;
return this;
}

public Response build() {
List<TrainedModelStats> trainedModelStats = new ArrayList<>(expandedIds.size());
expandedIds.forEach(id -> {
IngestStats ingestStats = ingestStatsMap.get(id);
trainedModelStats.add(new TrainedModelStats(id, ingestStats, ingestStats == null ?
0 :
ingestStats.getPipelineStats().size()));
InferenceStats inferenceStats = inferenceStatsMap.get(id);
trainedModelStats.add(new TrainedModelStats(
id,
ingestStats,
ingestStats == null ?
0 :
ingestStats.getPipelineStats().size(),
inferenceStats));
});
trainedModelStats.sort(Comparator.comparing(TrainedModelStats::getModelId));
return new Response(new QueryPage<>(trainedModelStats, totalModelCount, RESULTS_FIELD));
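Note on the change above: the new inference_stats field is only (de)serialized when the other node is on or after Version.V_7_8_0, so mixed-version clusters keep working (see commit 0cca932, "fixing bwc serialization versions"). The following is a minimal, illustrative sketch of that pattern using the same StreamInput/StreamOutput/Writeable APIs shown in the diff; the ExampleStats class is hypothetical and not part of the PR.

// Illustrative sketch, not part of the PR: version-gated wire serialization of a new optional field.
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats;

import java.io.IOException;

public class ExampleStats implements Writeable {

    private final String modelId;
    private final InferenceStats inferenceStats; // added in 7.8.0, may be null

    public ExampleStats(StreamInput in) throws IOException {
        this.modelId = in.readString();
        // Older senders (< 7.8.0) never wrote the field, so default it to null.
        this.inferenceStats = in.getVersion().onOrAfter(Version.V_7_8_0)
            ? in.readOptionalWriteable(InferenceStats::new)
            : null;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(modelId);
        // Only write the new field to nodes that know how to read it.
        if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
            out.writeOptionalWriteable(inferenceStats);
        }
    }
}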
InferenceStats.java (new file)
@@ -0,0 +1,249 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.inference.trainedmodel;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.common.time.TimeUtils;
import org.elasticsearch.xpack.core.ml.utils.ToXContentParams;

import java.io.IOException;
import java.time.Instant;
import java.util.Objects;
import java.util.concurrent.atomic.LongAdder;

public class InferenceStats implements ToXContentObject, Writeable {

public static final String NAME = "inference_stats";
public static final ParseField MISSING_ALL_FIELDS_COUNT = new ParseField("missing_all_fields_count");
public static final ParseField INFERENCE_COUNT = new ParseField("inference_count");
public static final ParseField MODEL_ID = new ParseField("model_id");
public static final ParseField NODE_ID = new ParseField("node_id");
public static final ParseField FAILURE_COUNT = new ParseField("failure_count");
public static final ParseField TYPE = new ParseField("type");
public static final ParseField TIMESTAMP = new ParseField("time_stamp");

public static final ConstructingObjectParser<InferenceStats, Void> PARSER = new ConstructingObjectParser<>(
NAME,
true,
a -> new InferenceStats((Long)a[0], (Long)a[1], (Long)a[2], (String)a[3], (String)a[4], (Instant)a[5])
);
static {
PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_ALL_FIELDS_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), INFERENCE_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILURE_COUNT);
PARSER.declareString(ConstructingObjectParser.constructorArg(), MODEL_ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_ID);
PARSER.declareField(ConstructingObjectParser.constructorArg(),
p -> TimeUtils.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()),
TIMESTAMP,
ObjectParser.ValueType.VALUE);
}
public static InferenceStats emptyStats(String modelId, String nodeId) {
return new InferenceStats(0L, 0L, 0L, modelId, nodeId, Instant.now());
}

public static String docId(String modelId, String nodeId) {
return NAME + "-" + modelId + "-" + nodeId;
}

private final long missingAllFieldsCount;
private final long inferenceCount;
private final long failureCount;
private final String modelId;
private final String nodeId;
private final Instant timeStamp;

private InferenceStats(Long missingAllFieldsCount,
Long inferenceCount,
Long failureCount,
String modelId,
String nodeId,
Instant instant) {
this(unbox(missingAllFieldsCount),
unbox(inferenceCount),
unbox(failureCount),
modelId,
nodeId,
instant);
}

public InferenceStats(long missingAllFieldsCount,
long inferenceCount,
long failureCount,
String modelId,
String nodeId,
Instant timeStamp) {
this.missingAllFieldsCount = missingAllFieldsCount;
this.inferenceCount = inferenceCount;
this.failureCount = failureCount;
this.modelId = modelId;
this.nodeId = nodeId;
this.timeStamp = timeStamp == null ?
Instant.ofEpochMilli(Instant.now().toEpochMilli()) :
Instant.ofEpochMilli(timeStamp.toEpochMilli());
}

public InferenceStats(StreamInput in) throws IOException {
this.missingAllFieldsCount = in.readVLong();
this.inferenceCount = in.readVLong();
this.failureCount = in.readVLong();
this.modelId = in.readOptionalString();
this.nodeId = in.readOptionalString();
this.timeStamp = in.readInstant();
}

public long getMissingAllFieldsCount() {
return missingAllFieldsCount;
}

public long getInferenceCount() {
return inferenceCount;
}

public long getFailureCount() {
return failureCount;
}

public String getModelId() {
return modelId;
}

public String getNodeId() {
return nodeId;
}

public Instant getTimeStamp() {
return timeStamp;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false)) {
assert modelId != null : "model_id cannot be null when storing inference stats";
assert nodeId != null : "node_id cannot be null when storing inference stats";
builder.field(TYPE.getPreferredName(), NAME);
builder.field(MODEL_ID.getPreferredName(), modelId);
builder.field(NODE_ID.getPreferredName(), nodeId);
}
builder.field(FAILURE_COUNT.getPreferredName(), failureCount);
builder.field(INFERENCE_COUNT.getPreferredName(), inferenceCount);
builder.field(MISSING_ALL_FIELDS_COUNT.getPreferredName(), missingAllFieldsCount);
builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timeStamp.toEpochMilli());
builder.endObject();
return builder;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InferenceStats that = (InferenceStats) o;
return missingAllFieldsCount == that.missingAllFieldsCount
&& inferenceCount == that.inferenceCount
&& failureCount == that.failureCount
&& Objects.equals(modelId, that.modelId)
&& Objects.equals(nodeId, that.nodeId)
&& Objects.equals(timeStamp, that.timeStamp);
}

@Override
public int hashCode() {
return Objects.hash(missingAllFieldsCount, inferenceCount, failureCount, modelId, nodeId, timeStamp);
}

@Override
public String toString() {
return "InferenceStats{" +
"missingAllFieldsCount=" + missingAllFieldsCount +
", inferenceCount=" + inferenceCount +
", failureCount=" + failureCount +
", modelId='" + modelId + '\'' +
", nodeId='" + nodeId + '\'' +
", timeStamp=" + timeStamp +
'}';
}

private static long unbox(@Nullable Long value) {
return value == null ? 0L : value;
}

public static Accumulator accumulator(InferenceStats stats) {
return new Accumulator(stats);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(this.missingAllFieldsCount);
out.writeVLong(this.inferenceCount);
out.writeVLong(this.failureCount);
out.writeOptionalString(this.modelId);
out.writeOptionalString(this.nodeId);
out.writeInstant(timeStamp);
}

public static class Accumulator {

private final LongAdder missingFieldsAccumulator = new LongAdder();
private final LongAdder inferenceAccumulator = new LongAdder();
private final LongAdder failureCountAccumulator = new LongAdder();
private final String modelId;
private final String nodeId;

public Accumulator(String modelId, String nodeId) {
this.modelId = modelId;
this.nodeId = nodeId;
}

public Accumulator(InferenceStats previousStats) {
this.modelId = previousStats.modelId;
this.nodeId = previousStats.nodeId;
this.missingFieldsAccumulator.add(previousStats.missingAllFieldsCount);
this.inferenceAccumulator.add(previousStats.inferenceCount);
this.failureCountAccumulator.add(previousStats.failureCount);
}

public Accumulator merge(InferenceStats otherStats) {
this.missingFieldsAccumulator.add(otherStats.missingAllFieldsCount);
this.inferenceAccumulator.add(otherStats.inferenceCount);
this.failureCountAccumulator.add(otherStats.failureCount);
return this;
}

public void incMissingFields() {
this.missingFieldsAccumulator.increment();
}

public void incInference() {
this.inferenceAccumulator.increment();
}

public void incFailure() {
this.failureCountAccumulator.increment();
}

public InferenceStats currentStats() {
return currentStats(Instant.now());
}

public InferenceStats currentStats(Instant timeStamp) {
return new InferenceStats(missingFieldsAccumulator.longValue(),
inferenceAccumulator.longValue(),
failureCountAccumulator.longValue(),
modelId,
nodeId,
timeStamp);
}
}
}
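Note on the Accumulator above: it is what lets stats be updated incrementally rather than overwritten (see commit 0360c3b, "incrementally updating stats instead of overwriting"). Below is a minimal usage sketch, assuming only the public API shown in this diff; the model and node ids are made up for illustration.

// Illustrative usage sketch, not part of the PR.
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats;

import java.time.Instant;

public class AccumulatorExample {
    public static void main(String[] args) {
        InferenceStats.Accumulator accumulator =
            new InferenceStats.Accumulator("my-model", "node-1");

        // Hot path: LongAdder-backed counters, cheap to increment from many threads.
        accumulator.incInference();
        accumulator.incInference();
        accumulator.incMissingFields(); // document had none of the model's input fields
        accumulator.incFailure();       // an inference call failed

        // Periodic persistence: take an immutable snapshot without resetting the counters.
        InferenceStats snapshot = accumulator.currentStats(Instant.now());
        System.out.println(snapshot);   // uses InferenceStats#toString
    }
}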