[ML] Job in Index: Enable integ tests #34851
The first file changed wraps the cluster-state datafeed expansion in a try/catch:

```diff
@@ -98,12 +98,15 @@ Map<String, DatafeedConfig> expandClusterStateDatafeeds(String datafeedExpressio
                                                          ClusterState clusterState) {

         Map<String, DatafeedConfig> configById = new HashMap<>();

-        MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState);
-        Set<String> expandedDatafeedIds = mlMetadata.expandDatafeedIds(datafeedExpression, allowNoDatafeeds);
-
-        for (String expandedDatafeedId : expandedDatafeedIds) {
-            configById.put(expandedDatafeedId, mlMetadata.getDatafeed(expandedDatafeedId));
+        try {
+            MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState);
+            Set<String> expandedDatafeedIds = mlMetadata.expandDatafeedIds(datafeedExpression, allowNoDatafeeds);
+
+            for (String expandedDatafeedId : expandedDatafeedIds) {
+                configById.put(expandedDatafeedId, mlMetadata.getDatafeed(expandedDatafeedId));
+            }
+        } catch (Exception e) {
+            // ignore
         }

         return configById;
```

Review comment on the `// ignore` line: Based on your comment above it sounds like exceptions are unexpected here? If so, we could …

Author (Member) reply: The purpose here is to check that the job is not defined in both the cluster state and the index, and to error in that case. Unfortunately `expandDatafeedIds` throws if the argument is a wildcard (`foo*`) and there is no match.
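To illustrate the behaviour described in that reply, here is a minimal, self-contained sketch (the class and method below are invented for illustration and are not the real `MlMetadata` API) of an ID expander that throws when a wildcard expression matches nothing and `allowNoDatafeeds` is false, which is why the caller above catches and ignores the exception:

```java
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;

// Hypothetical stand-in for the cluster-state datafeed ID expansion described above.
class DatafeedIdExpander {

    private final Set<String> knownDatafeedIds;

    DatafeedIdExpander(Set<String> knownDatafeedIds) {
        this.knownDatafeedIds = knownDatafeedIds;
    }

    SortedSet<String> expand(String expression, boolean allowNoDatafeeds) {
        SortedSet<String> matches;
        if (expression.endsWith("*")) {
            // Simple prefix wildcard, e.g. "foo*"
            String prefix = expression.substring(0, expression.length() - 1);
            matches = knownDatafeedIds.stream()
                    .filter(id -> id.startsWith(prefix))
                    .collect(Collectors.toCollection(TreeSet::new));
        } else {
            matches = new TreeSet<>();
            if (knownDatafeedIds.contains(expression)) {
                matches.add(expression);
            }
        }
        // The behaviour the author describes: an unmatched expression throws
        // unless allowNoDatafeeds suppresses the error.
        if (matches.isEmpty() && allowNoDatafeeds == false) {
            throw new IllegalArgumentException("No datafeed matches [" + expression + "]");
        }
        return matches;
    }

    public static void main(String[] args) {
        DatafeedIdExpander expander = new DatafeedIdExpander(Set.of("datafeed-1", "datafeed-2"));
        System.out.println(expander.expand("datafeed-*", false)); // [datafeed-1, datafeed-2]
        System.out.println(expander.expand("foo*", true));        // [] - suppressed
        System.out.println(expander.expand("foo*", false));       // throws IllegalArgumentException
    }
}
```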
The second file in the diff adds an immediate refresh policy to the config-index writes, switches `expandDatafeedIds` to return a `SortedSet`, and updates the doc value field requests:

```diff
@@ -21,6 +21,7 @@
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -43,6 +44,7 @@
 import org.elasticsearch.index.query.WildcardQueryBuilder;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
@@ -62,6 +64,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 import java.util.function.BiConsumer;
 import java.util.stream.Collectors;
@@ -114,6 +118,7 @@ public void putDatafeedConfig(DatafeedConfig config, Map<String, String> headers
                 ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId))
                 .setSource(source)
                 .setOpType(DocWriteRequest.OpType.CREATE)
+                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
                 .request();

         executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(
@@ -181,19 +186,20 @@ public void onFailure(Exception e) {
     public void findDatafeedsForJobIds(Collection<String> jobIds, ActionListener<Set<String>> listener) {
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedJobIdsQuery(jobIds));
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName());
+        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                 .setSize(jobIds.size())
                 .setSource(sourceBuilder).request();

         executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest,
                 ActionListener.<SearchResponse>wrap(
                         response -> {
                             Set<String> datafeedIds = new HashSet<>();
-                            SearchHit[] hits = response.getHits().getHits();
                             // There cannot be more than one datafeed per job
-                            assert hits.length <= jobIds.size();
+                            assert response.getHits().totalHits <= jobIds.size();
+                            SearchHit[] hits = response.getHits().getHits();

                             for (SearchHit hit : hits) {
                                 datafeedIds.add(hit.field(DatafeedConfig.ID.getPreferredName()).getValue());
@@ -214,6 +220,7 @@ public void findDatafeedsForJobIds(Collection<String> jobIds, ActionListener<Set
     public void deleteDatafeedConfig(String datafeedId, ActionListener<DeleteResponse> actionListener) {
         DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(),
                 ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId));
+        request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
             @Override
             public void onResponse(DeleteResponse deleteResponse) {
@@ -307,6 +314,7 @@ private void indexUpdatedConfig(DatafeedConfig updatedConfig, long version, Acti
                 ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(updatedConfig.getId()))
                 .setSource(updatedSource)
                 .setVersion(version)
+                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
                 .request();

         executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, listener);
@@ -341,12 +349,12 @@ private void indexUpdatedConfig(DatafeedConfig updatedConfig, long version, Acti
      * wildcard then setting this true will not suppress the exception
      * @param listener The expanded datafeed IDs listener
      */
-    public void expandDatafeedIds(String expression, boolean allowNoDatafeeds, ActionListener<Set<String>> listener) {
+    public void expandDatafeedIds(String expression, boolean allowNoDatafeeds, ActionListener<SortedSet<String>> listener) {
         String [] tokens = ExpandedIdsMatcher.tokenizeExpression(expression);
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedIdQuery(tokens));
         sourceBuilder.sort(DatafeedConfig.ID.getPreferredName());
         sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName());
+        sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), DocValueFieldsContext.USE_DEFAULT_FORMAT);

         SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
                 .setIndicesOptions(IndicesOptions.lenientExpandOpen())
@@ -357,7 +365,7 @@ public void expandDatafeedIds(String expression, boolean allowNoDatafeeds, Actio
         executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest,
                 ActionListener.<SearchResponse>wrap(
                         response -> {
-                            Set<String> datafeedIds = new HashSet<>();
+                            SortedSet<String> datafeedIds = new TreeSet<>();
                             SearchHit[] hits = response.getHits().getHits();
                             for (SearchHit hit : hits) {
                                 datafeedIds.add(hit.field(DatafeedConfig.ID.getPreferredName()).getValue());
```
Review thread comment: The order of checks has changed. The response code is the same (409); only the error message has changed, which does not constitute a breaking change.
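A small, self-contained sketch of that compatibility argument (the error messages below are invented placeholders, not the actual messages produced by this change): a client that keys off the 409 status code rather than the message text behaves identically before and after the reordering.

```java
// Sketch only: the messages are made up; the point is that clients should rely on
// the status code, which this PR keeps at 409, not on the exact error text.
public class ConflictCheckExample {

    static final class ApiError {
        final int status;
        final String message;

        ApiError(int status, String message) {
            this.status = status;
            this.message = message;
        }
    }

    // The contract clients should depend on: the status code, not the message text.
    static boolean isConflict(ApiError error) {
        return error.status == 409;
    }

    public static void main(String[] args) {
        ApiError beforeReordering = new ApiError(409, "cannot create datafeed: the job already has one");
        ApiError afterReordering = new ApiError(409, "a datafeed already exists for this job");

        System.out.println(isConflict(beforeReordering)); // true
        System.out.println(isConflict(afterReordering));  // true - handled the same way
    }
}
```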