From 20cab22875dec109bf6e1c9c8a2058b073e95d0d Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 9 Feb 2021 13:16:01 -0700 Subject: [PATCH 01/24] Disable BWC for backporting ILM partial searchable snapshot support (#68764) Relates to backporting #68714 --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index b24219df5533b..0327103eb2e0c 100644 --- a/build.gradle +++ b/build.gradle @@ -169,8 +169,8 @@ tasks.register("verifyVersions") { * after the backport of the backcompat code is complete. */ -boolean bwc_tests_enabled = true -String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ +boolean bwc_tests_enabled = false +String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/68762" /* place a PR link here when committing bwc changes */ /* * FIPS 140-2 behavior was fixed in 7.11.0. Before that there is no way to run elasticsearch in a * JVM that is properly configured to be in fips mode with BCFIPS. For now we need to disable From c65615911f15274fa5b707eefe2376777976db9c Mon Sep 17 00:00:00 2001 From: James Rodewig <40268737+jrodewig@users.noreply.github.com> Date: Tue, 9 Feb 2021 16:07:02 -0500 Subject: [PATCH 02/24] [DOCS] Expand simple query string query's multi-position token section (#68753) --- docs/reference/analysis/token-graphs.asciidoc | 4 ++ .../simple-query-string-query.asciidoc | 44 ++++++++++--------- 2 files changed, 28 insertions(+), 20 deletions(-) diff --git a/docs/reference/analysis/token-graphs.asciidoc b/docs/reference/analysis/token-graphs.asciidoc index 20f91891aed5b..dfd700176c99c 100644 --- a/docs/reference/analysis/token-graphs.asciidoc +++ b/docs/reference/analysis/token-graphs.asciidoc @@ -39,6 +39,10 @@ record the `positionLength` for multi-position tokens. This filters include: * <> * <> +Some tokenizers, such as the +{plugin}/analysis-nori-tokenizer.html[`nori_tokenizer`], also accurately +decompose compound tokens into multi-position tokens. + In the following graph, `domain name system` and its synonym, `dns`, both have a position of `0`. However, `dns` has a `positionLength` of `3`. Other tokens in the graph have a default `positionLength` of `1`. diff --git a/docs/reference/query-dsl/simple-query-string-query.asciidoc b/docs/reference/query-dsl/simple-query-string-query.asciidoc index 9596191f24d9d..7927f36367dac 100644 --- a/docs/reference/query-dsl/simple-query-string-query.asciidoc +++ b/docs/reference/query-dsl/simple-query-string-query.asciidoc @@ -86,9 +86,10 @@ query string into tokens. Defaults to the `default_field`. If no analyzer is mapped, the index's default analyzer is used. `auto_generate_synonyms_phrase_query`:: -(Optional, Boolean) If `true`, <> -queries are automatically created for multi-term synonyms. Defaults to `true`. -See <> for an example. +(Optional, Boolean) If `true`, the parser creates a +<> query for each +<>. Defaults to `true`. +For examples, see <>. `flags`:: (Optional, string) List of enabled operators for the @@ -273,33 +274,36 @@ GET /_search <1> The `subject` field is three times as important as the `message` field. [[simple-query-string-synonyms]] -===== Synonyms +===== Multi-position tokens -The `simple_query_string` query supports multi-terms synonym expansion with the <> token filter. When this filter is used, the parser creates a phrase query for each multi-terms synonyms. 
-For example, the following synonym: `"ny, new york"` would produce: +By default, the `simple_query_string` query parser creates a +<> query for each +<> in the query string. +For example, the parser creates a `match_phrase` query for the multi-word +synonym `ny, new york`: `(ny OR ("new york"))` -It is also possible to match multi terms synonyms with conjunctions instead: +To match multi-position tokens with an `AND` conjunction instead, set +`auto_generate_synonyms_phrase_query` to `false`: [source,console] --------------------------------------------------- +---- GET /_search { - "query": { - "simple_query_string" : { - "query" : "ny city", - "auto_generate_synonyms_phrase_query" : false - } - } + "query": { + "simple_query_string": { + "query": "ny city", + "auto_generate_synonyms_phrase_query": false + } + } } --------------------------------------------------- +---- -The example above creates a boolean query: +For the above example, the parser creates the following +<> query: `(ny OR (new AND york)) city)` -that matches documents with the term `ny` or the conjunction `new AND york`. -By default the parameter `auto_generate_synonyms_phrase_query` is set to `true`. - +This `bool` query matches documents with the term `ny` or the conjunction +`new AND york`. From f1d8dedbca179934ff4fb2ca38761ec7a9436fdc Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 9 Feb 2021 14:39:09 -0700 Subject: [PATCH 03/24] Adjust version serialization and re-enable BWC after backport (#68781) Now that https://github.com/elastic/elasticsearch/pull/68762 is merged, BWC should be re-enabled. --- build.gradle | 4 ++-- .../xpack/core/ilm/SearchableSnapshotAction.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index 0327103eb2e0c..b24219df5533b 100644 --- a/build.gradle +++ b/build.gradle @@ -169,8 +169,8 @@ tasks.register("verifyVersions") { * after the backport of the backcompat code is complete. */ -boolean bwc_tests_enabled = false -String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/68762" /* place a PR link here when committing bwc changes */ +boolean bwc_tests_enabled = true +String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ /* * FIPS 140-2 behavior was fixed in 7.11.0. Before that there is no way to run elasticsearch in a * JVM that is properly configured to be in fips mode with BCFIPS. 
For now we need to disable
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java
index 93d889bfff556..76b56809d2d5a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java
@@ -101,7 +101,7 @@ public SearchableSnapshotAction(StreamInput in) throws IOException {
         } else {
             this.forceMergeIndex = true;
         }
-        if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
+        if (in.getVersion().onOrAfter(Version.V_7_12_0)) {
             this.storageType = in.readOptionalEnum(MountSearchableSnapshotRequest.Storage.class);
         } else {
             this.storageType = null;
         }
@@ -333,7 +333,7 @@ public void writeTo(StreamOutput out) throws IOException {
         if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
             out.writeBoolean(forceMergeIndex);
         }
-        if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+        if (out.getVersion().onOrAfter(Version.V_7_12_0)) {
             out.writeOptionalEnum(storageType);
         }
     }

From b914994b40ec26bf8b593080f43bfc17d13b4068 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 9 Feb 2021 16:35:51 +0000
Subject: [PATCH 04/24] Fix Javadoc issue for JDKs <15

Relates backport of #68735 in c57546c2fe18a8fffbfe5e1e16cbdd4a2729105b.
---
 .../elasticsearch/action/support/ListenableActionFuture.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java
index 36b56fd7cb1a1..553b7b988ff7e 100644
--- a/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java
+++ b/server/src/main/java/org/elasticsearch/action/support/ListenableActionFuture.java
@@ -23,7 +23,7 @@ public class ListenableActionFuture extends AdapterActionFuture {
     private boolean executedListeners = false;

     /**
-     * Registers an {@link ActionListener} to be notified when this future is completed. If the future is already completed then the
+     * Registers an {@link ActionListener} to be notified when this future is completed. If the future is already completed then the
     * listener is notified immediately, on the calling thread. If not, the listener is notified on the thread that completes the listener.
     */
    @SuppressWarnings("unchecked,rawtypes")

From 7cce0709b07fed726aad05e805b3beb8af84bbcc Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 10 Feb 2021 07:02:18 +0000
Subject: [PATCH 05/24] Add StepListener#addListener (#68770)

A common pattern today is to set up a sequence of `StepListener` objects
which ultimately notify an outer `ActionListener`. Today we do this as
follows:

    step.whenComplete(listener::onResponse, listener::onFailure);

Since this is such a common pattern, this commit exposes a method that
adds the listener directly.
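As a small before/after sketch (assuming an outer `ActionListener` named
`listener` and a `StepListener` named `step`, as in the snippet above):

    // before: forward the result and the failure to the outer listener by hand
    step.whenComplete(listener::onResponse, listener::onFailure);

    // after: register the outer listener with the step directly
    step.addListener(listener);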
--- .../elasticsearch/snapshots/ConcurrentSnapshotsIT.java | 2 +- .../java/org/elasticsearch/action/StepListener.java | 10 +++++++++- .../indices/recovery/RecoverySourceHandler.java | 4 ++-- .../repositories/RepositoriesService.java | 4 ++-- .../elasticsearch/tasks/TaskCancellationService.java | 6 +++--- .../transport/AbstractSimpleTransportTestCase.java | 2 +- .../index/store/SearchableSnapshotDirectory.java | 2 +- .../xpack/security/authz/AuthorizationService.java | 2 +- 8 files changed, 20 insertions(+), 12 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 179d303a47520..8aab2b11c3adf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -142,7 +142,7 @@ public void testDeletesAreBatched() throws Exception { final PlainActionFuture> allDeletesDone = new PlainActionFuture<>(); final ActionListener deletesListener = new GroupedActionListener<>(allDeletesDone, deleteFutures.size()); for (StepListener deleteFuture : deleteFutures) { - deleteFuture.whenComplete(deletesListener::onResponse, deletesListener::onFailure); + deleteFuture.addListener(deletesListener); } allDeletesDone.get(); diff --git a/server/src/main/java/org/elasticsearch/action/StepListener.java b/server/src/main/java/org/elasticsearch/action/StepListener.java index e6e56e610d761..e3328ef073373 100644 --- a/server/src/main/java/org/elasticsearch/action/StepListener.java +++ b/server/src/main/java/org/elasticsearch/action/StepListener.java @@ -66,7 +66,7 @@ protected void innerOnFailure(Exception e) { * @param onFailure is called when this step is completed with a failure */ public void whenComplete(CheckedConsumer onResponse, Consumer onFailure) { - delegate.addListener(ActionListener.wrap(onResponse, onFailure), EsExecutors.newDirectExecutorService(), null); + addListener(ActionListener.wrap(onResponse, onFailure)); } /** @@ -100,4 +100,12 @@ public Response result() { } return FutureUtils.get(delegate, 0L, TimeUnit.NANOSECONDS); // this future is done already - use a non-blocking method. } + + /** + * Registers the given listener to be notified with the result of this step. 
+ */ + public void addListener(ActionListener listener) { + delegate.addListener(listener, EsExecutors.newDirectExecutorService()); + } + } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 48ddbbce1d999..6bf132a5b0323 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -583,7 +583,7 @@ void createRetentionLease(final long startingSeqNo, ActionListener(logger, shard.getThreadPool(), ThreadPool.Names.GENERIC, cloneRetentionLeaseStep, false)); logger.trace("cloned primary's retention lease as [{}]", clonedLease); - cloneRetentionLeaseStep.whenComplete(rr -> listener.onResponse(clonedLease), listener::onFailure); + cloneRetentionLeaseStep.addListener(listener.map(rr -> clonedLease)); } catch (RetentionLeaseNotFoundException e) { // it's possible that the primary has no retention lease yet if we are doing a rolling upgrade from a version before // 7.4, and in that case we just create a lease using the local checkpoint of the safe commit which we're using for @@ -595,7 +595,7 @@ void createRetentionLease(final long startingSeqNo, ActionListener(logger, shard.getThreadPool(), ThreadPool.Names.GENERIC, addRetentionLeaseStep, false)); - addRetentionLeaseStep.whenComplete(rr -> listener.onResponse(newLease), listener::onFailure); + addRetentionLeaseStep.addListener(listener.map(rr -> newLease)); logger.trace("created retention lease with estimated checkpoint of [{}]", estimatedGlobalCheckpoint); } }, shardId + " establishing retention lease for [" + request.targetAllocationId() + "]", diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 13aea18b8206e..fb1b511fa240c 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -158,11 +158,11 @@ public void registerRepository(final PutRepositoryRequest request, final ActionL // Finally respond to the outer listener with the response from the original cluster state update updateRepoUuidStep.whenComplete( - ignored -> acknowledgementStep.whenComplete(listener::onResponse, listener::onFailure), + ignored -> acknowledgementStep.addListener(listener), listener::onFailure); } else { - acknowledgementStep.whenComplete(listener::onResponse, listener::onFailure); + acknowledgementStep.addListener(listener); } clusterService.submitStateUpdateTask("put_repository [" + request.name() + "]", diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java index d3946dd06279a..fe69df544ae7d 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java @@ -98,7 +98,7 @@ void doCancelTaskAndDescendants(CancellableTask task, String reason, boolean wai }); StepListener setBanListener = new StepListener<>(); setBanOnChildConnections(reason, waitForCompletion, task, childConnections, setBanListener); - setBanListener.whenComplete(groupedListener::onResponse, groupedListener::onFailure); + setBanListener.addListener(groupedListener); // If we start unbanning when the last 
child task completed and that child task executed with a specific user, then unban // requests are denied because internal requests can't run with a user. We need to remove bans with the current thread context. final Runnable removeBansRunnable = transportService.getThreadPool().getThreadContext() @@ -108,9 +108,9 @@ void doCancelTaskAndDescendants(CancellableTask task, String reason, boolean wai // if wait_for_completion is true, then only return when (1) bans are placed on child connections, (2) child tasks are // completed or failed, (3) the main task is cancelled. Otherwise, return after bans are placed on child connections. if (waitForCompletion) { - completedListener.whenComplete(r -> listener.onResponse(null), listener::onFailure); + completedListener.addListener(listener); } else { - setBanListener.whenComplete(r -> listener.onResponse(null), listener::onFailure); + setBanListener.addListener(listener); } } else { logger.trace("task [{}] doesn't have any children that should be cancelled", taskId); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 055c9bf223656..6adb1470a4433 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -2744,7 +2744,7 @@ public static Future submitRequest(TransportSer } responseListener.whenComplete(handler::handleResponse, e -> handler.handleException((TransportException) e)); final PlainActionFuture future = PlainActionFuture.newFuture(); - responseListener.whenComplete(future::onResponse, future::onFailure); + responseListener.addListener(future); return future; } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/index/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/index/store/SearchableSnapshotDirectory.java index 8f3ab7aa7cf4f..8a3e52f7b21df 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/index/store/SearchableSnapshotDirectory.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/index/store/SearchableSnapshotDirectory.java @@ -495,7 +495,7 @@ private void prewarmCache(ActionListener listener) { final int numberOfParts = file.numberOfParts(); final StepListener> fileCompletionListener = new StepListener<>(); fileCompletionListener.whenComplete(voids -> input.close(), e -> IOUtils.closeWhileHandlingException(input)); - fileCompletionListener.whenComplete(voids -> completionListener.onResponse(null), completionListener::onFailure); + fileCompletionListener.addListener(completionListener.map(voids -> null)); final GroupedActionListener partsListener = new GroupedActionListener<>(fileCompletionListener, numberOfParts); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 166dd60909d6e..86e907187514a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -380,7 +380,7 @@ private void runRequestInterceptors(RequestInfo requestInfo, AuthorizationInfo a prevListener 
= current;
             }

-            prevListener.whenComplete(v -> listener.onResponse(null), listener::onFailure);
+            prevListener.addListener(listener);
             first.intercept(requestInfo, authorizationEngine, authorizationInfo, firstStepListener);
         }
     }

From a7abc0a5560b7cd74afaa63958bd1ec1a80b63a1 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Wed, 10 Feb 2021 09:04:26 +0100
Subject: [PATCH 06/24] Add more trace logging when installing monitor watches
 and (#68752)

unmute TransportMonitoringMigrateAlertsActionTests#testLocalAlertsRemoval and
TransportMonitoringMigrateAlertsActionTests#testRepeatedLocalAlertsRemoval
tests

Somehow during these tests the monitor watches are not installed.
Both tests use the local exporter and this exporter only installs the
watches under specific conditions via the elected master node.
I suspect the conditions are never met. The http exporter is more relaxed
when attempting to install monitor watches, and the tests using the http
exporter do not seem prone to failures caused by monitor watches not
having been installed.

Relates to #66586
---
 .../monitoring/exporter/local/LocalExporter.java      | 12 ++++++++++++
 .../TransportMonitoringMigrateAlertsActionTests.java  |  2 --
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java
index 1dddcf5a47849..9a1532f60a6bd 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java
@@ -429,9 +429,20 @@ private void setupClusterAlertsTasks(ClusterState clusterState, boolean clusterS
                 if (watches != null && watches.allPrimaryShardsActive() == false) {
                     logger.trace("cannot manage cluster alerts because [.watches] index is not allocated");
                 } else if ((watches == null || indexExists) && watcherSetup.compareAndSet(false, true)) {
+                    logger.trace("installing monitoring watches");
                     getClusterAlertsInstallationAsyncActions(indexExists, asyncActions, pendingResponses);
+                } else {
+                    logger.trace("skipping installing monitoring watches, watches=[{}], indexExists=[{}], watcherSetup=[{}]",
+                        watches, indexExists, watcherSetup.get());
                 }
+            } else {
+                logger.trace("watches shouldn't be setup, because state=[{}] and clusterStateChange=[{}]", state.get(),
+                    clusterStateChange);
             }
+        } else {
+            logger.trace("watches can't be used, because xpack.watcher.enabled=[{}] and " +
+                "xpack.monitoring.exporters._local.cluster_alerts.management.enabled=[{}]",
+                XPackSettings.WATCHER_ENABLED.get(config.settings()),
+                CLUSTER_ALERTS_MANAGEMENT_SETTING.getConcreteSettingForNamespace(config.name()).get(config.settings()));
         }
     }

@@ -577,6 +588,7 @@ private void getClusterAlertsInstallationAsyncActions(final boolean indexExists,
                         new ResponseActionListener<>("watch", uniqueWatchId, pendingResponses)));
                 }
             } else if (addWatch) {
+                logger.trace("adding monitoring watch [{}]", uniqueWatchId);
                 asyncActions.add(() -> putWatch(client, watchId, uniqueWatchId, pendingResponses));
             }
         }
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java
index 1ea0eb2d9038f..fb7b8a62802f9 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringMigrateAlertsActionTests.java
@@ -109,7 +109,6 @@ private void stopMonitoring() {
         ));
     }

-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/66586")
     @TestLogging(
         value = "org.elasticsearch.xpack.monitoring.exporter.local:trace",
         reason = "to ensure we log local exporter on trace level")
@@ -147,7 +146,6 @@ public void testLocalAlertsRemoval() throws Exception {
         }
     }

-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/66586")
     @TestLogging(
         value = "org.elasticsearch.xpack.monitoring.exporter.local:trace",
         reason = "to ensure we log local exporter on trace level")

From ab260626e83a4910d0523cb2c3b887fd7ddd5909 Mon Sep 17 00:00:00 2001
From: Bogdan Pintea
Date: Wed, 10 Feb 2021 09:05:35 +0100
Subject: [PATCH 07/24] SQL: Enhance error message on filtering check against
 aggs (#68763)

* Enhance error msg on filtering check against aggs

Distinguish between the case where the filtering is a WHERE with aggs
and the case of a HAVING with missing aggs.
---
 .../xpack/sql/analysis/analyzer/Verifier.java | 17 +++++++++++++----
 .../analyzer/VerifierErrorMessagesTests.java  | 19 ++++++++++++++++++-
 2 files changed, 31 insertions(+), 5 deletions(-)

diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
index b85c56c9ac424..e449fb488aa99 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
@@ -391,9 +391,9 @@ private static boolean checkGroupByHaving(LogicalPlan p, Set localFailu
                         Expressions.names(unsupported)));
                     groupingFailures.add(a);
                     return false;
+                }
             }
         }
-        }
         return true;
     }

@@ -662,11 +662,20 @@ private static void checkGroupingFunctionTarget(GroupingFunction f, Set
     private static void checkFilterOnAggs(LogicalPlan p, Set localFailures, AttributeMap attributeRefs) {
         if (p instanceof Filter) {
             Filter filter = (Filter) p;
-            if ((filter.child() instanceof Aggregate) == false) {
+            LogicalPlan filterChild = filter.child();
+            if (filterChild instanceof Aggregate == false) {
                 filter.condition().forEachDown(Expression.class, e -> {
                     if (Functions.isAggregate(attributeRefs.getOrDefault(e, e))) {
-                        localFailures.add(
-                            fail(e, "Cannot use WHERE filtering on aggregate function [{}], use HAVING instead", Expressions.name(e)));
+                        if (filterChild instanceof Project) {
+                            filter.condition().forEachDown(FieldAttribute.class,
+                                f -> localFailures.add(fail(e, "[{}] field must appear in the GROUP BY clause or in an aggregate function",
+                                    Expressions.name(f)))
+                            );
+                        } else {
+                            localFailures.add(fail(e, "Cannot use WHERE filtering on aggregate function [{}], use HAVING instead",
+                                Expressions.name(e)));
+
+                        }
                     }
                 });
             }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
index 3ffc5756242ea..bc2b8ff59b62a 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
@@ -912,7 +912,24 @@ public void testIifWithDifferentResultAndDefaultValueDataTypes() {

     public void testAggsInWhere() {
         assertEquals("1:33: Cannot use WHERE filtering on aggregate function [MAX(int)], use HAVING instead",
-            error("SELECT MAX(int) FROM test WHERE MAX(int) > 10 GROUP BY bool"));
+                error("SELECT MAX(int) FROM test WHERE MAX(int) > 10 GROUP BY bool"));
+    }
+
+    public void testHavingInAggs() {
+        assertEquals("1:29: [int] field must appear in the GROUP BY clause or in an aggregate function",
+                error("SELECT int FROM test HAVING MAX(int) = 0"));
+
+        assertEquals("1:35: [int] field must appear in the GROUP BY clause or in an aggregate function",
+                error("SELECT int FROM test HAVING int = count(1)"));
+    }
+
+    public void testHavingAsWhere() {
+        // TODO: this query works, though it normally shouldn't; a check about it could only be enforced if the Filter would be qualified
+        // (WHERE vs HAVING). Otoh, this "extra flexibility" shouldn't be harmful atp.
+        accept("SELECT int FROM test HAVING int = 1");
+        accept("SELECT int FROM test HAVING SIN(int) + 5 > 5.5");
+        // HAVING's expression being AND'ed to WHERE's
+        accept("SELECT int FROM test WHERE int > 3 HAVING POWER(int, 2) < 100");
     }

     public void testHistogramInFilter() {

From 2bc171a01cdb680bef07279705fd7bfed296d2bd Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 10 Feb 2021 09:11:05 +0100
Subject: [PATCH 08/24] Remove Dead Test Code Branch in SparseFileTrackerTests
 (#68801)

This branch makes no sense and was failing for the `start == 0` case now
that we assert that byte ranges are well formed.
---
 .../index/store/cache/SparseFileTrackerTests.java | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/SparseFileTrackerTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/SparseFileTrackerTests.java
index 54b43a9ad86a1..97d541e342f77 100644
--- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/SparseFileTrackerTests.java
+++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/SparseFileTrackerTests.java
@@ -91,17 +91,6 @@ public void testInvalidRange() {
                 containsString("unable to listen to range")
             );
             assertThat(invoked.get(), is(false));
-        } else {
-            e = expectThrows(
-                IllegalArgumentException.class,
-                () -> sparseFileTracker.waitForRange(ByteRange.of(start, end), ByteRange.of(start - 1L, end), listener)
-            );
-            assertThat(
-                "listener range start must not be smaller than zero",
-                e.getMessage(),
-                containsString("invalid range to listen to")
-            );
-            assertThat(invoked.get(), is(false));
         }

         if (end < length) {

From bae65dde3ff788673fb9d3607486ba1bdce07d3d Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Wed, 10 Feb 2021 09:27:37 +0100
Subject: [PATCH 09/24] build-tools checks are fixed on aarch64 (#68630)

* Ignore BwcVersionsTests on aarch64
* Introduce classifier field to Architecture
* Add test coverage for downloading adoptjdk aarch64 jdks
---
 .../DistributionDownloadPluginFuncTest.groovy | 10 ++---
 .../gradle/JdkDownloadPluginFuncTest.groovy   | 43 +++++++++++--------
 .../DistributionDownloadFixture.groovy        |  3 +-
 ...lDistributionDownloadPluginFuncTest.groovy | 25 ++++++-----
 .../remote/.ci/java-versions.properties       |  4 +-
 .../elasticsearch/gradle/Architecture.java    | 10 ++++-
.../gradle/DistributionDownloadPlugin.java | 7 +-- .../gradle/BwcVersionsTests.java | 7 +++ 8 files changed, 62 insertions(+), 47 deletions(-) diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy index 7ee368fee7687..c7d775dec85a3 100644 --- a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/DistributionDownloadPluginFuncTest.groovy @@ -51,19 +51,19 @@ class DistributionDownloadPluginFuncTest extends AbstractGradleFuncTest { when: def guh = new File(testProjectDir.getRoot(), "gradle-user-home").absolutePath; def runner = gradleRunner('clean', 'setupDistro', '-i', '-g', guh) + def unpackingMessage = "Unpacking elasticsearch-${version}-linux-${Architecture.current().classifier}.tar.gz " + + "using SymbolicLinkPreservingUntarTransform" def result = withMockedDistributionDownload(version, platform, runner) { // initial run def firstRun = build() - assertOutputContains(firstRun.output, "Unpacking elasticsearch-${version}-linux-x86_64.tar.gz " + - "using SymbolicLinkPreservingUntarTransform") + assertOutputContains(firstRun.output, unpackingMessage) // 2nd invocation build() } then: result.task(":setupDistro").outcome == TaskOutcome.SUCCESS - assertOutputMissing(result.output, "Unpacking elasticsearch-${version}-linux-x86_64.tar.gz " + - "using SymbolicLinkPreservingUntarTransform") + assertOutputMissing(result.output, unpackingMessage) } def "transforms are reused across projects"() { @@ -100,7 +100,7 @@ class DistributionDownloadPluginFuncTest extends AbstractGradleFuncTest { then: result.tasks.size() == 3 - result.output.count("Unpacking elasticsearch-${version}-linux-x86_64.tar.gz " + + result.output.count("Unpacking elasticsearch-${version}-linux-${Architecture.current().classifier}.tar.gz " + "using SymbolicLinkPreservingUntarTransform") == 1 } diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy index c5184a79c7214..e80c9095f69c5 100644 --- a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy @@ -29,6 +29,8 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { private static final String OPENJDK_VERSION_OLD = "1+99" private static final String ADOPT_JDK_VERSION = "12.0.2+10" + private static final String ADOPT_JDK_VERSION_11 = "11.0.10+9" + private static final String ADOPT_JDK_VERSION_15 = "15.0.2+7" private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde" private static final String AZUL_AARCH_VERSION = "15.0.1+99@123456789123456789123456789abcde" private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)"); @@ -36,7 +38,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { @Unroll def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() { given: - def mockRepoUrl = urlPath(jdkVendor, jdkVersion, platform); + def mockRepoUrl = urlPath(jdkVendor, jdkVersion, platform, arch); def mockedContent = filebytes(jdkVendor, platform) buildFile.text = """ plugins { @@ -70,20 +72,22 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { 
assertExtraction(result.output, expectedJavaBin); where: - platform | arch | jdkVendor | jdkVersion | expectedJavaBin | suffix - "linux" | "x64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "windows" | "x64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "darwin" | "x64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "darwin" | "aarch64" | VENDOR_AZUL | AZUL_AARCH_VERSION | "Contents/Home/bin/java" | "" - "linux" | "aarch64" | VENDOR_AZUL | AZUL_AARCH_VERSION | "bin/java" | "" + platform | arch | jdkVendor | jdkVersion | expectedJavaBin | suffix + "linux" | "x64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" + "windows" | "x64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" + "darwin" | "x64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" + "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" + "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" + "darwin" | "aarch64" | VENDOR_AZUL | AZUL_AARCH_VERSION | "Contents/Home/bin/java" | "" + "linux" | "aarch64" | VENDOR_AZUL | AZUL_AARCH_VERSION | "bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION_11 | "bin/java" | "(jdk 11)" + "linux" | "aarch64" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION_15 | "bin/java" | "(jdk 15)" } def "transforms are reused across projects"() { @@ -195,10 +199,13 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { true } - private static String urlPath(final String vendor, final String version, final String platform) { + private static String urlPath(final String vendor, + final String version, + final String platform, + final String arch = 'x64') { if (vendor.equals(VENDOR_ADOPTOPENJDK)) { final String module = isMac(platform) ? "mac" : platform; - return "/jdk-12.0.2+10/" + module + "/x64/jdk/hotspot/normal/adoptopenjdk"; + return "/jdk-" + version + "/" + module + "/${arch}/jdk/hotspot/normal/adoptopenjdk"; } else if (vendor.equals(VENDOR_OPENJDK)) { final String effectivePlatform = isMac(platform) ? 
"osx" : platform; final boolean isOld = version.equals(OPENJDK_VERSION_OLD); @@ -208,7 +215,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { } else if (vendor.equals(VENDOR_AZUL)) { final String module = isMac(platform) ? "macosx" : platform; // we only test zulu 15 darwin aarch64 for now - return "/zulu${module.equals('linux') ? '-embedded' : ''}/bin/zulu15.29.15-ca-jdk15.0.2-${module}_aarch64.tar.gz"; + return "/zulu${module.equals('linux') ? '-embedded' : ''}/bin/zulu15.29.15-ca-jdk15.0.2-${module}_${arch}.tar.gz"; } } diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/DistributionDownloadFixture.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/DistributionDownloadFixture.groovy index df6dab6ca1b3d..c9afcff1394c8 100644 --- a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/DistributionDownloadFixture.groovy +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/DistributionDownloadFixture.groovy @@ -44,8 +44,7 @@ class DistributionDownloadFixture { private static String urlPath(String version,ElasticsearchDistribution.Platform platform) { String fileType = ((platform == ElasticsearchDistribution.Platform.LINUX || platform == ElasticsearchDistribution.Platform.DARWIN)) ? "tar.gz" : "zip" - String arch = Architecture.current() == Architecture.AARCH64 ? "aarch64" : "x86_64" - "/downloads/elasticsearch/elasticsearch-${version}-${platform}-${arch}.$fileType" + "/downloads/elasticsearch/elasticsearch-${version}-${platform}-${Architecture.current().classifier}.$fileType" } private static byte[] filebytes(String urlPath) throws IOException { diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy index 22c8b53213545..8d0c00fddf01d 100644 --- a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionDownloadPluginFuncTest.groovy @@ -8,14 +8,11 @@ package org.elasticsearch.gradle.internal +import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.fixtures.AbstractGradleFuncTest -import org.gradle.testkit.runner.GradleRunner import org.gradle.testkit.runner.TaskOutcome -import org.junit.Rule -import org.junit.rules.TemporaryFolder -import java.lang.management.ManagementFactory class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest { @@ -61,7 +58,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest def result = gradleRunner("setupDistro", '-g', testProjectDir.newFolder('GUH').path).build() then: - result.task(":distribution:archives:linux-tar:buildExpanded").outcome == TaskOutcome.SUCCESS + result.task(":distribution:archives:${testArchiveProjectName}:buildExpanded").outcome == TaskOutcome.SUCCESS result.task(":setupDistro").outcome == TaskOutcome.SUCCESS assertExtractedDistroIsCreated("build/distro", 'current-marker.txt') } @@ -133,24 +130,24 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest apply plugin:'base' // packed distro - configurations.create("linux-tar") + configurations.create("${testArchiveProjectName}") tasks.register("buildBwcTask", Tar) { from('bwc-marker.txt') archiveExtension = "tar.gz" 
compression = Compression.GZIP } artifacts { - it.add("linux-tar", buildBwcTask) + it.add("${testArchiveProjectName}", buildBwcTask) } // expanded distro - configurations.create("expanded-linux-tar") + configurations.create("expanded-${testArchiveProjectName}") def expandedTask = tasks.register("buildBwcExpandedTask", Copy) { from('bwc-marker.txt') into('build/install/elastic-distro') } artifacts { - it.add("expanded-linux-tar", file('build/install')) { + it.add("expanded-${testArchiveProjectName}", file('build/install')) { builtBy expandedTask type = 'directory' } @@ -160,9 +157,9 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest private void localDistroSetup() { settingsFile << """ - include ":distribution:archives:linux-tar" + include ":distribution:archives:${testArchiveProjectName}" """ - def bwcSubProjectFolder = testProjectDir.newFolder("distribution", "archives", "linux-tar") + def bwcSubProjectFolder = testProjectDir.newFolder("distribution", "archives", testArchiveProjectName) new File(bwcSubProjectFolder, 'current-marker.txt') << "current" new File(bwcSubProjectFolder, 'build.gradle') << """ import org.gradle.api.internal.artifacts.ArtifactAttributes; @@ -190,10 +187,12 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest it.add("extracted", buildExpanded) } """ - buildFile << """ - """ } + String getTestArchiveProjectName() { + def archSuffix = Architecture.current() == Architecture.AARCH64 ? '-aarch64' : '' + return "linux${archSuffix}-tar" + } boolean assertExtractedDistroIsCreated(String relativeDistroPath, String markerFileName) { File extractedFolder = new File(testProjectDir.root, relativeDistroPath) assert extractedFolder.exists() diff --git a/buildSrc/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/.ci/java-versions.properties b/buildSrc/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/.ci/java-versions.properties index 0ccc6bab285a6..4f728deb42e0c 100644 --- a/buildSrc/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/.ci/java-versions.properties +++ b/buildSrc/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/.ci/java-versions.properties @@ -5,6 +5,6 @@ # in compliance with, at your election, the Elastic License 2.0 or the Server # Side Public License, v 1. 
# -ES_BUILD_JAVA=openjdk12 -ES_RUNTIME_JAVA=openjdk12 +ES_BUILD_JAVA=openjdk11 +ES_RUNTIME_JAVA=openjdk11 GRADLE_TASK=build diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/Architecture.java b/buildSrc/src/main/java/org/elasticsearch/gradle/Architecture.java index c9a7e1eeb8edd..ccc81954aabcb 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/Architecture.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/Architecture.java @@ -10,8 +10,14 @@ public enum Architecture { - X64, - AARCH64; + X64("x86_64"), + AARCH64("aarch64"); + + public final String classifier; + + Architecture(String classifier) { + this.classifier = classifier; + } public static Architecture current() { final String architecture = System.getProperty("os.arch", ""); diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 32cdfc5ecef96..5772a29d20ec7 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -166,14 +166,11 @@ private String dependencyNotation(ElasticsearchDistribution distribution) { Version distroVersion = Version.fromString(distribution.getVersion()); String extension = distribution.getType().toString(); - String classifier = ":" + (Architecture.current() == Architecture.AARCH64 ? "aarch64" : "x86_64"); + String classifier = ":" + Architecture.current().classifier; if (distribution.getType() == Type.ARCHIVE) { extension = distribution.getPlatform() == Platform.WINDOWS ? "zip" : "tar.gz"; if (distroVersion.onOrAfter("7.0.0")) { - classifier = ":" - + distribution.getPlatform() - + "-" - + (Architecture.current() == Architecture.AARCH64 ? 
"aarch64" : "x86_64"); + classifier = ":" + distribution.getPlatform() + "-" + Architecture.current().classifier; } else { classifier = ""; } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/BwcVersionsTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/BwcVersionsTests.java index a9db70d365dfa..7e340a1682729 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/BwcVersionsTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/BwcVersionsTests.java @@ -1,6 +1,8 @@ package org.elasticsearch.gradle; import org.elasticsearch.gradle.test.GradleUnitTestCase; +import org.junit.Assume; +import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -279,6 +281,11 @@ public class BwcVersionsTests extends GradleUnitTestCase { sampleVersions.put("7.1.0", asList("7_1_0", "7_0_0", "6_7_0", "6_6_1", "6_6_0")); } + @BeforeClass + public static void setupAll() { + Assume.assumeFalse(Architecture.current() == Architecture.AARCH64); + } + @Test(expected = IllegalArgumentException.class) public void testExceptionOnEmpty() { new BwcVersions(asList("foo", "bar"), Version.fromString("7.0.0")); From ee5cc5442a8e01d7d8fa426748d0b086959bf99d Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Wed, 10 Feb 2021 11:17:10 +0200 Subject: [PATCH 10/24] QL: "fields" api implementation in QL (#68802) * Integrate "fields" API into QL (#68467) * QL: retry SQL and EQL requests in a mixed-node (rolling upgrade) cluster (#68602) * Adapt nested fields extraction from "fields" API output to the new un-flattened structure (#68745) --- .../sql/endpoints/translate.asciidoc | 20 +- x-pack/plugin/eql/qa/mixed-node/build.gradle | 66 +++ .../xpack/eql/qa/mixed_node/EqlSearchIT.java | 239 ++++++++ .../src/test/resources/eql_data.json | 30 + .../src/test/resources/eql_mapping.json | 35 ++ .../execution/search/BasicQueryClient.java | 4 +- .../eql/execution/search/RuntimeUtils.java | 20 +- .../eql/execution/search/SourceGenerator.java | 11 +- .../search/extractor/FieldHitExtractor.java | 4 +- .../extractor/TimestampFieldHitExtractor.java | 2 +- .../eql/plugin/TransportEqlSearchAction.java | 26 +- .../container/FieldExtractorRegistry.java | 36 +- .../querydsl/container/SearchHitFieldRef.java | 32 +- .../xpack/eql/analysis/CancellationTests.java | 30 +- .../CriterionOrdinalExtractionTests.java | 6 +- .../ql/execution/search/QlSourceBuilder.java | 28 +- .../extractor/AbstractFieldHitExtractor.java | 226 +------- .../xpack/ql/plugin/TransportActionUtils.java | 71 +++ .../xpack/ql/querydsl/query/NestedQuery.java | 19 +- .../org/elasticsearch/xpack/ql/TestNode.java | 41 ++ .../org/elasticsearch/xpack/ql/TestNodes.java | 43 ++ .../org/elasticsearch/xpack/ql/TestUtils.java | 35 ++ x-pack/plugin/sql/qa/mixed-node/build.gradle | 66 +++ .../xpack/sql/qa/mixed_node/SqlSearchIT.java | 273 +++++++++ .../src/test/resources/all_field_types.json | 59 ++ .../sql/qa/single_node/CliExplainIT.java | 29 +- .../xpack/sql/qa/FieldExtractorTestCase.java | 148 +++-- .../xpack/sql/qa/rest/RestSqlTestCase.java | 16 +- .../src/main/resources/docs/geo.csv-spec | 2 +- .../src/main/resources/geo/geosql.csv-spec | 138 ++--- .../sql/action/SqlQueryResponseTests.java | 3 +- .../xpack/sql/proto/ColumnInfo.java | 2 +- .../sql/action/SqlTranslateActionIT.java | 23 +- .../xpack/sql/execution/PlanExecutor.java | 2 +- .../xpack/sql/execution/search/Querier.java | 24 +- .../sql/execution/search/SourceGenerator.java | 21 - .../search/extractor/FieldHitExtractor.java | 22 +- 
.../sql/plugin/TransportSqlQueryAction.java | 20 +- .../querydsl/container/QueryContainer.java | 24 - .../querydsl/container/SearchHitFieldRef.java | 6 +- .../search/SqlSourceBuilderTests.java | 27 +- .../extractor/ComputingExtractorTests.java | 2 +- .../extractor/FieldHitExtractorTests.java | 518 ++---------------- .../rest-api-spec/test/sql/translate.yml | 7 +- 44 files changed, 1364 insertions(+), 1092 deletions(-) create mode 100644 x-pack/plugin/eql/qa/mixed-node/build.gradle create mode 100644 x-pack/plugin/eql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java create mode 100644 x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_data.json create mode 100644 x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_mapping.json create mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java create mode 100644 x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNode.java create mode 100644 x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java create mode 100644 x-pack/plugin/sql/qa/mixed-node/build.gradle create mode 100644 x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java create mode 100644 x-pack/plugin/sql/qa/mixed-node/src/test/resources/all_field_types.json diff --git a/docs/reference/sql/endpoints/translate.asciidoc b/docs/reference/sql/endpoints/translate.asciidoc index fdccbf00956b4..086efe1e1de9c 100644 --- a/docs/reference/sql/endpoints/translate.asciidoc +++ b/docs/reference/sql/endpoints/translate.asciidoc @@ -22,20 +22,22 @@ Which returns: -------------------------------------------------- { "size": 10, - "docvalue_fields": [ + "_source": false, + "fields": [ + { + "field": "author" + }, + { + "field": "name" + }, + { + "field": "page_count" + }, { "field": "release_date", "format": "epoch_millis" } ], - "_source": { - "includes": [ - "author", - "name", - "page_count" - ], - "excludes": [] - }, "sort": [ { "page_count": { diff --git a/x-pack/plugin/eql/qa/mixed-node/build.gradle b/x-pack/plugin/eql/qa/mixed-node/build.gradle new file mode 100644 index 0000000000000..5e041a2aa5f2d --- /dev/null +++ b/x-pack/plugin/eql/qa/mixed-node/build.gradle @@ -0,0 +1,66 @@ +apply plugin: 'elasticsearch.testclusters' +apply plugin: 'elasticsearch.standalone-rest-test' +apply from : "$rootDir/gradle/bwc-test.gradle" +apply plugin: 'elasticsearch.rest-test' + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +dependencies { + testImplementation project(':x-pack:qa') + testImplementation(project(xpackModule('ql:test'))) + testImplementation project(path: xpackModule('eql'), configuration: 'default') +} + +tasks.named("integTest").configure{ enabled = false} + +for (Version bwcVersion : BuildParams.bwcVersions.wireCompatible.findAll { it.onOrAfter('7.10.0') }) { + if (bwcVersion == VersionProperties.getElasticsearchVersion()) { + // Not really a mixed cluster + continue; + } + + String baseName = "v${bwcVersion}" + + testClusters { + "${baseName}" { + versions = [bwcVersion.toString(), project.version] + numberOfNodes = 3 + testDistribution = 'DEFAULT' + setting 'xpack.security.enabled', 'false' + setting 'xpack.watcher.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.eql.enabled', 'true' + setting 
'xpack.license.self_generated.type', 'trial' + // for debugging purposes + // setting 'logger.org.elasticsearch.xpack.eql.plugin.TransportEqlSearchAction', 'TRACE' + } + } + + tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { + useCluster testClusters."${baseName}" + mustRunAfter("precommit") + doFirst { + // Getting the endpoints causes a wait for the cluster + println "Endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}" + println "Upgrading one node to create a mixed cluster" + testClusters."${baseName}".nextNodeToNextVersion() + + println "Upgrade complete, endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}" + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}") + } + onlyIf { project.bwc_tests_enabled } + } + + tasks.register(bwcTaskName(bwcVersion)) { + dependsOn "${baseName}#mixedClusterTest" + } + + // run these bwc tests as part of the "check" task + tasks.named("check").configure { + dependsOn "${baseName}#mixedClusterTest" + } +} diff --git a/x-pack/plugin/eql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java b/x-pack/plugin/eql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java new file mode 100644 index 0000000000000..8e3c45f816ffc --- /dev/null +++ b/x-pack/plugin/eql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.eql.qa.mixed_node; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.NotEqualMessageBuilder; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.ql.TestNode; +import org.elasticsearch.xpack.ql.TestNodes; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.xpack.ql.TestUtils.buildNodeAndVersions; +import static org.elasticsearch.xpack.ql.TestUtils.readResource; + +/** + * Class testing the behavior of events and sequence queries in a mixed cluster scenario (during rolling upgrade). + * The test is against a three-node cluster where one node is upgraded, the other two are on the old version. 
+ * + */ +public class EqlSearchIT extends ESRestTestCase { + + private static final String index = "test_eql_mixed_versions"; + private static int numShards; + private static int numReplicas = 1; + private static int numDocs; + private static TestNodes nodes; + private static List newNodes; + private static List bwcNodes; + + @Before + public void createIndex() throws IOException { + nodes = buildNodeAndVersions(client()); + numShards = nodes.size(); + numDocs = randomIntBetween(numShards, 15); + newNodes = new ArrayList<>(nodes.getNewNodes()); + bwcNodes = new ArrayList<>(nodes.getBWCNodes()); + + String mappings = readResource(EqlSearchIT.class.getResourceAsStream("/eql_mapping.json")); + createIndex( + index, + Settings.builder() + .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build(), + mappings + ); + } + + @After + public void cleanUpIndex() throws IOException { + if (indexExists(index)) { + deleteIndex(index); + } + } + + public void testEventsWithRequestToOldNodes() throws Exception { + assertEventsQueryOnNodes(bwcNodes); + } + + public void testEventsWithRequestToUpgradedNodes() throws Exception { + assertEventsQueryOnNodes(newNodes); + } + + public void testSequencesWithRequestToOldNodes() throws Exception { + assertSequncesQueryOnNodes(bwcNodes); + } + + public void testSequencesWithRequestToUpgradedNodes() throws Exception { + assertSequncesQueryOnNodes(newNodes); + } + + private void assertEventsQueryOnNodes(List nodesList) throws Exception { + final String event = randomEvent(); + Map expectedResponse = prepareEventsTestData(event); + try ( + RestClient client = buildClient(restClientSettings(), + nodesList.stream().map(TestNode::getPublishAddress).toArray(HttpHost[]::new)) + ) { + // filter only the relevant bits of the response + String filterPath = "filter_path=hits.events._source.@timestamp,hits.events._source.event_type,hits.events._source.sequence"; + + Request request = new Request("POST", index + "/_eql/search?" + filterPath); + request.setJsonEntity("{\"query\":\"" + event + " where true\"}"); + assertBusy(() -> { assertResponse(expectedResponse, runEql(client, request)); }); + } + } + + private void assertSequncesQueryOnNodes(List nodesList) throws Exception { + Map expectedResponse = prepareSequencesTestData(); + try ( + RestClient client = buildClient(restClientSettings(), + nodesList.stream().map(TestNode::getPublishAddress).toArray(HttpHost[]::new)) + ) { + String filterPath = "filter_path=hits.sequences.join_keys,hits.sequences.events._id,hits.sequences.events._source"; + String query = "sequence by `sequence` with maxspan=100ms [success where true] by correlation_success1, correlation_success2 " + + "[failure where true] by correlation_failure1, correlation_failure2"; + String filter = "{\"range\":{\"@timestamp\":{\"gte\":\"1970-05-01\"}}}"; + + Request request = new Request("POST", index + "/_eql/search?" 
+            request.setJsonEntity("{\"query\":\"" + query + "\",\"filter\":" + filter + "}");
+            assertBusy(() -> { assertResponse(expectedResponse, runEql(client, request)); });
+        }
+    }
+
+    private String randomEvent() {
+        return randomFrom("success", "failure");
+    }
+
+    private Map<String, Object> prepareEventsTestData(String event) throws IOException {
+        List<Map<String, Object>> sourceEvents = new ArrayList<Map<String, Object>>();
+        Map<String, Object> expectedResponse = singletonMap("hits", singletonMap("events", sourceEvents));
+
+        for (int i = 0; i < numDocs; i++) {
+            StringBuilder builder = new StringBuilder();
+            final String randomEvent = randomEvent();
+            builder.append("{");
+            builder.append("\"@timestamp\":" + i + ",");
+            builder.append("\"event_type\":\"" + randomEvent + "\",");
+            builder.append("\"sequence\":" + i);
+            builder.append("}");
+            if (randomEvent.equals(event)) {
+                Map<String, Object> eventSource = new HashMap<>();
+                eventSource.put("@timestamp", i);
+                eventSource.put("event_type", randomEvent);
+                eventSource.put("sequence", i);
+                sourceEvents.add(singletonMap("_source", eventSource));
+            }
+
+            Request request = new Request("PUT", index + "/_doc/" + i);
+            request.setJsonEntity(builder.toString());
+            assertOK(client().performRequest(request));
+        }
+        if (sourceEvents.isEmpty()) {
+            return emptyMap();
+        }
+        return expectedResponse;
+    }
+
+    /*
+     * Output to compare with looks like this:
+     * {
+     *     "hits": {
+     *         "sequences": [
+     *             {
+     *                 "join_keys": [
+     *                     44,
+     *                     "C",
+     *                     "D"
+     *                 ],
+     *                 "events": [
+     *                     {
+     *                         "_id": "14",
+     *                         "_source": {
+     *                             ...
+     *                         }
+     *                     }
+     *                 ]
+     *             }
+     *         ]
+     *     }
+     * }
+     */
+    private Map<String, Object> prepareSequencesTestData() throws IOException {
+        Map<String, Object> event14 = new HashMap<>();
+        Map<String, Object> event14Source = new HashMap<>();
+        event14.put("_id", "14");
+        event14.put("_source", event14Source);
+        event14Source.put("@timestamp", "12345678914");
+        event14Source.put("event_type", "success");
+        event14Source.put("sequence", 44);
+        event14Source.put("correlation_success1", "C");
+        event14Source.put("correlation_success2", "D");
+
+        Map<String, Object> event15 = new HashMap<>();
+        Map<String, Object> event15Source = new HashMap<>();
+        event15.put("_id", "15");
+        event15.put("_source", event15Source);
+        event15Source.put("@timestamp", "12345678999");
+        event15Source.put("event_type", "failure");
+        event15Source.put("sequence", 44);
+        event15Source.put("correlation_failure1", "C");
+        event15Source.put("correlation_failure2", "D");
+
+        Map<String, Object> sequence = new HashMap<>();
+        List<Map<String, Object>> events = unmodifiableList(asList(event14, event15));
+        List<Map<String, Object>> sequences = singletonList(sequence);
+        Map<String, Object> expectedResponse = singletonMap("hits", singletonMap("sequences", sequences));
+
+        sequence.put("join_keys", asList(44, "C", "D"));
+        sequence.put("events", events);
+
+        final String bulkEntries = readResource(EqlSearchIT.class.getResourceAsStream("/eql_data.json"));
+        Request request = new Request("POST", index + "/_bulk?refresh");
+        request.setJsonEntity(bulkEntries);
+        assertOK(client().performRequest(request));
+
+        return expectedResponse;
+    }
+
+    private void assertResponse(Map<String, Object> expected, Map<String, Object> actual) {
+        if (false == expected.equals(actual)) {
+            NotEqualMessageBuilder message = new NotEqualMessageBuilder();
+            message.compareMaps(actual, expected);
+            fail("Response does not match:\n" + message.toString());
+        }
+    }
+
+    private Map<String, Object> runEql(RestClient client, Request request) throws IOException {
+        Response response = client.performRequest(request);
+        try (InputStream content = response.getEntity().getContent()) {
+            return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
+        }
+    }
+}
diff --git
a/x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_data.json b/x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_data.json new file mode 100644 index 0000000000000..3fc6e28834ea8 --- /dev/null +++ b/x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_data.json @@ -0,0 +1,30 @@ +{"index":{"_id":1}} +{"@timestamp":"1234567891","event_type":"success","sequence":1,"correlation_success1":"A","correlation_success2":"B"} +{"index":{"_id":2}} +{"@timestamp":"1234567892","event_type":"failure","sequence":2,"correlation_failure1":"A","correlation_failure2":"B"} +{"index":{"_id":3}} +{"@timestamp":"1234567893","event_type":"success","sequence":3,"correlation_success1":"A","correlation_success2":"A"} +{"index":{"_id":4}} +{"@timestamp":"1234567894","event_type":"success","sequence":4,"correlation_success1":"C","correlation_success2":"C"} +{"index":{"_id":5}} +{"@timestamp":"1234567895","event_type":"failure","sequence":5,"correlation_failure1":"B","correlation_failure2":"C"} +{"index":{"_id":6}} +{"@timestamp":"1234567896","event_type":"success","sequence":1,"correlation_success1":"A","correlation_success2":"A"} +{"index":{"_id":7}} +{"@timestamp":"1234567897","event_type":"failure","sequence":1,"correlation_failure1":"A","correlation_failure2":"A"} +{"index":{"_id":8}} +{"@timestamp":"1234567898","event_type":"success","sequence":3,"correlation_success1":"A","correlation_success2":"A"} +{"index":{"_id":9}} +{"@timestamp":"1234567899","event_type":"success","sequence":4,"correlation_success1":"C","correlation_success2":"B"} +{"index":{"_id":10}} +{"@timestamp":"12345678910","event_type":"failure","sequence":4,"correlation_failure1":"B","correlation_failure2":"B"} +{"index":{"_id":11}} +{"@timestamp":"12345678911","event_type":"success","sequence":1,"correlation_success1":"A","correlation_success2":"A"} +{"index":{"_id":12}} +{"@timestamp":"12345678912","event_type":"failure","sequence":1,"correlation_failure1":"A","correlation_failure2":"B"} +{"index":{"_id":13}} +{"@timestamp":"12345678913","event_type":"success","sequence":3,"correlation_success1":"A","correlation_success2":"A"} +{"index":{"_id":14}} +{"@timestamp":"12345678914","event_type":"success","sequence":44,"correlation_success1":"C","correlation_success2":"D"} +{"index":{"_id":15}} +{"@timestamp":"12345678999","event_type":"failure","sequence":44,"correlation_failure1":"C","correlation_failure2":"D"} diff --git a/x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_mapping.json b/x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_mapping.json new file mode 100644 index 0000000000000..f56dea6722183 --- /dev/null +++ b/x-pack/plugin/eql/qa/mixed-node/src/test/resources/eql_mapping.json @@ -0,0 +1,35 @@ + "properties": { + "@timestamp": { + "type": "date" + }, + "event_type": { + "type": "keyword" + }, + "sequence": { + "type": "long" + }, + "correlation_success1": { + "type": "wildcard" + }, + "correlation_failure1": { + "type": "wildcard" + }, + "correlation_success2": { + "type": "keyword" + }, + "correlation_failure2": { + "type": "keyword" + }, + "event": { + "properties": { + "category": { + "type": "alias", + "path": "event_type" + }, + "sequence": { + "type": "alias", + "path": "sequence" + } + } + } + } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java index a357491496085..38a2b292cb9c9 100644 --- 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java @@ -57,7 +57,7 @@ public void query(QueryRequest request, ActionListener listener) // set query timeout searchSource.timeout(cfg.requestTimeout()); - SearchRequest search = prepareRequest(client, searchSource, false, indices); + SearchRequest search = prepareRequest(searchSource, false, indices); search(search, searchLogListener(listener, log)); } @@ -138,7 +138,7 @@ public void fetchHits(Iterable> refs, ActionListener createExtractor(List fields, E public static HitExtractor createExtractor(FieldExtraction ref, EqlConfiguration cfg) { if (ref instanceof SearchHitFieldRef) { SearchHitFieldRef f = (SearchHitFieldRef) ref; - return new FieldHitExtractor(f.name(), f.fullFieldName(), f.getDataType(), cfg.zoneId(), f.useDocValue(), f.hitName(), false); + return new FieldHitExtractor(f.name(), f.getDataType(), cfg.zoneId(), f.hitName(), false); } if (ref instanceof ComputedRef) { @@ -145,16 +145,16 @@ public static HitExtractor createExtractor(FieldExtraction ref, EqlConfiguration } - public static SearchRequest prepareRequest(Client client, - SearchSourceBuilder source, + public static SearchRequest prepareRequest(SearchSourceBuilder source, boolean includeFrozen, String... indices) { - return client.prepareSearch(indices) - .setSource(source) - .setAllowPartialSearchResults(false) - .setIndicesOptions( - includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS) - .request(); + SearchRequest searchRequest = new SearchRequest(SWITCH_TO_FIELDS_API_VERSION); + searchRequest.indices(indices); + searchRequest.source(source); + searchRequest.allowPartialSearchResults(false); + searchRequest.indicesOptions( + includeFrozen ? 
IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS); + return searchRequest; } public static List searchHits(SearchResponse response) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java index adff22ec90bf2..74d59279d26d5 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java @@ -6,10 +6,8 @@ */ package org.elasticsearch.xpack.eql.execution.search; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; @@ -60,13 +58,8 @@ public static SearchSourceBuilder sourceBuilder(QueryContainer container, QueryB sorting(container, source); - // disable the source if there are no includes - if (source.fetchSource() == null || CollectionUtils.isEmpty(source.fetchSource().includes())) { - source.fetchSource(FetchSourceContext.DO_NOT_FETCH_SOURCE); - } else { - // use true to fetch only the needed bits from the source - source.fetchSource(true); - } + // disable the source, as we rely on "fields" API + source.fetchSource(false); if (container.limit() != null) { // add size and from diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/FieldHitExtractor.java index 36cfc4a70201a..5ed427b383b33 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/FieldHitExtractor.java @@ -29,9 +29,9 @@ public FieldHitExtractor(StreamInput in) throws IOException { super(in); } - public FieldHitExtractor(String name, String fullFieldName, DataType dataType, ZoneId zoneId, boolean useDocValue, String hitName, + public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, String hitName, boolean arrayLeniency) { - super(name, fullFieldName, dataType, zoneId, useDocValue, hitName, arrayLeniency); + super(name, dataType, zoneId, hitName, arrayLeniency); } @Override diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java index 127efa521daeb..42530c202800c 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/TimestampFieldHitExtractor.java @@ -10,7 +10,7 @@ public class TimestampFieldHitExtractor extends FieldHitExtractor { public TimestampFieldHitExtractor(FieldHitExtractor target) { - super(target.fieldName(), target.fullFieldName(), target.dataType(), target.zoneId(), target.useDocValues(), target.hitName(), + 
super(target.fieldName(), target.dataType(), target.zoneId(), target.hitName(), target.arrayLeniency()); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java index 8e7a5133d1117..e454b7c908c4e 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java @@ -6,7 +6,10 @@ */ package org.elasticsearch.xpack.eql.plugin; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; @@ -43,14 +46,17 @@ import static org.elasticsearch.action.ActionListener.wrap; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; +import static org.elasticsearch.xpack.ql.plugin.TransportActionUtils.executeRequestWithRetryAttempt; public class TransportEqlSearchAction extends HandledTransportAction implements AsyncTaskManagementService.AsyncOperation { + private static final Logger log = LogManager.getLogger(TransportEqlSearchAction.class); private final SecurityContext securityContext; private final ClusterService clusterService; private final PlanExecutor planExecutor; private final ThreadPool threadPool; + private final TransportService transportService; private final AsyncTaskManagementService asyncTaskManagementService; @Inject @@ -64,6 +70,7 @@ public TransportEqlSearchAction(Settings settings, ClusterService clusterService this.clusterService = clusterService; this.planExecutor = planExecutor; this.threadPool = threadPool; + this.transportService = transportService; this.asyncTaskManagementService = new AsyncTaskManagementService<>(XPackPlugin.ASYNC_RESULTS_INDEX, client, ASYNC_SEARCH_ORIGIN, registry, taskManager, EqlSearchAction.INSTANCE.name(), this, EqlSearchTask.class, clusterService, threadPool); @@ -78,8 +85,7 @@ public EqlSearchTask createTask(EqlSearchRequest request, long id, String type, @Override public void execute(EqlSearchRequest request, EqlSearchTask task, ActionListener listener) { - operation(planExecutor, task, request, username(securityContext), clusterName(clusterService), - clusterService.localNode().getId(), listener); + operation(planExecutor, task, request, username(securityContext), transportService, clusterService, listener); } @Override @@ -99,13 +105,15 @@ protected void doExecute(Task task, EqlSearchRequest request, ActionListener listener) { + TransportService transportService, ClusterService clusterService, + ActionListener listener) { + String nodeId = clusterService.localNode().getId(); + String clusterName = clusterName(clusterService); // TODO: these should be sent by the client ZoneId zoneId = DateUtils.of("Z"); QueryBuilder filter = request.filter(); @@ -122,8 +130,12 @@ public static void operation(PlanExecutor planExecutor, EqlSearchTask task, EqlS EqlConfiguration cfg = new EqlConfiguration(request.indices(), zoneId, username, clusterName, filter, timeout, request.indicesOptions(), request.fetchSize(), clientId, new TaskId(nodeId, task.getId()), task); - planExecutor.eql(cfg, request.query(), params, wrap(r -> listener.onResponse(createResponse(r, 
task.getExecutionId())), - listener::onFailure)); + executeRequestWithRetryAttempt(clusterService, listener::onFailure, + onFailure -> planExecutor.eql(cfg, request.query(), params, + wrap(r -> listener.onResponse(createResponse(r, task.getExecutionId())), onFailure)), + node -> transportService.sendRequest(node, EqlSearchAction.NAME, request, + new ActionListenerResponseHandler<>(listener, EqlSearchResponse::new, ThreadPool.Names.SAME)), + log); } static EqlSearchResponse createResponse(Results results, AsyncExecutionId id) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/FieldExtractorRegistry.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/FieldExtractorRegistry.java index 2c4049d1511a5..7726d0d7e3a0d 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/FieldExtractorRegistry.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/FieldExtractorRegistry.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.gen.pipeline.ConstantInput; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.HashMap; import java.util.Map; @@ -46,39 +45,6 @@ private FieldExtraction createFieldExtractionFor(Expression expression) { } private FieldExtraction topHitFieldExtractor(FieldAttribute fieldAttr) { - FieldAttribute actualField = fieldAttr; - FieldAttribute rootField = fieldAttr; - StringBuilder fullFieldName = new StringBuilder(fieldAttr.field().getName()); - - // Only if the field is not an alias (in which case it will be taken out from docvalue_fields if it's isAggregatable()), - // go up the tree of parents until a non-object (and non-nested) type of field is found and use that specific parent - // as the field to extract data from, from _source. We do it like this because sub-fields are not in the _source, only - // the root field to which those sub-fields belong to, are. Instead of "text_field.keyword_subfield" for _source extraction, - // we use "text_field", because there is no source for "keyword_subfield". 
- /* - * "text_field": { - * "type": "text", - * "fields": { - * "keyword_subfield": { - * "type": "keyword" - * } - * } - * } - */ - if (fieldAttr.field().isAlias() == false) { - while (actualField.parent() != null - && actualField.parent().field().getDataType() != DataTypes.OBJECT - && actualField.parent().field().getDataType() != DataTypes.NESTED - && actualField.field().getDataType().hasDocValues() == false) { - actualField = actualField.parent(); - } - } - while (rootField.parent() != null) { - fullFieldName.insert(0, ".").insert(0, rootField.parent().field().getName()); - rootField = rootField.parent(); - } - - return new SearchHitFieldRef(actualField.name(), fullFieldName.toString(), fieldAttr.field().getDataType(), - fieldAttr.field().isAggregatable(), fieldAttr.field().isAlias()); + return new SearchHitFieldRef(fieldAttr.name(), fieldAttr.field().getDataType(), fieldAttr.field().isAlias()); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java index c95d6b2707e4a..9778eecd6a38b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java @@ -13,7 +13,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME_NANOS; -import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; // NB: this class is taken from SQL - it hasn't been ported over to QL // since at this stage is unclear whether the whole FieldExtraction infrastructure @@ -21,23 +20,16 @@ public class SearchHitFieldRef implements FieldExtraction { private final String name; - private final String fullFieldName; // path included. If field full path is a.b.c, full field name is "a.b.c" and name is "c" private final DataType dataType; - private final boolean docValue; private final String hitName; - public SearchHitFieldRef(String name, String fullFieldName, DataType dataType, boolean useDocValueInsteadOfSource, boolean isAlias) { - this(name, fullFieldName, dataType, useDocValueInsteadOfSource, isAlias, null); + public SearchHitFieldRef(String name, DataType dataType, boolean isAlias) { + this(name, dataType, isAlias, null); } - public SearchHitFieldRef(String name, String fullFieldName, DataType dataType, boolean useDocValueInsteadOfSource, boolean isAlias, - String hitName) { + public SearchHitFieldRef(String name, DataType dataType, boolean isAlias, String hitName) { this.name = name; - this.fullFieldName = fullFieldName; this.dataType = dataType; - // these field types can only be extracted from docvalue_fields (ie, values already computed by Elasticsearch) - // because, for us to be able to extract them from _source, we would need the mapping of those fields (which we don't have) - this.docValue = isAlias ? useDocValueInsteadOfSource : (hasDocValues(dataType) ? 
useDocValueInsteadOfSource : false); this.hitName = hitName; } @@ -49,29 +41,17 @@ public String name() { return name; } - public String fullFieldName() { - return fullFieldName; - } - public DataType getDataType() { return dataType; } - public boolean useDocValue() { - return docValue; - } - @Override public void collectFields(QlSourceBuilder sourceBuilder) { // nested fields are handled by inner hits if (hitName != null) { return; } - if (docValue) { - sourceBuilder.addDocField(name, format(dataType)); - } else { - sourceBuilder.addSourceField(name); - } + sourceBuilder.addFetchField(name, format(dataType)); } @Override @@ -84,10 +64,6 @@ public String toString() { return name; } - private static boolean hasDocValues(DataType dataType) { - return dataType == KEYWORD || dataType == DATETIME || dataType == DATETIME_NANOS; - } - private static String format(DataType dataType) { if (dataType == DATETIME_NANOS) { return "strict_date_optional_time_nanos"; diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java index e3704ba26540a..d69f522f8bf47 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java @@ -14,10 +14,14 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.eql.action.EqlSearchRequest; import org.elasticsearch.xpack.eql.action.EqlSearchResponse; import org.elasticsearch.xpack.eql.action.EqlSearchTask; @@ -51,12 +55,13 @@ public void testCancellationBeforeFieldCaps() throws InterruptedException { Client client = mock(Client.class); EqlSearchTask task = mock(EqlSearchTask.class); when(task.isCancelled()).thenReturn(true); + ClusterService mockClusterService = mockClusterService(); IndexResolver indexResolver = new IndexResolver(client, randomAlphaOfLength(10), DefaultDataTypeRegistry.INSTANCE); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); CountDownLatch countDownLatch = new CountDownLatch(1); - TransportEqlSearchAction.operation(planExecutor, task, new EqlSearchRequest().query("foo where blah"), "", "", "node_id", - new ActionListener<>() { + TransportEqlSearchAction.operation(planExecutor, task, new EqlSearchRequest().query("foo where blah"), "", + mock(TransportService.class), mockClusterService, new ActionListener<>() { @Override public void onResponse(EqlSearchResponse eqlSearchResponse) { fail("Shouldn't be here"); @@ -96,10 +101,10 @@ public void testCancellationBeforeSearch() throws InterruptedException { AtomicBoolean cancelled = new AtomicBoolean(false); EqlSearchTask task = mock(EqlSearchTask.class); - String nodeId = randomAlphaOfLength(10); long taskId = randomNonNegativeLong(); when(task.isCancelled()).then(invocationOnMock -> cancelled.get()); 
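        // note: the local node id is no longer passed in as a plain string; it is now supplied by the mocked ClusterService below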
when(task.getId()).thenReturn(taskId); + ClusterService mockClusterService = mockClusterService(); String[] indices = new String[]{"endgame"}; @@ -119,7 +124,7 @@ public void testCancellationBeforeSearch() throws InterruptedException { PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); CountDownLatch countDownLatch = new CountDownLatch(1); TransportEqlSearchAction.operation(planExecutor, task, new EqlSearchRequest().indices("endgame") - .query("process where foo==3"), "", "", nodeId, new ActionListener<>() { + .query("process where foo==3"), "", mock(TransportService.class), mockClusterService, new ActionListener<>() { @Override public void onResponse(EqlSearchResponse eqlSearchResponse) { fail("Shouldn't be here"); @@ -149,6 +154,7 @@ public void testCancellationDuringSearch() throws InterruptedException { long taskId = randomNonNegativeLong(); when(task.isCancelled()).thenReturn(false); when(task.getId()).thenReturn(taskId); + ClusterService mockClusterService = mockClusterService(nodeId); String[] indices = new String[]{"endgame"}; @@ -183,7 +189,7 @@ public void testCancellationDuringSearch() throws InterruptedException { PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); CountDownLatch countDownLatch = new CountDownLatch(1); TransportEqlSearchAction.operation(planExecutor, task, new EqlSearchRequest().indices("endgame") - .query("process where foo==3"), "", "", nodeId, new ActionListener<>() { + .query("process where foo==3"), "", mock(TransportService.class), mockClusterService, new ActionListener<>() { @Override public void onResponse(EqlSearchResponse eqlSearchResponse) { fail("Shouldn't be here"); @@ -207,4 +213,18 @@ public void onFailure(Exception e) { verifyNoMoreInteractions(client, task); } + private ClusterService mockClusterService() { + return mockClusterService(null); + } + + private ClusterService mockClusterService(String nodeId) { + final ClusterService mockClusterService = mock(ClusterService.class); + final DiscoveryNode mockNode = mock(DiscoveryNode.class); + final ClusterName mockClusterName = mock(ClusterName.class); + when(mockNode.getId()).thenReturn(nodeId == null ? 
randomAlphaOfLength(10) : nodeId); + when(mockClusterService.localNode()).thenReturn(mockNode); + when(mockClusterName.value()).thenReturn(randomAlphaOfLength(10)); + when(mockClusterService.getClusterName()).thenReturn(mockClusterName); + return mockClusterService; + } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java index d0e20d2a6c7d4..bb92fff60559d 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java @@ -30,8 +30,8 @@ public class CriterionOrdinalExtractionTests extends ESTestCase { private String tsField = "timestamp"; private String tbField = "tiebreaker"; - private HitExtractor tsExtractor = new FieldHitExtractor(tsField, tsField, DataTypes.LONG, null, true, null, false); - private HitExtractor tbExtractor = new FieldHitExtractor(tbField, tbField, DataTypes.LONG, null, true, null, false); + private HitExtractor tsExtractor = new FieldHitExtractor(tsField, DataTypes.LONG, null, null, false); + private HitExtractor tbExtractor = new FieldHitExtractor(tbField, DataTypes.LONG, null, null, false); public void testTimeOnly() throws Exception { long time = randomLong(); @@ -56,7 +56,7 @@ public void testTimeAndTiebreakerNull() throws Exception { } public void testTimeNotComparable() throws Exception { - HitExtractor badExtractor = new FieldHitExtractor(tsField, tsField, DataTypes.BINARY, null, true, null, false); + HitExtractor badExtractor = new FieldHitExtractor(tsField, DataTypes.BINARY, null, null, false); SearchHit hit = searchHit(randomAlphaOfLength(10), null); Criterion criterion = new Criterion(0, null, emptyList(), badExtractor, null, false); EqlIllegalArgumentException exception = expectThrows(EqlIllegalArgumentException.class, () -> criterion.ordinal(hit)); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java index 327a3151110bf..4f0c1f623b772 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/QlSourceBuilder.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.ql.execution.search; -import org.elasticsearch.common.Strings; +import org.elasticsearch.Version; import org.elasticsearch.script.Script; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FieldAndFormat; @@ -22,9 +22,9 @@ * the resulting ES document as a field. 
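 * Requested fields are returned by the "fields" API, already formatted according to the format supplied for each field.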
*/ public class QlSourceBuilder { + public static final Version SWITCH_TO_FIELDS_API_VERSION = Version.V_7_10_0; // The LinkedHashMaps preserve the order of the fields in the response - private final Set sourceFields = new LinkedHashSet<>(); - private final Set docFields = new LinkedHashSet<>(); + private final Set fetchFields = new LinkedHashSet<>(); private final Map scriptFields = new LinkedHashMap<>(); boolean trackScores = false; @@ -40,17 +40,10 @@ public void trackScores() { } /** - * Retrieve the requested field from the {@code _source} of the document + * Retrieve the requested field using the "fields" API */ - public void addSourceField(String field) { - sourceFields.add(field); - } - - /** - * Retrieve the requested field from doc values (or fielddata) of the document - */ - public void addDocField(String field, String format) { - docFields.add(new FieldAndFormat(field, format)); + public void addFetchField(String field, String format) { + fetchFields.add(new FieldAndFormat(field, format)); } /** @@ -66,14 +59,7 @@ public void addScriptField(String name, Script script) { */ public void build(SearchSourceBuilder sourceBuilder) { sourceBuilder.trackScores(this.trackScores); - if (sourceFields.isEmpty() == false) { - sourceBuilder.fetchSource(sourceFields.toArray(Strings.EMPTY_ARRAY), null); - } - docFields.forEach(field -> sourceBuilder.docValueField(field.field, field.format)); + fetchFields.forEach(field -> sourceBuilder.fetchField(new FieldAndFormat(field.field, field.format, null))); scriptFields.forEach(sourceBuilder::scriptField); } - - public boolean noSource() { - return sourceFields.isEmpty(); - } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java index 350c757f461fc..9a174ac7221e0 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/AbstractFieldHitExtractor.java @@ -6,33 +6,19 @@ */ package org.elasticsearch.xpack.ql.execution.search.extractor; -import java.io.IOException; -import java.time.ZoneId; -import java.util.ArrayDeque; -import java.util.Deque; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.StringJoiner; - -import org.elasticsearch.Version; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.IgnoredFieldMapper; -import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; -import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME_NANOS; -import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; -import static org.elasticsearch.xpack.ql.type.DataTypes.SCALED_FLOAT; +import java.io.IOException; +import java.time.ZoneId; +import java.util.List; +import java.util.Map; +import java.util.Objects; /** * Extractor 
for ES fields. Works for both 'normal' fields but also nested ones (which require hitName to be set). @@ -40,39 +26,23 @@ */ public abstract class AbstractFieldHitExtractor implements HitExtractor { - private static final Version SWITCHED_FROM_DOCVALUES_TO_SOURCE_EXTRACTION = Version.V_7_4_0; - - /** - * Source extraction requires only the (relative) field name, without its parent path. - */ - private static String[] sourcePath(String name, boolean useDocValue, String hitName) { - return useDocValue ? Strings.EMPTY_ARRAY : Strings - .tokenizeToStringArray(hitName == null ? name : name.substring(hitName.length() + 1), "."); - } - private final String fieldName, hitName; - private final String fullFieldName; // used to look at the _ignored section of the query response for the actual full field name private final DataType dataType; private final ZoneId zoneId; - private final boolean useDocValue; private final boolean arrayLeniency; - private final String[] path; - protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue) { - this(name, null, dataType, zoneId, useDocValue, null, false); + protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId) { + this(name, dataType, zoneId, null, false); } - protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue, boolean arrayLeniency) { - this(name, null, dataType, zoneId, useDocValue, null, arrayLeniency); + protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean arrayLeniency) { + this(name, dataType, zoneId, null, arrayLeniency); } - protected AbstractFieldHitExtractor(String name, String fullFieldName, DataType dataType, ZoneId zoneId, boolean useDocValue, - String hitName, boolean arrayLeniency) { + protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId, String hitName, boolean arrayLeniency) { this.fieldName = name; - this.fullFieldName = fullFieldName; this.dataType = dataType; this.zoneId = zoneId; - this.useDocValue = useDocValue; this.arrayLeniency = arrayLeniency; this.hitName = hitName; @@ -81,23 +51,14 @@ protected AbstractFieldHitExtractor(String name, String fullFieldName, DataType throw new QlIllegalArgumentException("Hitname [{}] specified but not part of the name [{}]", hitName, name); } } - - this.path = sourcePath(fieldName, useDocValue, hitName); } protected AbstractFieldHitExtractor(StreamInput in) throws IOException { fieldName = in.readString(); - if (in.getVersion().onOrAfter(SWITCHED_FROM_DOCVALUES_TO_SOURCE_EXTRACTION)) { - fullFieldName = in.readOptionalString(); - } else { - fullFieldName = null; - } String typeName = in.readOptionalString(); dataType = typeName != null ? loadTypeFromName(typeName) : null; - useDocValue = in.readBoolean(); hitName = in.readOptionalString(); arrayLeniency = in.readBoolean(); - path = sourcePath(fieldName, useDocValue, hitName); zoneId = readZoneId(in); } @@ -110,11 +71,7 @@ protected DataType loadTypeFromName(String typeName) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(fieldName); - if (out.getVersion().onOrAfter(SWITCHED_FROM_DOCVALUES_TO_SOURCE_EXTRACTION)) { - out.writeOptionalString(fullFieldName); - } out.writeOptionalString(dataType == null ? 
null : dataType.typeName()); - out.writeBoolean(useDocValue); out.writeOptionalString(hitName); out.writeBoolean(arrayLeniency); } @@ -122,39 +79,27 @@ public void writeTo(StreamOutput out) throws IOException { @Override public Object extract(SearchHit hit) { Object value = null; - if (useDocValue) { - DocumentField field = hit.field(fieldName); - if (field != null) { - value = unwrapMultiValue(field.getValues()); - } + DocumentField field = null; + if (hitName != null) { + // a nested field value is grouped under the nested parent name (ie dep.dep_name lives under "dep":[{dep_name:value}]) + field = hit.field(hitName); } else { - // if the field was ignored because it was malformed and ignore_malformed was turned on - if (fullFieldName != null - && hit.getFields().containsKey(IgnoredFieldMapper.NAME) - && isFromDocValuesOnly(dataType) == false) { - /* - * We check here the presence of the field name (fullFieldName including the parent name) in the list - * of _ignored fields (due to malformed data, which was ignored). - * For example, in the case of a malformed number, a "byte" field with "ignore_malformed: true" - * with a "text" sub-field should return "null" for the "byte" parent field and the actual malformed - * data for the "text" sub-field. - */ - if (hit.getFields().get(IgnoredFieldMapper.NAME).getValues().contains(fullFieldName)) { - return null; - } - } - Map source = hit.getSourceAsMap(); - if (source != null) { - value = extractFromSource(source); - } + field = hit.field(fieldName); + } + if (field != null) { + value = unwrapFieldsMultiValue(field.getValues()); } return value; } - protected Object unwrapMultiValue(Object values) { + protected Object unwrapFieldsMultiValue(Object values) { if (values == null) { return null; } + if (values instanceof Map && hitName != null) { + // extract the sub-field from a nested field (dep.dep_name -> dep_name) + return unwrapFieldsMultiValue(((Map) values).get(fieldName.substring(hitName.length() + 1))); + } if (values instanceof List) { List list = (List) values; if (list.isEmpty()) { @@ -162,7 +107,7 @@ protected Object unwrapMultiValue(Object values) { } else { if (isPrimitive(list) == false) { if (list.size() == 1 || arrayLeniency) { - return unwrapMultiValue(list.get(0)); + return unwrapFieldsMultiValue(list.get(0)); } else { throw new QlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); } @@ -175,121 +120,13 @@ protected Object unwrapMultiValue(Object values) { return unwrapped; } - // The Jackson json parser can generate for numerics - Integers, Longs, BigIntegers (if Long is not enough) - // and BigDecimal (if Double is not enough) - if (values instanceof Number || values instanceof String || values instanceof Boolean) { - if (dataType == null) { - return values; - } - if (dataType.isNumeric() && isFromDocValuesOnly(dataType) == false) { - if (dataType == DataTypes.DOUBLE || dataType == DataTypes.FLOAT || dataType == DataTypes.HALF_FLOAT) { - Number result = null; - try { - result = numberType(dataType).parse(values, true); - } catch(IllegalArgumentException iae) { - return null; - } - // docvalue_fields is always returning a Double value even if the underlying floating point data type is not Double - // even if we don't extract from docvalue_fields anymore, the behavior should be consistent - return result.doubleValue(); - } else { - Number result = null; - try { - result = numberType(dataType).parse(values, true); - } catch(IllegalArgumentException iae) { - return null; - } - return result; - } - } 
else if (DataTypes.isString(dataType) || dataType == DataTypes.IP) { - return values.toString(); - } else { - return values; - } - } - throw new QlIllegalArgumentException("Type {} (returned by [{}]) is not supported", values.getClass().getSimpleName(), fieldName); - } - - protected boolean isFromDocValuesOnly(DataType dataType) { - return dataType == KEYWORD // because of ignore_above. - || dataType == DATETIME - || dataType == DATETIME_NANOS - || dataType == SCALED_FLOAT; // because of scaling_factor - } - - private static NumberType numberType(DataType dataType) { - return NumberType.valueOf(dataType.esType().toUpperCase(Locale.ROOT)); + return values; } protected abstract Object unwrapCustomValue(Object values); protected abstract boolean isPrimitive(List list); - @SuppressWarnings({ "unchecked", "rawtypes" }) - public Object extractFromSource(Map map) { - Object value = null; - - // Used to avoid recursive method calls - // Holds the sub-maps in the document hierarchy that are pending to be inspected along with the current index of the `path`. - Deque>> queue = new ArrayDeque<>(); - queue.add(new Tuple<>(-1, map)); - - while (queue.isEmpty() == false) { - Tuple> tuple = queue.removeLast(); - int idx = tuple.v1(); - Map subMap = tuple.v2(); - - // Find all possible entries by examining all combinations under the current level ("idx") of the "path" - // e.g.: If the path == "a.b.c.d" and the idx == 0, we need to check the current subMap against the keys: - // "b", "b.c" and "b.c.d" - StringJoiner sj = new StringJoiner("."); - for (int i = idx + 1; i < path.length; i++) { - sj.add(path[i]); - Object node = subMap.get(sj.toString()); - - if (node instanceof List) { - List listOfValues = (List) node; - // we can only do this optimization until the last element of our pass since geo points are using arrays - // and we don't want to blindly ignore the second element of array if arrayLeniency is enabled - if ((i < path.length - 1) && (listOfValues.size() == 1 || arrayLeniency)) { - // this is a List with a size of 1 e.g.: {"a" : [{"b" : "value"}]} meaning the JSON is a list with one element - // or a list of values with one element e.g.: {"a": {"b" : ["value"]}} - // in case of being lenient about arrays, just extract the first value in the array - node = listOfValues.get(0); - } else { - // a List of elements with more than one value. Break early and let unwrapMultiValue deal with the list - return unwrapMultiValue(node); - } - } - - if (node instanceof Map) { - if (i < path.length - 1) { - // Add the sub-map to the queue along with the current path index - queue.add(new Tuple<>(i, (Map) node)); - } else { - // We exhausted the path and got a map - // If it is an object - it will be handled in the value extractor - value = node; - } - } else if (node != null) { - if (i < path.length - 1) { - // If we reach a concrete value without exhausting the full path, something is wrong with the mapping - // e.g.: map is {"a" : { "b" : "value }} and we are looking for a path: "a.b.c.d" - throw new QlIllegalArgumentException("Cannot extract value [{}] from source", fieldName); - } - if (value != null) { - // A value has already been found so this means that there are more than one - // values in the document for the same path but different hierarchy. - // e.g.: {"a" : {"b" : {"c" : "value"}}}, {"a.b" : {"c" : "value"}}, ... 
-                        throw new QlIllegalArgumentException("Multiple values (returned by [{}]) are not supported", fieldName);
-                    }
-                    value = node;
-                }
-            }
-        }
-        return unwrapMultiValue(value);
-    }
-
     @Override
     public String hitName() {
         return hitName;
@@ -299,10 +136,6 @@ public String fieldName() {
         return fieldName;
     }

-    public String fullFieldName() {
-        return fullFieldName;
-    }
-
     public ZoneId zoneId() {
         return zoneId;
     }
@@ -311,10 +144,6 @@ public DataType dataType() {
         return dataType;
     }

-    public boolean useDocValues() {
-        return useDocValue;
-    }
-
     public boolean arrayLeniency() {
         return arrayLeniency;
     }
@@ -332,12 +161,11 @@ public boolean equals(Object obj) {
         AbstractFieldHitExtractor other = (AbstractFieldHitExtractor) obj;
         return fieldName.equals(other.fieldName)
             && hitName.equals(other.hitName)
-            && useDocValue == other.useDocValue
             && arrayLeniency == other.arrayLeniency;
     }

     @Override
     public int hashCode() {
-        return Objects.hash(fieldName, useDocValue, hitName, arrayLeniency);
+        return Objects.hash(fieldName, hitName, arrayLeniency);
     }
-}
+}
\ No newline at end of file
diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java
new file mode 100644
index 0000000000000..f839ba54c55ef
--- /dev/null
+++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plugin/TransportActionUtils.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.xpack.ql.plugin;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.VersionMismatchException;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.xpack.ql.util.Holder;
+
+import java.util.function.Consumer;
+
+public final class TransportActionUtils {
+
+    /**
+     * Execute a *QL request and re-try it in case the first request failed with a {@code VersionMismatchException}
+     *
+     * @param clusterService The cluster service instance
+     * @param onFailure On-failure handler in case the request doesn't fail with a {@code VersionMismatchException}
+     * @param queryRunner *QL query execution code, typically a Plan Executor running the query
+     * @param retryRequest Retry logic, invoked with the older node the request should be resent to
+     * @param log Log4j logger
+     */
+    public static void executeRequestWithRetryAttempt(ClusterService clusterService, Consumer<Exception> onFailure,
+        Consumer<Consumer<Exception>> queryRunner, Consumer<DiscoveryNode> retryRequest, Logger log) {
+
+        Holder<Boolean> retrySecondTime = new Holder<>(false);
+        queryRunner.accept(e -> {
+            // the search request likely ran on nodes with different versions of ES
+            // we will retry on a node with an older version that should generate a backwards compatible _search request
+            if (e instanceof SearchPhaseExecutionException
+                && ((SearchPhaseExecutionException) e).getCause() instanceof VersionMismatchException) {
+                if (log.isDebugEnabled()) {
+                    log.debug("Caught exception type [{}] with cause [{}].", e.getClass().getName(), e.getCause());
+                }
+                DiscoveryNode localNode = clusterService.state().nodes().getLocalNode();
+                DiscoveryNode candidateNode = null;
+                for (DiscoveryNode node : clusterService.state().nodes()) {
+                    // find the first node that's older than
the current node + if (node != localNode && node.getVersion().before(localNode.getVersion())) { + candidateNode = node; + break; + } + } + if (candidateNode != null) { + if (log.isDebugEnabled()) { + log.debug("Candidate node to resend the request to: address [{}], id [{}], name [{}], version [{}]", + candidateNode.getAddress(), candidateNode.getId(), candidateNode.getName(), candidateNode.getVersion()); + } + // re-send the request to the older node + retryRequest.accept(candidateNode); + } else { + retrySecondTime.set(true); + } + } else { + onFailure.accept(e); + } + }); + if (retrySecondTime.get()) { + if (log.isDebugEnabled()) { + log.debug("No candidate node found, likely all were upgraded in the meantime. Re-trying the original request."); + } + queryRunner.accept(onFailure); + } + } +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/querydsl/query/NestedQuery.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/querydsl/query/NestedQuery.java index 634f009087284..2b17a30a90efe 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/querydsl/query/NestedQuery.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/querydsl/query/NestedQuery.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.util.AbstractMap; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -116,26 +115,16 @@ public QueryBuilder asBuilder() { ihb.setSize(MAX_INNER_HITS); ihb.setName(path + "_" + COUNTER++); - boolean noSourceNeeded = true; - List sourceFields = new ArrayList<>(); - for (Map.Entry> entry : fields.entrySet()) { if (entry.getValue().getKey()) { - ihb.addDocValueField(entry.getKey(), entry.getValue().getValue()); + ihb.addFetchField(entry.getKey(), entry.getValue().getValue()); } else { - sourceFields.add(entry.getKey()); - noSourceNeeded = false; + ihb.addFetchField(entry.getKey()); } } - - if (noSourceNeeded) { - ihb.setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE); - ihb.setStoredFieldNames(NO_STORED_FIELD); - } - else { - ihb.setFetchSourceContext(new FetchSourceContext(true, sourceFields.toArray(new String[sourceFields.size()]), null)); - } + ihb.setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE); + ihb.setStoredFieldNames(NO_STORED_FIELD); query.innerHit(ihb); } diff --git a/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNode.java b/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNode.java new file mode 100644 index 0000000000000..746ff91dc902e --- /dev/null +++ b/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNode.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.ql;
+
+import org.apache.http.HttpHost;
+import org.elasticsearch.Version;
+
+public final class TestNode {
+
+    private final String id;
+    private final Version version;
+    private final HttpHost publishAddress;
+
+    public TestNode(String id, Version version, HttpHost publishAddress) {
+        this.id = id;
+        this.version = version;
+        this.publishAddress = publishAddress;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public HttpHost getPublishAddress() {
+        return publishAddress;
+    }
+
+    public Version getVersion() {
+        return version;
+    }
+
+    @Override
+    public String toString() {
+        return "Node{" + "id='" + id + '\'' + ", version=" + version + '}';
+    }
+}
diff --git a/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java b/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java
new file mode 100644
index 0000000000000..9681e66183530
--- /dev/null
+++ b/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ql;
+
+import org.elasticsearch.Version;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public final class TestNodes extends HashMap<String, TestNode> {
+
+    public void add(TestNode node) {
+        put(node.getId(), node);
+    }
+
+    public List<TestNode> getNewNodes() {
+        Version bwcVersion = getBWCVersion();
+        return values().stream().filter(n -> n.getVersion().after(bwcVersion)).collect(Collectors.toList());
+    }
+
+    public List<TestNode> getBWCNodes() {
+        Version bwcVersion = getBWCVersion();
+        return values().stream().filter(n -> n.getVersion().equals(bwcVersion)).collect(Collectors.toList());
+    }
+
+    public Version getBWCVersion() {
+        if (isEmpty()) {
+            throw new IllegalStateException("no nodes available");
+        }
+        return Version.fromId(values().stream().map(node -> node.getVersion().id).min(Integer::compareTo).get());
+    }
+
+    @Override
+    public String toString() {
+        return "Nodes{" + values().stream().map(TestNode::toString).collect(Collectors.joining("\n")) + '}';
+    }
+}
diff --git a/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java b/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java
index 84eb576d2dd31..bc933c8b1fe9d 100644
--- a/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java
+++ b/x-pack/plugin/ql/test/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java
@@ -7,6 +7,8 @@
 package org.elasticsearch.xpack.ql;

+import org.apache.http.HttpHost;
+import org.elasticsearch.Version;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
@@ -16,6 +18,7 @@
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.test.rest.yaml.ObjectPath;
 import org.elasticsearch.xpack.ql.expression.Expression;
 import org.elasticsearch.xpack.ql.expression.FieldAttribute;
 import org.elasticsearch.xpack.ql.expression.Literal;
@@ -265,4 +268,36 @@ public static Tuple<String, String> pathAndName(String string) {
         }
         return new Tuple<>(folder, file);
     }
+
+    public static TestNodes buildNodeAndVersions(RestClient client) throws
IOException { + Response response = client.performRequest(new Request("GET", "_nodes")); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + Map nodesAsMap = objectPath.evaluate("nodes"); + TestNodes nodes = new TestNodes(); + for (String id : nodesAsMap.keySet()) { + nodes.add( + new TestNode( + id, + Version.fromString(objectPath.evaluate("nodes." + id + ".version")), + HttpHost.create(objectPath.evaluate("nodes." + id + ".http.publish_address")) + ) + ); + } + return nodes; + } + + public static String readResource(InputStream input) throws IOException { + StringBuilder builder = new StringBuilder(); + try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) { + String line = reader.readLine(); + while (line != null) { + if (line.trim().startsWith("//") == false) { + builder.append(line); + builder.append('\n'); + } + line = reader.readLine(); + } + return builder.toString(); + } + } } diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle new file mode 100644 index 0000000000000..8a39ecc795848 --- /dev/null +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -0,0 +1,66 @@ +apply plugin: 'elasticsearch.testclusters' +apply plugin: 'elasticsearch.standalone-rest-test' +apply from : "$rootDir/gradle/bwc-test.gradle" +apply plugin: 'elasticsearch.rest-test' + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +dependencies { + testImplementation project(':x-pack:qa') + testImplementation(project(xpackModule('ql:test'))) + testImplementation project(path: xpackModule('sql'), configuration: 'default') +} + +tasks.named("integTest").configure{ enabled = false} + +// A bug (https://github.com/elastic/elasticsearch/issues/68439) limits us to perform tests with versions from 7.10.3 onwards +for (Version bwcVersion : BuildParams.bwcVersions.wireCompatible.findAll { it.onOrAfter('7.10.0') }) { + if (bwcVersion == VersionProperties.getElasticsearchVersion()) { + // Not really a mixed cluster + continue; + } + + String baseName = "v${bwcVersion}" + + testClusters { + "${baseName}" { + versions = [bwcVersion.toString(), project.version] + numberOfNodes = 3 + testDistribution = 'DEFAULT' + setting 'xpack.security.enabled', 'false' + setting 'xpack.watcher.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' + // for debugging purposes + // setting 'logger.org.elasticsearch.xpack.sql.plugin.TransportSqlQueryAction', 'TRACE' + } + } + + tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { + useCluster testClusters."${baseName}" + mustRunAfter("precommit") + doFirst { + // Getting the endpoints causes a wait for the cluster + println "Endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}" + println "Upgrading one node to create a mixed cluster" + testClusters."${baseName}".nextNodeToNextVersion() + + println "Upgrade complete, endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}" + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}") + } + onlyIf { project.bwc_tests_enabled } + } + + tasks.register(bwcTaskName(bwcVersion)) { + dependsOn 
"${baseName}#mixedClusterTest" + } + + // run these bwc tests as part of the "check" task + tasks.named("check").configure { + dependsOn "${baseName}#mixedClusterTest" + } +} diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java new file mode 100644 index 0000000000000..7c3c304e1c025 --- /dev/null +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java @@ -0,0 +1,273 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.sql.qa.mixed_node; + +import org.apache.http.HttpHost; +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.NotEqualMessageBuilder; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.ql.TestNode; +import org.elasticsearch.xpack.ql.TestNodes; +import org.elasticsearch.xpack.sql.type.SqlDataTypes; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.xpack.ql.TestUtils.buildNodeAndVersions; +import static org.elasticsearch.xpack.ql.TestUtils.readResource; +import static org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder.SWITCH_TO_FIELDS_API_VERSION; + +public class SqlSearchIT extends ESRestTestCase { + + /* + * The version where we made a significant change to how we query ES and how we interpret the results we get from ES, is 7.12 + * (the switch from extracting from _source and docvalues to using the "fields" API). The behavior of the tests is slightly + * changed on some versions and it all depends on when this above mentioned change was made. 
+ */ + private static final Version FIELDS_API_QL_INTRODUCTION = Version.V_7_12_0; + private static final String index = "test_sql_mixed_versions"; + private static int numShards; + private static int numReplicas = 1; + private static int numDocs; + private static TestNodes nodes; + private static List newNodes; + private static List bwcNodes; + private static Version bwcVersion; + private static Version newVersion; + private static boolean isBwcNodeBeforeFieldsApiInQL; + private static boolean isBwcNodeBeforeFieldsApiInES; + + @Before + public void createIndex() throws IOException { + nodes = buildNodeAndVersions(client()); + numShards = nodes.size(); + numDocs = randomIntBetween(numShards, 15); + newNodes = new ArrayList<>(nodes.getNewNodes()); + bwcNodes = new ArrayList<>(nodes.getBWCNodes()); + bwcVersion = nodes.getBWCNodes().get(0).getVersion(); + newVersion = nodes.getNewNodes().get(0).getVersion(); + // TODO: remove the 8.0.0 version check after the code reaches 7.x as well + isBwcNodeBeforeFieldsApiInQL = newVersion == Version.V_8_0_0 || bwcVersion.before(FIELDS_API_QL_INTRODUCTION); + isBwcNodeBeforeFieldsApiInES = bwcVersion.before(SWITCH_TO_FIELDS_API_VERSION); + + String mappings = readResource(SqlSearchIT.class.getResourceAsStream("/all_field_types.json")); + createIndex( + index, + Settings.builder() + .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build(), + mappings + ); + } + + @After + public void cleanUpIndex() throws IOException { + if (indexExists(index)) { + deleteIndex(index); + } + } + + public void testAllTypesWithRequestToOldNodes() throws Exception { + Map expectedResponse = prepareTestData( + columns -> { + columns.add(columnInfo("geo_point_field", "geo_point")); + columns.add(columnInfo("float_field", "float")); + columns.add(columnInfo("half_float_field", "half_float")); + }, + (builder, fieldValues) -> { + Float randomFloat = randomFloat(); + // before "fields" API being added to QL, numbers were re-parsed from _source with a similar approach to + // indexing docvalues and for floating point numbers this may be different from the actual value passed in the _source + // floats were indexed as Doubles and the values returned had a greater precision and more decimals + builder.append(","); + if (isBwcNodeBeforeFieldsApiInQL) { + builder.append("\"geo_point_field\":{\"lat\":\"37.386483\", \"lon\":\"-122.083843\"},"); + fieldValues.put("geo_point_field", "POINT (-122.08384302444756 37.38648299127817)"); + builder.append("\"float_field\":" + randomFloat + ","); + fieldValues.put("float_field", Double.valueOf(randomFloat)); + builder.append("\"half_float_field\":123.456"); + fieldValues.put("half_float_field", 123.45600128173828d); + } else { + builder.append("\"geo_point_field\":{\"lat\":\"37.386483\", \"lon\":\"-122.083843\"},"); + fieldValues.put("geo_point_field", "POINT (-122.083843 37.386483)"); + builder.append("\"float_field\":" + randomFloat + ","); + fieldValues.put("float_field", Double.valueOf(Float.valueOf(randomFloat).toString())); + builder.append("\"half_float_field\":" + fieldValues.computeIfAbsent("half_float_field", v -> 123.456)); + } + } + ); + assertAllTypesWithNodes(expectedResponse, bwcNodes); + } + + public void testAllTypesWithRequestToUpgradedNodes() throws Exception { + Map expectedResponse = prepareTestData( + columns -> { + columns.add(columnInfo("geo_point_field", "geo_point")); + columns.add(columnInfo("float_field", "float")); + 
columns.add(columnInfo("half_float_field", "half_float")); + }, + (builder, fieldValues) -> { + Float randomFloat = randomFloat(); + builder.append(","); + if (isBwcNodeBeforeFieldsApiInQL && isBwcNodeBeforeFieldsApiInES) { + builder.append("\"geo_point_field\":{\"lat\":\"37.386483\", \"lon\":\"-122.083843\"},"); + fieldValues.put("geo_point_field", "POINT (-122.08384302444756 37.38648299127817)"); + builder.append("\"float_field\":" + randomFloat + ","); + fieldValues.put("float_field", Double.valueOf(randomFloat)); + builder.append("\"half_float_field\":123.456"); + fieldValues.put("half_float_field", 123.45600128173828d); + } else { + builder.append("\"geo_point_field\":{\"lat\":\"37.386483\", \"lon\":\"-122.083843\"},"); + fieldValues.put("geo_point_field", "POINT (-122.083843 37.386483)"); + builder.append("\"float_field\":" + randomFloat + ","); + fieldValues.put("float_field", Double.valueOf(Float.valueOf(randomFloat).toString())); + builder.append("\"half_float_field\":" + fieldValues.computeIfAbsent("half_float_field", v -> 123.456)); + } + } + ); + assertAllTypesWithNodes(expectedResponse, newNodes); + } + + @SuppressWarnings("unchecked") + private Map prepareTestData(Consumer>> additionalColumns, + BiConsumer> additionalValues) throws IOException { + Map expectedResponse = new HashMap<>(); + List> columns = new ArrayList<>(); + columns.add(columnInfo("interval_year", "interval_year")); + columns.add(columnInfo("interval_minute", "interval_minute")); + columns.add(columnInfo("long_field", "long")); + columns.add(columnInfo("integer_field", "integer")); + columns.add(columnInfo("short_field", "short")); + columns.add(columnInfo("byte_field", "byte")); + columns.add(columnInfo("double_field", "double")); + columns.add(columnInfo("scaled_float_field", "scaled_float")); + columns.add(columnInfo("boolean_field", "boolean")); + columns.add(columnInfo("ip_field", "ip")); + columns.add(columnInfo("text_field", "text")); + columns.add(columnInfo("keyword_field", "keyword")); + columns.add(columnInfo("constant_keyword_field", "keyword")); + columns.add(columnInfo("wildcard_field", "keyword")); + columns.add(columnInfo("geo_point_no_dv_field", "geo_point")); + columns.add(columnInfo("geo_shape_field", "geo_shape")); + columns.add(columnInfo("shape_field", "shape")); + + expectedResponse.put("columns", columns); + additionalColumns.accept(columns); + List> rows = new ArrayList<>(numDocs); + expectedResponse.put("rows", rows); + + Map fieldValues; + String constantKeywordValue = randomAlphaOfLength(5); + for (int i = 0; i < numDocs; i++) { + fieldValues = new LinkedHashMap<>(); + fieldValues.put("interval_year", "P150Y"); + fieldValues.put("interval_minute", "PT2H43M"); + + StringBuilder builder = new StringBuilder(); + builder.append("{"); + builder.append("\"id\":" + i + ","); + builder.append("\"long_field\":" + fieldValues.computeIfAbsent("long_field", v -> randomLong()) + ","); + builder.append("\"integer_field\":" + fieldValues.computeIfAbsent("integer_field", v -> randomInt()) + ","); + builder.append("\"short_field\":" + fieldValues.computeIfAbsent("short_field", v -> Integer.valueOf(randomShort())) + ","); + builder.append("\"byte_field\":" + fieldValues.computeIfAbsent("byte_field", v -> Integer.valueOf(randomByte())) + ","); + builder.append("\"double_field\":" + fieldValues.computeIfAbsent("double_field", v -> randomDouble()) + ","); + builder.append("\"scaled_float_field\":" + fieldValues.computeIfAbsent("scaled_float_field", v -> 123.5d) + ","); + 
builder.append("\"boolean_field\":" + fieldValues.computeIfAbsent("boolean_field", v -> randomBoolean()) + ","); + builder.append("\"ip_field\":\"" + fieldValues.computeIfAbsent("ip_field", v -> "123.123.123.123") + "\","); + builder.append("\"text_field\": \"" + fieldValues.computeIfAbsent("text_field", v -> randomAlphaOfLength(5)) + "\","); + builder.append("\"keyword_field\": \"" + fieldValues.computeIfAbsent("keyword_field", v -> randomAlphaOfLength(5)) + "\","); + builder.append("\"constant_keyword_field\": \"" + fieldValues.computeIfAbsent("constant_keyword_field", + v -> constantKeywordValue) + "\","); + builder.append("\"wildcard_field\": \"" + fieldValues.computeIfAbsent("wildcard_field", v -> randomAlphaOfLength(5)) + "\","); + builder.append("\"geo_point_no_dv_field\":{\"lat\":\"40.123456\", \"lon\":\"100.234567\"},"); + fieldValues.put("geo_point_no_dv_field", "POINT (100.234567 40.123456)"); + builder.append("\"geo_shape_field\":\"POINT (-122.083843 37.386483 30)\","); + fieldValues.put("geo_shape_field", "POINT (-122.083843 37.386483 30.0)"); + builder.append("\"shape_field\":\"POINT (-122.083843 37.386483 30)\""); + fieldValues.put("shape_field", "POINT (-122.083843 37.386483 30.0)"); + additionalValues.accept(builder, fieldValues); + builder.append("}"); + + Request request = new Request("PUT", index + "/_doc/" + i); + request.setJsonEntity(builder.toString()); + assertOK(client().performRequest(request)); + + List row = new ArrayList<>(fieldValues.values()); + rows.add(row); + } + return expectedResponse; + } + + private Map columnInfo(String name, String type) { + Map column = new HashMap<>(); + column.put("name", name); + column.put("type", type); + column.put("display_size", SqlDataTypes.displaySize(SqlDataTypes.fromTypeName(type))); + return unmodifiableMap(column); + } + + private void assertAllTypesWithNodes(Map expectedResponse, List nodesList) throws Exception { + try ( + RestClient client = buildClient(restClientSettings(), + nodesList.stream().map(TestNode::getPublishAddress).toArray(HttpHost[]::new)) + ) { + Request request = new Request("POST", "_sql"); + String version = ",\"version\":\"" + newVersion.toString() + "\""; + String binaryFormat = ",\"binary_format\":\"false\""; + + @SuppressWarnings("unchecked") + List> columns = (List>) expectedResponse.get("columns"); + String intervalYearMonth = "INTERVAL '150' YEAR AS interval_year, "; + String intervalDayTime = "INTERVAL '163' MINUTE AS interval_minute, "; + + // get all fields names from the expected response built earlier, skipping the intervals as they execute locally + // and not taken from the index itself + String fieldsList = columns.stream().map(m -> (String) m.get("name")).filter(str -> str.startsWith("interval") == false) + .collect(Collectors.toList()).stream().collect(Collectors.joining(", ")); + String query = "SELECT " + intervalYearMonth + intervalDayTime + fieldsList + " FROM " + index + " ORDER BY id"; + request.setJsonEntity( + "{\"mode\":\"jdbc\"" + version + binaryFormat + ",\"query\":\"" + query + "\"}" + ); + assertBusy(() -> { assertResponse(expectedResponse, runSql(client, request)); }); + } + } + + private void assertResponse(Map expected, Map actual) { + if (false == expected.equals(actual)) { + NotEqualMessageBuilder message = new NotEqualMessageBuilder(); + message.compareMaps(actual, expected); + fail("Response does not match:\n" + message.toString()); + } + } + + private Map runSql(RestClient client, Request request) throws IOException { + Response response = 
client.performRequest(request); + try (InputStream content = response.getEntity().getContent()) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); + } + } +} diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/resources/all_field_types.json b/x-pack/plugin/sql/qa/mixed-node/src/test/resources/all_field_types.json new file mode 100644 index 0000000000000..491be6f0f96b6 --- /dev/null +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/resources/all_field_types.json @@ -0,0 +1,59 @@ +"properties": { + "long_field": { + "type": "long" + }, + "integer_field": { + "type": "integer" + }, + "short_field": { + "type": "short" + }, + "byte_field": { + "type": "byte" + }, + "double_field": { + "type": "double" + }, + "float_field": { + "type": "float" + }, + "half_float_field": { + "type": "half_float" + }, + "scaled_float_field": { + "type": "scaled_float", + "scaling_factor": 10 + }, + "boolean_field": { + "type": "boolean" + }, + "ip_field": { + "type": "ip" + }, + "text_field": { + "type": "text" + }, + "keyword_field": { + "type": "keyword" + }, + "constant_keyword_field": { + "type": "constant_keyword" + }, + // added in 7.9.0 + "wildcard_field": { + "type": "wildcard" + }, + "geo_point_field": { + "type": "geo_point" + }, + "geo_point_no_dv_field": { + "type": "geo_point", + "doc_values": "false" + }, + "geo_shape_field": { + "type": "geo_shape" + }, + "shape_field": { + "type": "shape" + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java index 60245d1144dd5..82b49bd9f4250 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliExplainIT.java @@ -40,12 +40,12 @@ public void testExplainBasic() throws IOException { assertThat(command("EXPLAIN (PLAN EXECUTABLE) SELECT * FROM test"), containsString("plan")); assertThat(readLine(), startsWith("----------")); assertThat(readLine(), startsWith("EsQueryExec[test,{")); - assertThat(readLine(), startsWith(" \"_source\" : {")); - assertThat(readLine(), startsWith(" \"includes\" : [")); - assertThat(readLine(), startsWith(" \"test_field\"")); - assertThat(readLine(), startsWith(" ],")); - assertThat(readLine(), startsWith(" \"excludes\" : [ ]")); - assertThat(readLine(), startsWith(" },")); + assertThat(readLine(), startsWith(" \"_source\" : false,")); + assertThat(readLine(), startsWith(" \"fields\" : [")); + assertThat(readLine(), startsWith(" {")); + assertThat(readLine(), startsWith(" \"field\" : \"test_field\"")); + assertThat(readLine(), startsWith(" }")); + assertThat(readLine(), startsWith(" ],")); assertThat(readLine(), startsWith(" \"sort\" : [")); assertThat(readLine(), startsWith(" {")); assertThat(readLine(), startsWith(" \"_doc\" :")); @@ -97,13 +97,15 @@ public void testExplainWithWhere() throws IOException { assertThat(readLine(), startsWith(" }")); assertThat(readLine(), startsWith(" }")); assertThat(readLine(), startsWith(" },")); - assertThat(readLine(), startsWith(" \"_source\" : {")); - assertThat(readLine(), startsWith(" \"includes\" : [")); - assertThat(readLine(), startsWith(" \"i\"")); - assertThat(readLine(), startsWith(" \"test_field\"")); - assertThat(readLine(), startsWith(" ],")); - 
assertThat(readLine(), startsWith(" \"excludes\" : [ ]")); - assertThat(readLine(), startsWith(" },")); + assertThat(readLine(), startsWith(" \"_source\" : false,")); + assertThat(readLine(), startsWith(" \"fields\" : [")); + assertThat(readLine(), startsWith(" {")); + assertThat(readLine(), startsWith(" \"field\" : \"i\"")); + assertThat(readLine(), startsWith(" },")); + assertThat(readLine(), startsWith(" {")); + assertThat(readLine(), startsWith(" \"field\" : \"test_field\"")); + assertThat(readLine(), startsWith(" }")); + assertThat(readLine(), startsWith(" ],")); assertThat(readLine(), startsWith(" \"sort\" : [")); assertThat(readLine(), startsWith(" {")); assertThat(readLine(), startsWith(" \"_doc\" :")); @@ -143,7 +145,6 @@ public void testExplainWithCount() throws IOException { assertThat(readLine(), startsWith("EsQueryExec[test,{")); assertThat(readLine(), startsWith(" \"size\" : 0,")); assertThat(readLine(), startsWith(" \"_source\" : false,")); - assertThat(readLine(), startsWith(" \"stored_fields\" : \"_none_\",")); assertThat(readLine(), startsWith(" \"sort\" : [")); assertThat(readLine(), startsWith(" {")); assertThat(readLine(), startsWith(" \"_doc\" :")); diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/FieldExtractorTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/FieldExtractorTestCase.java index cd12021aa3593..2a0391290bc1d 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/FieldExtractorTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/FieldExtractorTestCase.java @@ -49,7 +49,7 @@ public void testTextField() throws IOException { String query = "SELECT text_field FROM test"; String text = randomAlphaOfLength(20); boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level Map indexProps = new HashMap<>(1); indexProps.put("_source", enableSource); @@ -74,10 +74,10 @@ public void testTextField() throws IOException { * } */ public void testKeywordField() throws IOException { + String query = "SELECT keyword_field FROM test"; String keyword = randomAlphaOfLength(20); - // _source for `keyword` fields doesn't matter, as they should be taken from docvalue_fields boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreAbove = randomBoolean(); Map indexProps = new HashMap<>(1); @@ -94,10 +94,14 @@ public void testKeywordField() throws IOException { createIndexWithFieldTypeAndProperties("keyword", fieldProps, explicitSourceSetting ? indexProps : null); index("{\"keyword_field\":\"" + keyword + "\"}"); - Map expected = new HashMap<>(); - expected.put("columns", Arrays.asList(columnInfo("plain", "keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); - expected.put("rows", singletonList(singletonList(ignoreAbove ? 
null : keyword))); - assertResponse(expected, runSql("SELECT keyword_field FROM test")); + if (explicitSourceSetting == false || enableSource) { + Map expected = new HashMap<>(); + expected.put("columns", Arrays.asList(columnInfo("plain", "keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); + expected.put("rows", singletonList(singletonList(ignoreAbove ? null : keyword))); + assertResponse(expected, runSql(query)); + } else { + expectSourceDisabledError(query); + } } /* @@ -107,10 +111,10 @@ public void testKeywordField() throws IOException { * } */ public void testConstantKeywordField() throws IOException { + String query = "SELECT constant_keyword_field FROM test"; String value = randomAlphaOfLength(20); - // _source for `constant_keyword` fields doesn't matter, as they should be taken from docvalue_fields boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level Map indexProps = new HashMap<>(1); indexProps.put("_source", enableSource); @@ -126,13 +130,17 @@ public void testConstantKeywordField() throws IOException { createIndexWithFieldTypeAndProperties("constant_keyword", fieldProps, explicitSourceSetting ? indexProps : null); index("{\"constant_keyword_field\":\"" + value + "\"}"); - Map expected = new HashMap<>(); - expected.put( - "columns", - Arrays.asList(columnInfo("plain", "constant_keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)) - ); - expected.put("rows", singletonList(singletonList(value))); - assertResponse(expected, runSql("SELECT constant_keyword_field FROM test")); + if (explicitSourceSetting == false || enableSource) { + Map expected = new HashMap<>(); + expected.put( + "columns", + Arrays.asList(columnInfo("plain", "constant_keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)) + ); + expected.put("rows", singletonList(singletonList(value))); + assertResponse(expected, runSql(query)); + } else { + expectSourceDisabledError(query); + } } /* @@ -142,10 +150,10 @@ public void testConstantKeywordField() throws IOException { * } */ public void testWildcardField() throws IOException { + String query = "SELECT wildcard_field FROM test"; String wildcard = randomAlphaOfLength(20); - // _source for `wildcard` fields doesn't matter, as they should be taken from docvalue_fields boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreAbove = randomBoolean(); Map indexProps = new HashMap<>(1); @@ -162,10 +170,14 @@ public void testWildcardField() throws IOException { createIndexWithFieldTypeAndProperties("wildcard", fieldProps, explicitSourceSetting ? indexProps : null); index("{\"wildcard_field\":\"" + wildcard + "\"}"); - Map expected = new HashMap<>(); - expected.put("columns", Arrays.asList(columnInfo("plain", "wildcard_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); - expected.put("rows", singletonList(singletonList(ignoreAbove ? 
null : wildcard))); - assertResponse(expected, runSql("SELECT wildcard_field FROM test")); + if (explicitSourceSetting == false || enableSource) { + Map expected = new HashMap<>(); + expected.put("columns", Arrays.asList(columnInfo("plain", "wildcard_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); + expected.put("rows", singletonList(singletonList(ignoreAbove ? null : wildcard))); + assertResponse(expected, runSql(query)); + } else { + expectSourceDisabledError(query); + } } /* @@ -222,14 +234,7 @@ public void testCoerceForFloatingPointTypes() throws IOException { // because "coerce" is true, a "123.456" floating point number STRING should be converted to 123.456 as number // and converted to 123.5 for "scaled_float" type - expected.put( - "rows", - singletonList( - singletonList( - isScaledFloat ? 123.5 : (fieldType != "double" ? Double.valueOf(123.456f) : Double.valueOf(floatingPointNumber)) - ) - ) - ); + expected.put("rows", singletonList(singletonList(isScaledFloat ? 123.5 : 123.456d))); assertResponse(expected, runSql("SELECT " + fieldType + "_field FROM test")); } @@ -282,7 +287,7 @@ private void testField(String fieldType, Object value) throws IOException { String query = "SELECT " + fieldName + " FROM test"; Object actualValue = value; boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreMalformed = randomBoolean(); // ignore_malformed is true, thus test a non-number value Map indexProps = new HashMap<>(1); @@ -320,7 +325,7 @@ public void testBooleanField() throws IOException { String query = "SELECT boolean_field FROM test"; boolean booleanField = randomBoolean(); boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean asString = randomBoolean(); // pass true or false as string "true" or "false Map indexProps = new HashMap<>(1); @@ -337,7 +342,7 @@ public void testBooleanField() throws IOException { Map expected = new HashMap<>(); expected.put("columns", Arrays.asList(columnInfo("plain", "boolean_field", "boolean", JDBCType.BOOLEAN, Integer.MAX_VALUE))); // adding the boolean as a String here because parsing the response will yield a "true"/"false" String - expected.put("rows", singletonList(singletonList(asString ? 
String.valueOf(booleanField) : booleanField))); + expected.put("rows", singletonList(singletonList(booleanField))); assertResponse(expected, runSql(query)); } else { expectSourceDisabledError(query); @@ -355,7 +360,7 @@ public void testIpField() throws IOException { String ipField = "192.168.1.1"; String actualValue = ipField; boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreMalformed = randomBoolean(); Map indexProps = new HashMap<>(1); @@ -392,7 +397,6 @@ public void testIpField() throws IOException { public void testGeoPointField() throws IOException { String query = "SELECT geo_point_field FROM test"; String geoPointField = "41.12,-71.34"; - String geoPointFromDocValues = "POINT (-71.34000004269183 41.1199999647215)"; String actualValue = geoPointField; boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting boolean enableSource = randomBoolean(); // enable _source at index level @@ -413,11 +417,17 @@ public void testGeoPointField() throws IOException { createIndexWithFieldTypeAndProperties("geo_point", fieldProps, explicitSourceSetting ? indexProps : null); index("{\"geo_point_field\":\"" + actualValue + "\"}"); - // the values come from docvalues (vs from _source) so disabling the source doesn't have any impact on the values returned - Map expected = new HashMap<>(); - expected.put("columns", Arrays.asList(columnInfo("plain", "geo_point_field", "geo_point", JDBCType.VARCHAR, Integer.MAX_VALUE))); - expected.put("rows", singletonList(singletonList(ignoreMalformed ? null : geoPointFromDocValues))); - assertResponse(expected, runSql(query)); + if (explicitSourceSetting == false || enableSource) { + Map expected = new HashMap<>(); + expected.put( + "columns", + Arrays.asList(columnInfo("plain", "geo_point_field", "geo_point", JDBCType.VARCHAR, Integer.MAX_VALUE)) + ); + expected.put("rows", singletonList(singletonList(ignoreMalformed ? null : "POINT (-71.34 41.12)"))); + assertResponse(expected, runSql(query)); + } else { + expectSourceDisabledError(query); + } } /* @@ -467,7 +477,6 @@ public void testGeoShapeField() throws IOException { * "ignore_malformed": true/false * } */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/66678") public void testShapeField() throws IOException { String query = "SELECT shape_field FROM test"; String shapeField = "POINT (-377.03653 389.897676)"; @@ -494,7 +503,7 @@ public void testShapeField() throws IOException { if (explicitSourceSetting == false || enableSource) { Map expected = new HashMap<>(); expected.put("columns", Arrays.asList(columnInfo("plain", "shape_field", "shape", JDBCType.VARCHAR, Integer.MAX_VALUE))); - expected.put("rows", singletonList(singletonList(ignoreMalformed ? null : "POINT (-377.03653 389.897676)"))); + expected.put("rows", singletonList(singletonList(ignoreMalformed ? 
null : shapeField))); assertResponse(expected, runSql(query)); } else { expectSourceDisabledError(query); @@ -561,7 +570,7 @@ public void testAliasFromSourceField() throws IOException { columnInfo("plain", "a.b.c.text_field_alias", "text", JDBCType.VARCHAR, Integer.MAX_VALUE) ) ); - expected.put("rows", singletonList(Arrays.asList(text, null, null))); + expected.put("rows", singletonList(Arrays.asList(text, text, text))); assertResponse(expected, runSql("SELECT text_field, text_field_alias, a.b.c.text_field_alias FROM test")); } @@ -593,7 +602,7 @@ public void testAliasAggregatableFromSourceField() throws IOException { columnInfo("plain", "a.b.c.integer_field_alias", "integer", JDBCType.INTEGER, Integer.MAX_VALUE) ) ); - expected.put("rows", singletonList(Arrays.asList(number, null, number))); + expected.put("rows", singletonList(Arrays.asList(number, number, number))); assertResponse(expected, runSql("SELECT integer_field, integer_field_alias, a.b.c.integer_field_alias FROM test")); } @@ -610,9 +619,8 @@ public void testAliasAggregatableFromSourceField() throws IOException { */ public void testTextFieldWithKeywordSubfield() throws IOException { String text = randomAlphaOfLength(10) + " " + randomAlphaOfLength(10); - // _source for `keyword` fields doesn't matter, as they should be taken from docvalue_fields boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreAbove = randomBoolean(); String fieldName = "text_field"; String subFieldName = "text_field.keyword_subfield"; @@ -646,13 +654,7 @@ public void testTextFieldWithKeywordSubfield() throws IOException { assertResponse(expected, runSql(query)); } else { expectSourceDisabledError(query); - - // even if the _source is disabled, selecting only the keyword sub-field should work as expected - Map expected = new HashMap<>(); - expected.put("columns", Arrays.asList(columnInfo("plain", subFieldName, "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); - - expected.put("rows", singletonList(singletonList(ignoreAbove ? 
null : text))); - assertResponse(expected, runSql("SELECT text_field.keyword_subfield FROM test")); + expectSourceDisabledError("SELECT " + subFieldName + " FROM test"); } } @@ -670,7 +672,7 @@ public void testTextFieldWithKeywordSubfield() throws IOException { public void testTextFieldWithIntegerNumberSubfield() throws IOException { Integer number = randomInt(); boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreMalformed = randomBoolean(); // ignore_malformed is true, thus test a non-number value Object actualValue = number; String fieldName = "text_field"; @@ -788,7 +790,7 @@ public void testTextFieldWithIpSubfield() throws IOException { public void testNumberFieldWithTextOrKeywordSubfield() throws IOException { Integer number = randomInt(); boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean ignoreMalformed = randomBoolean(); // ignore_malformed is true, thus test a non-number value boolean isKeyword = randomBoolean(); // text or keyword subfield Object actualValue = number; @@ -834,21 +836,12 @@ public void testNumberFieldWithTextOrKeywordSubfield() throws IOException { } assertResponse(expected, runSql(query)); } else { + // disabling the _source means that nothing should be retrieved by the "fields" API if (isKeyword) { - // selecting only the keyword subfield when the _source is disabled should work - Map expected = new HashMap<>(); - expected.put("columns", singletonList(columnInfo("plain", subFieldName, "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); - if (ignoreMalformed) { - expected.put("rows", singletonList(singletonList("foo"))); - } else { - expected.put("rows", singletonList(singletonList(String.valueOf(number)))); - } - assertResponse(expected, runSql("SELECT integer_field.keyword_subfield FROM test")); + expectSourceDisabledError("SELECT integer_field.keyword_subfield FROM test"); } else { expectSourceDisabledError(query); } - - // if the _source is disabled, selecting only the integer field shouldn't work expectSourceDisabledError("SELECT " + fieldName + " FROM test"); } } @@ -913,22 +906,9 @@ public void testIpFieldWithTextOrKeywordSubfield() throws IOException { } assertResponse(expected, runSql(query)); } else { - if (isKeyword) { - // selecting only the keyword subfield when the _source is disabled should work - Map expected = new HashMap<>(); - expected.put("columns", singletonList(columnInfo("plain", subFieldName, "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))); - if (ignoreMalformed) { - expected.put("rows", singletonList(singletonList("foo"))); - } else { - expected.put("rows", singletonList(singletonList(ip))); - } - assertResponse(expected, runSql("SELECT ip_field.keyword_subfield FROM test")); - } else { - expectSourceDisabledError(query); - } - - // if the _source is disabled, selecting only the ip field shouldn't work + expectSourceDisabledError(query); expectSourceDisabledError("SELECT " + fieldName + " FROM test"); + expectSourceDisabledError("SELECT " + subFieldName + " FROM test"); } } @@ -948,7 +928,7 @@ public void testIntegerFieldWithByteSubfield() throws IOException { boolean isByte = randomBoolean(); Integer number = isByte ? 
randomByte() : randomIntBetween(Byte.MAX_VALUE + 1, Integer.MAX_VALUE); boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean rootIgnoreMalformed = randomBoolean(); // root field ignore_malformed boolean subFieldIgnoreMalformed = randomBoolean(); // sub-field ignore_malformed String fieldName = "integer_field"; @@ -1017,7 +997,7 @@ public void testByteFieldWithIntegerSubfield() throws IOException { boolean isByte = randomBoolean(); Integer number = isByte ? randomByte() : randomIntBetween(Byte.MAX_VALUE + 1, Integer.MAX_VALUE); boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting - boolean enableSource = randomBoolean(); // enable _source at index level + boolean enableSource = randomBoolean(); // enable _source at index level boolean rootIgnoreMalformed = randomBoolean(); // root field ignore_malformed boolean subFieldIgnoreMalformed = randomBoolean(); // sub-field ignore_malformed String fieldName = "byte_field"; @@ -1074,7 +1054,7 @@ private void expectSourceDisabledError(String query) { expectBadRequest(() -> { client().performRequest(buildRequest(query)); return Collections.emptyMap(); - }, containsString("unable to fetch fields from _source field: _source is disabled in the mappings for index [test]")); + }, containsString("Unable to retrieve the requested [fields] since _source is disabled in the mappings for index [test]")); } private void createIndexWithFieldTypeAndAlias(String type, Map> fieldProps, Map indexProps) diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index d583d7125f662..1995ea193c034 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -672,11 +672,10 @@ public void testBasicTranslateQuery() throws IOException { Map response = runTranslateSql(query("SELECT * FROM test").toString()); assertEquals(1000, response.get("size")); + assertFalse((Boolean) response.get("_source")); @SuppressWarnings("unchecked") - Map source = (Map) response.get("_source"); - assertNotNull(source); - assertEquals(emptyList(), source.get("excludes")); - assertEquals(singletonList("test"), source.get("includes")); + List> source = (List>) response.get("fields"); + assertEquals(singletonList(singletonMap("field", "test")), source); } public void testBasicQueryWithFilter() throws IOException { @@ -743,11 +742,10 @@ public void testBasicTranslateQueryWithFilter() throws IOException { Map response = runTranslateSql(query("SELECT * FROM test").filter("{\"match\": {\"test\": \"foo\"}}").toString()); assertEquals(response.get("size"), 1000); + assertFalse((Boolean) response.get("_source")); @SuppressWarnings("unchecked") - Map source = (Map) response.get("_source"); - assertNotNull(source); - assertEquals(emptyList(), source.get("excludes")); - assertEquals(singletonList("test"), source.get("includes")); + List> source = (List>) response.get("fields"); + assertEquals(singletonList(singletonMap("field", "test")), source); @SuppressWarnings("unchecked") Map query = (Map) response.get("query"); @@ -784,7 +782,7 @@ public void 
testTranslateQueryWithGroupByAndHaving() throws IOException { assertEquals(response.get("size"), 0); assertEquals(false, response.get("_source")); - assertEquals("_none_", response.get("stored_fields")); + assertNull(response.get("stored_fields")); @SuppressWarnings("unchecked") Map aggregations = (Map) response.get("aggregations"); diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/docs/geo.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/docs/geo.csv-spec index 899147fd3e6d9..d10890e227e44 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/docs/geo.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/docs/geo.csv-spec @@ -15,7 +15,7 @@ selectAsWKT SELECT city, ST_AsWKT(location) location FROM "geo" WHERE city = 'Amsterdam'; city:s | location:s -Amsterdam |POINT (4.850311987102032 52.347556999884546) +Amsterdam |POINT (4.850312 52.347557) // end::aswkt ; diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/geo/geosql.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/geo/geosql.csv-spec index c9d7bc85448de..391f0effc6154 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/geo/geosql.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/geo/geosql.csv-spec @@ -32,72 +32,72 @@ shape | GEOMETRY | shape selectAllPointsAsStrings SELECT city, CAST(location AS STRING) location, CAST(location_no_dv AS STRING) location_no_dv, CAST(geoshape AS STRING) geoshape, CAST(shape AS STRING) shape, region FROM "geo" ORDER BY "city"; - city:s | location:s | location_no_dv:s | geoshape:s | shape:s | region:s -Amsterdam |POINT (4.850311987102032 52.347556999884546) |POINT (4.850312 52.347557) |POINT (4.850312 52.347557 2.0) |POINT (4.850312 52.347557 2.0) |Europe -Berlin |POINT (13.390888944268227 52.48670099303126) |POINT (13.390889 52.486701) |POINT (13.390889 52.486701 34.0) |POINT (13.390889 52.486701 34.0) |Europe -Chicago |POINT (-87.63787407428026 41.888782968744636) |POINT (-87.637874 41.888783) |POINT (-87.637874 41.888783 181.0) |POINT (-87.637874 41.888783 181.0) |Americas -Hong Kong |POINT (114.18392493389547 22.28139698971063) |POINT (114.183925 22.281397) |POINT (114.183925 22.281397 552.0) |POINT (114.183925 22.281397 552.0) |Asia -London |POINT (-0.12167204171419144 51.51087098289281)|POINT (-0.121672 51.510871) |POINT (-0.121672 51.510871 11.0) |POINT (-0.121672 51.510871 11.0) |Europe -Mountain View |POINT (-122.08384302444756 37.38648299127817) |POINT (-122.083843 37.386483) |POINT (-122.083843 37.386483 30.0) |POINT (-122.083843 37.386483 30.0) |Americas -Munich |POINT (11.537504978477955 48.14632098656148) |POINT (11.537505 48.146321) |POINT (11.537505 48.146321 519.0) |POINT (11.537505 48.146321 519.0) |Europe -New York |POINT (-73.9900270756334 40.74517097789794) |POINT (-73.990027 40.745171) |POINT (-73.990027 40.745171 10.0) |POINT (-73.990027 40.745171 10.0) |Americas -Paris |POINT (2.3517729341983795 48.84553796611726) |POINT (2.351773 48.845538) |POINT (2.351773 48.845538 35.0) |POINT (2.351773 48.845538 35.0) |Europe -Phoenix |POINT (-111.97350500151515 33.37624196894467) |POINT (-111.973505 33.376242) |POINT (-111.973505 33.376242 331.0)|POINT (-111.973505 33.376242 331.0)|Americas -San Francisco |POINT (-122.39422800019383 37.789540970698) |POINT (-122.394228 37.789541) |POINT (-122.394228 37.789541 16.0) |POINT (-122.394228 37.789541 16.0) |Americas -Seoul |POINT (127.06085099838674 37.50913198571652) |POINT (127.060851 37.509132) |POINT (127.060851 37.509132 38.0) |POINT (127.060851 37.509132 38.0) |Asia 
-Singapore |POINT (103.8555349688977 1.2958679627627134) |POINT (103.855535 1.295868) |POINT (103.855535 1.295868 15.0) |POINT (103.855535 1.295868 15.0) |Asia -Sydney |POINT (151.20862897485495 -33.863385021686554)|POINT (151.208629 -33.863385) |POINT (151.208629 -33.863385 100.0)|POINT (151.208629 -33.863385 100.0)|Asia -Tokyo |POINT (139.76402222178876 35.66961596254259) |POINT (139.76402225 35.669616)|POINT (139.76402225 35.669616 40.0)|POINT (139.76402225 35.669616 40.0)|Asia + city:s | location:s | location_no_dv:s | geoshape:s | shape:s | region:s +Amsterdam |POINT (4.850312 52.347557) |POINT (4.850312 52.347557) |POINT (4.850312 52.347557 2.0) |POINT (4.850312 52.347557 2.0) |Europe +Berlin |POINT (13.390889 52.486701) |POINT (13.390889 52.486701) |POINT (13.390889 52.486701 34.0) |POINT (13.390889 52.486701 34.0) |Europe +Chicago |POINT (-87.637874 41.888783) |POINT (-87.637874 41.888783) |POINT (-87.637874 41.888783 181.0) |POINT (-87.637874 41.888783 181.0) |Americas +Hong Kong |POINT (114.183925 22.281397) |POINT (114.183925 22.281397) |POINT (114.183925 22.281397 552.0) |POINT (114.183925 22.281397 552.0) |Asia +London |POINT (-0.121672 51.510871) |POINT (-0.121672 51.510871) |POINT (-0.121672 51.510871 11.0) |POINT (-0.121672 51.510871 11.0) |Europe +Mountain View |POINT (-122.083843 37.386483) |POINT (-122.083843 37.386483) |POINT (-122.083843 37.386483 30.0) |POINT (-122.083843 37.386483 30.0) |Americas +Munich |POINT (11.537505 48.146321) |POINT (11.537505 48.146321) |POINT (11.537505 48.146321 519.0) |POINT (11.537505 48.146321 519.0) |Europe +New York |POINT (-73.990027 40.745171) |POINT (-73.990027 40.745171) |POINT (-73.990027 40.745171 10.0) |POINT (-73.990027 40.745171 10.0) |Americas +Paris |POINT (2.351773 48.845538) |POINT (2.351773 48.845538) |POINT (2.351773 48.845538 35.0) |POINT (2.351773 48.845538 35.0) |Europe +Phoenix |POINT (-111.973505 33.376242) |POINT (-111.973505 33.376242) |POINT (-111.973505 33.376242 331.0)|POINT (-111.973505 33.376242 331.0)|Americas +San Francisco |POINT (-122.394228 37.789541) |POINT (-122.394228 37.789541) |POINT (-122.394228 37.789541 16.0) |POINT (-122.394228 37.789541 16.0) |Americas +Seoul |POINT (127.060851 37.509132) |POINT (127.060851 37.509132) |POINT (127.060851 37.509132 38.0) |POINT (127.060851 37.509132 38.0) |Asia +Singapore |POINT (103.855535 1.295868) |POINT (103.855535 1.295868) |POINT (103.855535 1.295868 15.0) |POINT (103.855535 1.295868 15.0) |Asia +Sydney |POINT (151.208629 -33.863385) |POINT (151.208629 -33.863385) |POINT (151.208629 -33.863385 100.0)|POINT (151.208629 -33.863385 100.0)|Asia +Tokyo |POINT (139.76402225 35.669616)|POINT (139.76402225 35.669616)|POINT (139.76402225 35.669616 40.0)|POINT (139.76402225 35.669616 40.0)|Asia ; // TODO: Both shape and location contain the same data for now, we should change it later to make things more interesting selectAllPointsAsWKT SELECT city, ST_ASWKT(location) location_wkt, ST_ASWKT(geoshape) geoshape_wkt, region FROM "geo" ORDER BY "city"; - city:s | location_wkt:s | geoshape_wkt:s | region:s -Amsterdam |POINT (4.850311987102032 52.347556999884546) |POINT (4.850312 52.347557 2.0) |Europe -Berlin |POINT (13.390888944268227 52.48670099303126) |POINT (13.390889 52.486701 34.0) |Europe -Chicago |POINT (-87.63787407428026 41.888782968744636) |POINT (-87.637874 41.888783 181.0) |Americas -Hong Kong |POINT (114.18392493389547 22.28139698971063) |POINT (114.183925 22.281397 552.0) |Asia -London |POINT (-0.12167204171419144 51.51087098289281)|POINT (-0.121672 
51.510871 11.0) |Europe -Mountain View |POINT (-122.08384302444756 37.38648299127817) |POINT (-122.083843 37.386483 30.0) |Americas -Munich |POINT (11.537504978477955 48.14632098656148) |POINT (11.537505 48.146321 519.0) |Europe -New York |POINT (-73.9900270756334 40.74517097789794) |POINT (-73.990027 40.745171 10.0) |Americas -Paris |POINT (2.3517729341983795 48.84553796611726) |POINT (2.351773 48.845538 35.0) |Europe -Phoenix |POINT (-111.97350500151515 33.37624196894467) |POINT (-111.973505 33.376242 331.0) |Americas -San Francisco |POINT (-122.39422800019383 37.789540970698) |POINT (-122.394228 37.789541 16.0) |Americas -Seoul |POINT (127.06085099838674 37.50913198571652) |POINT (127.060851 37.509132 38.0) |Asia -Singapore |POINT (103.8555349688977 1.2958679627627134) |POINT (103.855535 1.295868 15.0) |Asia -Sydney |POINT (151.20862897485495 -33.863385021686554)|POINT (151.208629 -33.863385 100.0) |Asia -Tokyo |POINT (139.76402222178876 35.66961596254259) |POINT (139.76402225 35.669616 40.0) |Asia + city:s | location_wkt:s | geoshape_wkt:s | region:s +Amsterdam |POINT (4.850312 52.347557) |POINT (4.850312 52.347557 2.0) |Europe +Berlin |POINT (13.390889 52.486701) |POINT (13.390889 52.486701 34.0) |Europe +Chicago |POINT (-87.637874 41.888783) |POINT (-87.637874 41.888783 181.0) |Americas +Hong Kong |POINT (114.183925 22.281397) |POINT (114.183925 22.281397 552.0) |Asia +London |POINT (-0.121672 51.510871) |POINT (-0.121672 51.510871 11.0) |Europe +Mountain View |POINT (-122.083843 37.386483) |POINT (-122.083843 37.386483 30.0) |Americas +Munich |POINT (11.537505 48.146321) |POINT (11.537505 48.146321 519.0) |Europe +New York |POINT (-73.990027 40.745171) |POINT (-73.990027 40.745171 10.0) |Americas +Paris |POINT (2.351773 48.845538) |POINT (2.351773 48.845538 35.0) |Europe +Phoenix |POINT (-111.973505 33.376242) |POINT (-111.973505 33.376242 331.0)|Americas +San Francisco |POINT (-122.394228 37.789541) |POINT (-122.394228 37.789541 16.0) |Americas +Seoul |POINT (127.060851 37.509132) |POINT (127.060851 37.509132 38.0) |Asia +Singapore |POINT (103.855535 1.295868) |POINT (103.855535 1.295868 15.0) |Asia +Sydney |POINT (151.208629 -33.863385) |POINT (151.208629 -33.863385 100.0)|Asia +Tokyo |POINT (139.76402225 35.669616)|POINT (139.76402225 35.669616 40.0)|Asia ; selectWithAsWKTInWhere SELECT city, ST_ASWKT(location) location_wkt, region FROM "geo" WHERE LOCATE('114', ST_ASWKT(location)) > 0 ORDER BY "city"; - city:s | location_wkt:s | region:s -Hong Kong |POINT (114.18392493389547 22.28139698971063)|Asia + city:s | location_wkt:s | region:s +Hong Kong |POINT (114.183925 22.281397)|Asia ; selectAllPointsOrderByLonFromAsWKT SELECT city, SUBSTRING(ST_ASWKT(location), 8, LOCATE(' ', ST_ASWKT(location), 8) - 8) lon FROM "geo" ORDER BY lon; city:s | lon:s -London |-0.12167204171419144 -Phoenix |-111.97350500151515 -Mountain View |-122.08384302444756 -San Francisco |-122.39422800019383 -New York |-73.9900270756334 -Chicago |-87.63787407428026 -Singapore |103.8555349688977 -Munich |11.537504978477955 -Hong Kong |114.18392493389547 -Seoul |127.06085099838674 -Berlin |13.390888944268227 -Tokyo |139.76402222178876 -Sydney |151.20862897485495 -Paris |2.3517729341983795 -Amsterdam |4.850311987102032 +London |-0.121672 +Phoenix |-111.973505 +Mountain View |-122.083843 +San Francisco |-122.394228 +New York |-73.990027 +Chicago |-87.637874 +Singapore |103.855535 +Munich |11.537505 +Hong Kong |114.183925 +Seoul |127.060851 +Berlin |13.390889 +Tokyo |139.76402225 +Sydney |151.208629 +Paris |2.351773 
+Amsterdam |4.850312 ; selectAllPointsGroupByHemisphereFromAsWKT @@ -157,11 +157,11 @@ selectCitiesByDistance SELECT region, city, ST_Distance(location, ST_WktToSQL('POINT (-71 42)')) distance FROM geo WHERE distance < 5000000 ORDER BY region, city; region:s | city:s | distance:d -Americas |Chicago |1373941.5140200066 -Americas |Mountain View |4335936.909375596 -Americas |New York |285839.6579622518 -Americas |Phoenix |3692895.0346903414 -Americas |San Francisco |4343565.010996301 +Americas |Chicago |1373941.5075370357 +Americas |Mountain View |4335936.907008218 +Americas |New York |285839.6512191343 +Americas |Phoenix |3692895.0329883597 +Americas |San Francisco |4343565.009715615 ; selectCitiesByDistanceFloored @@ -267,27 +267,27 @@ SELECT COUNT(*) cnt, FLOOR(ST_Y(location)/45) north, FLOOR(ST_X(location)/90) ea selectFilterByXOfLocation SELECT city, ST_X(geoshape) x, ST_Y(geoshape) y, ST_Z(geoshape) z, ST_X(location) lx, ST_Y(location) ly FROM geo WHERE lx > 0 ORDER BY ly; - city:s | x:d | y:d | z:d | lx:d | ly:d -Sydney |151.208629 |-33.863385 |100.0 |151.20862897485495|-33.863385021686554 -Singapore |103.855535 |1.295868 |15.0 |103.8555349688977 |1.2958679627627134 -Hong Kong |114.183925 |22.281397 |552.0 |114.18392493389547|22.28139698971063 -Tokyo |139.76402225 |35.669616 |40.0 |139.76402222178876|35.66961596254259 -Seoul |127.060851 |37.509132 |38.0 |127.06085099838674|37.50913198571652 -Munich |11.537505 |48.146321 |519.0 |11.537504978477955|48.14632098656148 -Paris |2.351773 |48.845538 |35.0 |2.3517729341983795|48.84553796611726 -Amsterdam |4.850312 |52.347557 |2.0 |4.850311987102032 |52.347556999884546 -Berlin |13.390889 |52.486701 |34.0 |13.390888944268227|52.48670099303126 + city:s | x:d | y:d | z:d | lx:d | ly:d +Sydney |151.208629 |-33.863385 |100.0 |151.208629 |-33.863385 +Singapore |103.855535 |1.295868 |15.0 |103.855535 |1.295868 +Hong Kong |114.183925 |22.281397 |552.0 |114.183925 |22.281397 +Tokyo |139.76402225 |35.669616 |40.0 |139.76402225 |35.669616 +Seoul |127.060851 |37.509132 |38.0 |127.060851 |37.509132 +Munich |11.537505 |48.146321 |519.0 |11.537505 |48.146321 +Paris |2.351773 |48.845538 |35.0 |2.351773 |48.845538 +Amsterdam |4.850312 |52.347557 |2.0 |4.850312 |52.347557 +Berlin |13.390889 |52.486701 |34.0 |13.390889 |52.486701 ; selectFilterByRegionPoint SELECT city, region, ST_X(location) x FROM geo WHERE ST_X(ST_WKTTOSQL(region_point)) < 0 ORDER BY x; city:s | region:s | x:d -San Francisco |Americas |-122.39422800019383 -Mountain View |Americas |-122.08384302444756 -Phoenix |Americas |-111.97350500151515 -Chicago |Americas |-87.63787407428026 -New York |Americas |-73.9900270756334 +San Francisco |Americas |-122.394228 +Mountain View |Americas |-122.083843 +Phoenix |Americas |-111.973505 +Chicago |Americas |-87.637874 +New York |Americas |-73.990027 ; selectLargeLat diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java index de29df32f1a29..d3e14f5a00a52 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java @@ -54,7 +54,8 @@ public static SqlQueryResponse createRandomInstance(String cursor, Mode mode, bo if (randomBoolean()) { columns = new ArrayList<>(columnCount); for (int i = 0; i < columnCount; i++) { - 
columns.add(new ColumnInfo(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), randomInt(25))); + columns.add(new ColumnInfo(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), + randomBoolean() ? null : randomInt(25))); } } diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java index 1efc1820a4170..ecb22a4f0274b 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java @@ -107,7 +107,7 @@ public String esType() { /** * Used by JDBC */ - public int displaySize() { + public Integer displaySize() { return displaySize; } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java index a32d4deb55e88..a4c30bb35aebe 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java @@ -9,10 +9,12 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.sort.SortBuilders; +import java.util.ArrayList; +import java.util.List; + import static java.util.Collections.singletonList; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -32,12 +34,19 @@ public void testSqlTranslateAction() { SqlTranslateResponse response = new SqlTranslateRequestBuilder(client(), SqlTranslateAction.INSTANCE) .query("SELECT " + columns + " FROM test ORDER BY count").get(); SearchSourceBuilder source = response.source(); - FetchSourceContext fetch = source.fetchSource(); - assertTrue(fetch.fetchSource()); - assertArrayEquals(new String[] { "data", "count" }, fetch.includes()); - assertEquals( - singletonList(new FieldAndFormat("date", "epoch_millis")), - source.docValueFields()); + List actualFields = source.fetchFields(); + List expectedFields = new ArrayList<>(3); + if (columnOrder) { + expectedFields.add(new FieldAndFormat("data", null)); + expectedFields.add(new FieldAndFormat("count", null)); + expectedFields.add(new FieldAndFormat("date", "epoch_millis")); + } else { + expectedFields.add(new FieldAndFormat("date", "epoch_millis")); + expectedFields.add(new FieldAndFormat("data", null)); + expectedFields.add(new FieldAndFormat("count", null)); + } + + assertEquals(expectedFields, actualFields); assertEquals(singletonList(SortBuilders.fieldSort("count").missing("_last").unmappedType("long")), source.sorts()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java index 4b07b007ce68a..b5a7ccb1194f4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java @@ -23,9 +23,9 @@ import 
org.elasticsearch.xpack.sql.planner.Planner; import org.elasticsearch.xpack.sql.planner.PlanningException; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; -import org.elasticsearch.xpack.sql.session.SqlConfiguration; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursor.Page; +import org.elasticsearch.xpack.sql.session.SqlConfiguration; import org.elasticsearch.xpack.sql.session.SqlSession; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.stats.QueryMetric; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 27a95d25dafed..d40909cad9ce3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -84,10 +84,10 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.action.ActionListener.wrap; +import static org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder.SWITCH_TO_FIELDS_API_VERSION; // TODO: add retry/back-off public class Querier { - private static final Logger log = LogManager.getLogger(Querier.class); private final PlanExecutor planExecutor; @@ -144,12 +144,17 @@ public void query(List output, QueryContainer query, String index, Ac public static SearchRequest prepareRequest(Client client, SearchSourceBuilder source, TimeValue timeout, boolean includeFrozen, String... indices) { - return client.prepareSearch(indices) - // always track total hits accurately - .setTrackTotalHits(true).setAllowPartialSearchResults(false).setSource(source).setTimeout(timeout) - .setIndicesOptions( - includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS) - .request(); + source.trackTotalHits(true); + source.timeout(timeout); + + SearchRequest searchRequest = new SearchRequest(SWITCH_TO_FIELDS_API_VERSION); + searchRequest.indices(indices); + searchRequest.source(source); + searchRequest.allowPartialSearchResults(false); + searchRequest.indicesOptions( + includeFrozen ? 
IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS); + + return searchRequest; } protected static void logSearchResponse(SearchResponse response, Logger logger) { @@ -488,13 +493,12 @@ protected void handleResponse(SearchResponse response, ActionListener list private HitExtractor createExtractor(FieldExtraction ref) { if (ref instanceof SearchHitFieldRef) { SearchHitFieldRef f = (SearchHitFieldRef) ref; - return new FieldHitExtractor(f.name(), f.fullFieldName(), f.getDataType(), cfg.zoneId(), f.useDocValue(), f.hitName(), - multiValueFieldLeniency); + return new FieldHitExtractor(f.name(), f.getDataType(), cfg.zoneId(), f.hitName(), multiValueFieldLeniency); } if (ref instanceof ScriptFieldRef) { ScriptFieldRef f = (ScriptFieldRef) ref; - return new FieldHitExtractor(f.name(), null, cfg.zoneId(), true, multiValueFieldLeniency); + return new FieldHitExtractor(f.name(), null, cfg.zoneId(), multiValueFieldLeniency); } if (ref instanceof ComputedRef) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java index 93dfba550ea52..1e9a72c67fa70 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java @@ -10,7 +10,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; @@ -25,9 +24,6 @@ import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.querydsl.container.ScoreSort; -import java.util.List; - -import static java.util.Collections.singletonList; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.search.sort.SortBuilders.fieldSort; import static org.elasticsearch.search.sort.SortBuilders.scoreSort; @@ -37,8 +33,6 @@ public abstract class SourceGenerator { private SourceGenerator() {} - private static final List NO_STORED_FIELD = singletonList(StoredFieldsContext._NONE_); - public static SearchSourceBuilder sourceBuilder(QueryContainer container, QueryBuilder filter, Integer size) { QueryBuilder finalQuery = null; // add the source @@ -64,7 +58,6 @@ public static SearchSourceBuilder sourceBuilder(QueryContainer container, QueryB // NB: the sortBuilder takes care of eliminating duplicates container.fields().forEach(f -> f.v1().collectFields(sortBuilder)); sortBuilder.build(source); - optimize(sortBuilder, source); // add the aggs (if present) AggregationBuilder aggBuilder = container.aggs().asAggBuilder(); @@ -166,29 +159,15 @@ private static void sorting(QueryContainer container, SearchSourceBuilder source } } - private static void optimize(QlSourceBuilder sqlSource, SearchSourceBuilder builder) { - if (sqlSource.noSource()) { - disableSource(builder); - } - } - private static void optimize(QueryContainer query, SearchSourceBuilder builder) { // if only aggs are needed, don't retrieve any docs and remove scoring if (query.isAggsOnly()) { builder.size(0); 
builder.trackScores(false); - // disable source fetching (only doc values are used) - disableSource(builder); } if (query.shouldTrackHits()) { builder.trackTotalHits(true); } - } - - private static void disableSource(SearchSourceBuilder builder) { builder.fetchSource(FetchSourceContext.DO_NOT_FETCH_SOURCE); - if (builder.storedFields() == null) { - builder.storedFields(NO_STORED_FIELD); - } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index 8eb5fc6b7b1a0..8f2e089c01c65 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -42,17 +42,17 @@ public class FieldHitExtractor extends AbstractFieldHitExtractor { */ static final String NAME = "f"; - public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue, boolean arrayLeniency) { - super(name, dataType, zoneId, useDocValue, arrayLeniency); + public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean arrayLeniency) { + super(name, dataType, zoneId, arrayLeniency); } - public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue) { - super(name, dataType, zoneId, useDocValue); + public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId) { + super(name, dataType, zoneId); } - public FieldHitExtractor(String name, String fullFieldName, DataType dataType, ZoneId zoneId, boolean useDocValue, String hitName, + public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, String hitName, boolean arrayLeniency) { - super(name, fullFieldName, dataType, zoneId, useDocValue, hitName, arrayLeniency); + super(name, dataType, zoneId, hitName, arrayLeniency); } public FieldHitExtractor(StreamInput in) throws IOException { @@ -91,19 +91,15 @@ private boolean isGeoPointArray(List list) { return list.get(0) instanceof Number; } - - @Override - protected boolean isFromDocValuesOnly(DataType dataType) { - return SqlDataTypes.isFromDocValuesOnly(dataType); - } - @Override protected Object unwrapCustomValue(Object values) { DataType dataType = dataType(); if (dataType == GEO_POINT) { try { - GeoPoint geoPoint = GeoUtils.parseGeoPoint(values, true); + @SuppressWarnings("unchecked") + Map map = (Map) values; + GeoPoint geoPoint = GeoUtils.parseGeoPoint(map.get("coordinates"), true); return new GeoShape(geoPoint.lon(), geoPoint.lat()); } catch (ElasticsearchParseException ex) { throw new SqlIllegalArgumentException("Cannot parse geo_point value [{}] (returned by [{}])", values, fieldName()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 4ff9ef54d8452..e2f067ed95f3f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -6,7 +6,10 @@ */ package org.elasticsearch.xpack.sql.plugin; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import 
org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.service.ClusterService; @@ -43,15 +46,18 @@ import static java.util.Collections.unmodifiableList; import static org.elasticsearch.action.ActionListener.wrap; +import static org.elasticsearch.xpack.ql.plugin.TransportActionUtils.executeRequestWithRetryAttempt; import static org.elasticsearch.xpack.sql.plugin.Transports.clusterName; import static org.elasticsearch.xpack.sql.plugin.Transports.username; import static org.elasticsearch.xpack.sql.proto.Mode.CLI; public class TransportSqlQueryAction extends HandledTransportAction { + private static final Logger log = LogManager.getLogger(TransportSqlQueryAction.class); private final SecurityContext securityContext; private final ClusterService clusterService; private final PlanExecutor planExecutor; private final SqlLicenseChecker sqlLicenseChecker; + private final TransportService transportService; @Inject public TransportSqlQueryAction(Settings settings, ClusterService clusterService, TransportService transportService, @@ -64,19 +70,21 @@ public TransportSqlQueryAction(Settings settings, ClusterService clusterService, this.clusterService = clusterService; this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; + this.transportService = transportService; } @Override protected void doExecute(Task task, SqlQueryRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); - operation(planExecutor, request, listener, username(securityContext), clusterName(clusterService)); + operation(planExecutor, request, listener, username(securityContext), clusterName(clusterService), transportService, + clusterService); } /** * Actual implementation of the action. Statically available to support embedded mode. 
*/ static void operation(PlanExecutor planExecutor, SqlQueryRequest request, ActionListener listener, - String username, String clusterName) { + String username, String clusterName, TransportService transportService, ClusterService clusterService) { // The configuration is always created however when dealing with the next page, only the timeouts are relevant // the rest having default values (since the query is already created) SqlConfiguration cfg = new SqlConfiguration(request.zoneId(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(), @@ -84,8 +92,12 @@ static void operation(PlanExecutor planExecutor, SqlQueryRequest request, Action request.fieldMultiValueLeniency(), request.indexIncludeFrozen()); if (Strings.hasText(request.cursor()) == false) { - planExecutor.sql(cfg, request.query(), request.params(), - wrap(p -> listener.onResponse(createResponseWithSchema(request, p)), listener::onFailure)); + executeRequestWithRetryAttempt(clusterService, listener::onFailure, + onFailure -> planExecutor.sql(cfg, request.query(), request.params(), + wrap(p -> listener.onResponse(createResponseWithSchema(request, p)), onFailure)), + node -> transportService.sendRequest(node, SqlQueryAction.NAME, request, + new ActionListenerResponseHandler<>(listener, SqlQueryResponse::new, ThreadPool.Names.SAME)), + log); } else { Tuple decoded = Cursors.decodeFromStringWithZone(request.cursor()); planExecutor.nextPage(cfg, decoded.v1(), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 865b1aea7b447..13b8a59882f77 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.ql.querydsl.query.NestedQuery; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.search.SourceGenerator; import org.elasticsearch.xpack.sql.expression.function.Score; @@ -333,29 +332,6 @@ private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) { FieldAttribute rootField = fieldAttr; StringBuilder fullFieldName = new StringBuilder(fieldAttr.field().getName()); - // Only if the field is not an alias (in which case it will be taken out from docvalue_fields if it's isAggregatable()), - // go up the tree of parents until a non-object (and non-nested) type of field is found and use that specific parent - // as the field to extract data from, from _source. We do it like this because sub-fields are not in the _source, only - // the root field to which those sub-fields belong to, are. Instead of "text_field.keyword_subfield" for _source extraction, - // we use "text_field", because there is no source for "keyword_subfield". 
- /* - * "text_field": { - * "type": "text", - * "fields": { - * "keyword_subfield": { - * "type": "keyword" - * } - * } - * } - */ - if (fieldAttr.field().isAlias() == false) { - while (actualField.parent() != null - && actualField.parent().field().getDataType() != DataTypes.OBJECT - && actualField.parent().field().getDataType() != DataTypes.NESTED - && SqlDataTypes.isFromDocValuesOnly(actualField.field().getDataType()) == false) { - actualField = actualField.parent(); - } - } while (rootField.parent() != null) { fullFieldName.insert(0, ".").insert(0, rootField.parent().field().getName()); rootField = rootField.parent(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java index 26c0605c87361..675c6ed0207f3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java @@ -60,11 +60,7 @@ public void collectFields(QlSourceBuilder sourceBuilder) { if (hitName != null) { return; } - if (docValue) { - sourceBuilder.addDocField(name, SqlDataTypes.format(dataType)); - } else { - sourceBuilder.addSourceField(name); - } + sourceBuilder.addFetchField(name, SqlDataTypes.format(dataType)); } @Override diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SqlSourceBuilderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SqlSourceBuilderTests.java index ca9de884103ee..1f5922c4d1f35 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SqlSourceBuilderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SqlSourceBuilderTests.java @@ -8,26 +8,26 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder; -import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; public class SqlSourceBuilderTests extends ESTestCase { + public void testSqlSourceBuilder() { final QlSourceBuilder ssb = new QlSourceBuilder(); final SearchSourceBuilder source = new SearchSourceBuilder(); ssb.trackScores(); - ssb.addSourceField("foo"); - ssb.addSourceField("foo2"); - ssb.addDocField("bar", null); - ssb.addDocField("bar2", null); + ssb.addFetchField("foo", null); + ssb.addFetchField("foo2", "test"); final Script s = new Script("eggplant"); ssb.addScriptField("baz", s); final Script s2 = new Script("potato"); @@ -35,9 +35,16 @@ public void testSqlSourceBuilder() { ssb.build(source); assertTrue(source.trackScores()); - FetchSourceContext fsc = source.fetchSource(); - assertThat(Arrays.asList(fsc.includes()), contains("foo", "foo2")); - assertThat(source.docValueFields().stream().map(ff -> ff.field).collect(Collectors.toList()), contains("bar", "bar2")); + assertNull(source.fetchSource()); + assertNull(source.docValueFields()); + + List fetchFields = 
source.fetchFields(); + assertThat(fetchFields.size(), equalTo(2)); + assertThat(fetchFields.get(0).field, equalTo("foo")); + assertThat(fetchFields.get(0).format, is(nullValue())); + assertThat(fetchFields.get(1).field, equalTo("foo2")); + assertThat(fetchFields.get(1).format, equalTo("test")); + Map scriptFields = source.scriptFields() .stream() .collect(Collectors.toMap(SearchSourceBuilder.ScriptField::fieldName, SearchSourceBuilder.ScriptField::script)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java index fc02d27221166..1b35d50be0f99 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java @@ -74,7 +74,7 @@ protected ComputingExtractor mutateInstance(ComputingExtractor instance) throws public void testGet() { String fieldName = randomAlphaOfLength(5); ChainingProcessor extractor = new ChainingProcessor( - new HitExtractorProcessor(new FieldHitExtractor(fieldName, DOUBLE, UTC, true, false)), + new HitExtractorProcessor(new FieldHitExtractor(fieldName, DOUBLE, UTC, false)), new MathProcessor(MathOperation.LOG)); int times = between(1, 1000); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index b0dc9af8542c0..72e2dc04b3ec9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -6,11 +6,8 @@ */ package org.elasticsearch.xpack.sql.execution.search.extractor; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; @@ -31,7 +28,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.StringJoiner; import java.util.function.Supplier; import static java.util.Arrays.asList; @@ -40,7 +36,6 @@ import static org.elasticsearch.common.time.DateUtils.toMilliSeconds; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME_NANOS; -import static org.elasticsearch.xpack.sql.type.SqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.sql.type.SqlDataTypes.SHAPE; import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; @@ -51,7 +46,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase randomFrom(SqlDataTypes.types())), randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone), - randomBoolean(), instance.hitName() + "mutated", - randomBoolean()); + randomBoolean() + ); } public void testGetDottedValueWithDocValues() { @@ -87,7 
+81,7 @@ public void testGetDottedValueWithDocValues() { String child = randomAlphaOfLength(5); String fieldName = grandparent + "." + parent + "." + child; - FieldHitExtractor extractor = getFieldHitExtractor(fieldName, true); + FieldHitExtractor extractor = getFieldHitExtractor(fieldName); int times = between(1, 1000); for (int i = 0; i < times; i++) { @@ -104,52 +98,9 @@ public void testGetDottedValueWithDocValues() { } } - public void testGetDottedValueWithSource() throws Exception { - String grandparent = randomAlphaOfLength(5); - String parent = randomAlphaOfLength(5); - String child = randomAlphaOfLength(5); - String fieldName = grandparent + "." + parent + "." + child; - - FieldHitExtractor extractor = getFieldHitExtractor(fieldName, false); - - int times = between(1, 1000); - for (int i = 0; i < times; i++) { - /* We use values that are parsed from json as "equal" to make the - * test simpler. */ - Object value = randomValue(); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - boolean hasGrandparent = randomBoolean(); - boolean hasParent = randomBoolean(); - boolean hasChild = randomBoolean(); - boolean hasSource = hasGrandparent && hasParent && hasChild; - - source.startObject(); - if (hasGrandparent) { - source.startObject(grandparent); - if (hasParent) { - source.startObject(parent); - if (hasChild) { - source.field(child, value); - if (randomBoolean()) { - source.field(fieldName + randomAlphaOfLength(3), value + randomAlphaOfLength(3)); - } - } - source.endObject(); - } - source.endObject(); - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - Object extract = extractor.extract(hit); - assertFieldHitEquals(hasSource ? value : null, extract); - } - } - public void testGetDocValue() { String fieldName = randomAlphaOfLength(5); - FieldHitExtractor extractor = getFieldHitExtractor(fieldName, true); + FieldHitExtractor extractor = getFieldHitExtractor(fieldName); int times = between(1, 1000); for (int i = 0; i < times; i++) { @@ -187,463 +138,98 @@ public void testGetDateNanos() { assertEquals(zdt, extractor.extract(hit)); } - public void testGetSource() throws IOException { - String fieldName = randomAlphaOfLength(5); - FieldHitExtractor extractor = getFieldHitExtractor(fieldName, false); - - int times = between(1, 1000); - for (int i = 0; i < times; i++) { - /* We use values that are parsed from json as "equal" to make the - * test simpler. 
*/ - Object value = randomValue(); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - source.startObject(); { - source.field(fieldName, value); - if (randomBoolean()) { - source.field(fieldName + "_random_junk", value + "_random_junk"); - } - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - assertFieldHitEquals(value, extractor.extract(hit)); - } - } - public void testToString() { - assertEquals("hit.field@hit@Europe/Berlin", - new FieldHitExtractor("hit.field", null, null, ZoneId.of("Europe/Berlin"), true, "hit", false).toString()); + assertEquals( + "hit.field@hit@Europe/Berlin", + new FieldHitExtractor("hit.field", null, ZoneId.of("Europe/Berlin"), "hit", false).toString() + ); } public void testMultiValuedDocValue() { String fieldName = randomAlphaOfLength(5); - FieldHitExtractor fe = getFieldHitExtractor(fieldName, true); + FieldHitExtractor fe = getFieldHitExtractor(fieldName); DocumentField field = new DocumentField(fieldName, asList("a", "b")); SearchHit hit = new SearchHit(1, null, singletonMap(fieldName, field), null); QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); } - public void testMultiValuedSourceValue() throws IOException { - String fieldName = randomAlphaOfLength(5); - FieldHitExtractor fe = getFieldHitExtractor(fieldName, false); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - source.startObject(); { - source.field(fieldName, asList("a", "b")); - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); - assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); - } - - public void testSingleValueArrayInSource() throws IOException { - String fieldName = randomAlphaOfLength(5); - FieldHitExtractor fe = getFieldHitExtractor(fieldName, false); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - Object value = randomValue(); - source.startObject(); { - source.field(fieldName, Collections.singletonList(value)); - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - assertFieldHitEquals(value, fe.extract(hit)); - } - - public void testExtractSourcePath() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); + public void testExtractSourcePath() throws IOException { + FieldHitExtractor fe = getFieldHitExtractor("a.b.c"); Object value = randomValue(); - Map map = singletonMap("a", singletonMap("b", singletonMap("c", value))); - assertThat(fe.extractFromSource(map), is(value)); - } - - public void testExtractSourceIncorrectPath() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d", false); - Object value = randomNonNullValue(); - Map map = singletonMap("a", singletonMap("b", singletonMap("c", value))); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map)); - assertThat(ex.getMessage(), is("Cannot extract value [a.b.c.d] from source")); + DocumentField field = new DocumentField("a.b.c", singletonList(value)); + SearchHit hit = new SearchHit(1, null, null, singletonMap("a.b.c", field), null); + 
assertThat(fe.extract(hit), is(value)); } - + public void testMultiValuedSource() { - FieldHitExtractor fe = getFieldHitExtractor("a", false); + FieldHitExtractor fe = getFieldHitExtractor("a"); Object value = randomValue(); - Map map = singletonMap("a", asList(value, value)); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map)); + DocumentField field = new DocumentField("a", asList(value, value)); + SearchHit hit = new SearchHit(1, null, null, singletonMap("a", field), null); + QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported")); } - + public void testMultiValuedSourceAllowed() { - FieldHitExtractor fe = new FieldHitExtractor("a", null, UTC, false, true); + FieldHitExtractor fe = new FieldHitExtractor("a", null, UTC, true); Object valueA = randomValue(); Object valueB = randomValue(); - Map map = singletonMap("a", asList(valueA, valueB)); - assertEquals(valueA, fe.extractFromSource(map)); - } - - public void testFieldWithDots() { - FieldHitExtractor fe = getFieldHitExtractor("a.b", false); - Object value = randomValue(); - Map map = singletonMap("a.b", value); - assertEquals(value, fe.extractFromSource(map)); - } - - public void testNestedFieldWithDots() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); - Object value = randomValue(); - Map map = singletonMap("a", singletonMap("b.c", value)); - assertEquals(value, fe.extractFromSource(map)); - } - - public void testNestedFieldWithDotsWithNestedField() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d", false); - Object value = randomValue(); - Map map = singletonMap("a", singletonMap("b.c", singletonMap("d", value))); - assertEquals(value, fe.extractFromSource(map)); - } - - public void testNestedFieldWithDotsWithNestedFieldWithDots() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d.e", false); - Object value = randomValue(); - Map map = singletonMap("a", singletonMap("b.c", singletonMap("d.e", value))); - assertEquals(value, fe.extractFromSource(map)); - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - public void testNestedFieldsWithDotsAndRandomHierarchy() { - String[] path = new String[100]; - StringJoiner sj = new StringJoiner("."); - for (int i = 0; i < 100; i++) { - path[i] = randomAlphaOfLength(randomIntBetween(1, 10)); - sj.add(path[i]); - } - boolean arrayLeniency = randomBoolean(); - FieldHitExtractor fe = new FieldHitExtractor(sj.toString(), null, UTC, false, arrayLeniency); - - List paths = new ArrayList<>(path.length); - int start = 0; - while (start < path.length) { - int end = randomIntBetween(start + 1, path.length); - sj = new StringJoiner("."); - for (int j = start; j < end; j++) { - sj.add(path[j]); - } - paths.add(sj.toString()); - start = end; - } - - /* - * Randomize how many values the field to look for will have (1 - 3). It's not really relevant how many values there are in the list - * but that the list has one element or more than one. - * If it has one value, then randomize the way it's indexed: as a single-value array or not e.g.: "a":"value" or "a":["value"]. - * If it has more than one value, it will always be an array e.g.: "a":["v1","v2","v3"]. - */ - int valuesCount = randomIntBetween(1, 3); - Object value = randomValue(); - if (valuesCount == 1) { - value = randomBoolean() ? 
singletonList(value) : value; - } else { - value = new ArrayList(valuesCount); - for(int i = 0; i < valuesCount; i++) { - ((List) value).add(randomValue()); - } - } - - // the path to the randomly generated fields path - StringBuilder expected = new StringBuilder(paths.get(paths.size() - 1)); - // the actual value we will be looking for in the test at the end - Map map = singletonMap(paths.get(paths.size() - 1), value); - // build the rest of the path and the expected path to check against in the error message - for (int i = paths.size() - 2; i >= 0; i--) { - map = singletonMap(paths.get(i), randomBoolean() ? singletonList(map) : map); - expected.insert(0, paths.get(i) + "."); - } - - if (valuesCount == 1 || arrayLeniency) { - // if the number of generated values is 1, just check we return the correct value - assertEquals(value instanceof List ? ((List) value).get(0) : value, fe.extractFromSource(map)); - } else { - // if we have an array with more than one value in it, check that we throw the correct exception and exception message - final Map map2 = Collections.unmodifiableMap(map); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map2)); - assertThat(ex.getMessage(), is("Arrays (returned by [" + expected + "]) are not supported")); - } - } - - public void testExtractSourceIncorrectPathWithFieldWithDots() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d.e", false); - Object value = randomNonNullValue(); - Map map = singletonMap("a", singletonMap("b.c", singletonMap("d", value))); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map)); - assertThat(ex.getMessage(), is("Cannot extract value [a.b.c.d.e] from source")); - } - - public void testFieldWithDotsAndCommonPrefix() { - FieldHitExtractor fe1 = getFieldHitExtractor("a.d", false); - FieldHitExtractor fe2 = getFieldHitExtractor("a.b.c", false); - Object value = randomNonNullValue(); - Map map = new HashMap<>(); - map.put("a", singletonMap("d", value)); - map.put("a.b", singletonMap("c", value)); - assertEquals(value, fe1.extractFromSource(map)); - assertEquals(value, fe2.extractFromSource(map)); - } - - public void testFieldWithDotsAndCommonPrefixes() { - FieldHitExtractor fe1 = getFieldHitExtractor("a1.b.c.d1.e.f.g1", false); - FieldHitExtractor fe2 = getFieldHitExtractor("a2.b.c.d2.e.f.g2", false); - Object value = randomNonNullValue(); - Map map = new HashMap<>(); - map.put("a1", singletonMap("b.c", singletonMap("d1", singletonMap("e.f", singletonMap("g1", value))))); - map.put("a2", singletonMap("b.c", singletonMap("d2", singletonMap("e.f", singletonMap("g2", value))))); - assertEquals(value, fe1.extractFromSource(map)); - assertEquals(value, fe2.extractFromSource(map)); - } - - public void testFieldWithDotsAndSamePathButDifferentHierarchy() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d.e.f.g", false); - Object value = randomNonNullValue(); - Map map = new HashMap<>(); - map.put("a.b", singletonMap("c", singletonMap("d.e", singletonMap("f.g", value)))); - map.put("a", singletonMap("b.c", singletonMap("d.e", singletonMap("f", singletonMap("g", value))))); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map)); - assertThat(ex.getMessage(), is("Multiple values (returned by [a.b.c.d.e.f.g]) are not supported")); - } - - public void testFieldsWithSingleValueArrayAsSubfield() { - FieldHitExtractor fe = getFieldHitExtractor("a.b", false); - 
Object value = randomNonNullValue(); - Map map = new HashMap<>(); - // "a" : [{"b" : "value"}] - map.put("a", singletonList(singletonMap("b", value))); - assertEquals(value, fe.extractFromSource(map)); - } - - public void testFieldsWithMultiValueArrayAsSubfield() { - FieldHitExtractor fe = getFieldHitExtractor("a.b", false); - Map map = new HashMap<>(); - // "a" : [{"b" : "value1"}, {"b" : "value2"}] - map.put("a", asList(singletonMap("b", randomNonNullValue()), singletonMap("b", randomNonNullValue()))); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map)); - assertThat(ex.getMessage(), is("Arrays (returned by [a.b]) are not supported")); - } - - public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); - Object value = randomNonNullValue(); - Map map = new HashMap<>(); - // "a" : [{"b" : [{"c" : "value"}]}] - map.put("a", singletonList(singletonMap("b", singletonList(singletonMap("c", value))))); - assertEquals(value, fe.extractFromSource(map)); - } - - public void testFieldsWithMultiValueArrayAsSubfield_ThreeNestedLists() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); - Map map = new HashMap<>(); - // "a" : [{"b" : [{"c" : ["value1", "value2"]}]}] - map.put("a", singletonList(singletonMap("b", singletonList(singletonMap("c", asList("value1", "value2")))))); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map)); - assertThat(ex.getMessage(), is("Arrays (returned by [a.b.c]) are not supported")); - } - - public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists2() { - FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); - Object value = randomNonNullValue(); - Map map = new HashMap<>(); - // "a" : [{"b" : {"c" : ["value"]}]}] - map.put("a", singletonList(singletonMap("b", singletonMap("c", singletonList(value))))); - assertEquals(value, fe.extractFromSource(map)); - } - - public void testObjectsForSourceValue() throws IOException { - String fieldName = randomAlphaOfLength(5); - FieldHitExtractor fe = getFieldHitExtractor(fieldName, false); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - source.startObject(); { - source.startObject(fieldName); { - source.field("b", "c"); - } - source.endObject(); - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); - assertThat(ex.getMessage(), is("Objects (returned by [" + fieldName + "]) are not supported")); + DocumentField field = new DocumentField("a", asList(valueA, valueB)); + SearchHit hit = new SearchHit(1, null, null, singletonMap("a", field), null); + assertEquals(valueA, fe.extract(hit)); } public void testGeoShapeExtraction() { String fieldName = randomAlphaOfLength(5); FieldHitExtractor fe = new FieldHitExtractor(fieldName, randomBoolean() ? 
GEO_SHAPE : SHAPE, UTC, false); - Map map = new HashMap<>(); - map.put(fieldName, "POINT (1 2)"); - assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); - - map = new HashMap<>(); - assertNull(fe.extractFromSource(map)); - } + Map map = new HashMap<>(2); + map.put("coordinates", asList(1d, 2d)); + map.put("type", "Point"); + DocumentField field = new DocumentField(fieldName, singletonList(map)); + SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName, field), null); + assertEquals(new GeoShape(1, 2), fe.extract(hit)); + } + public void testMultipleGeoShapeExtraction() { String fieldName = randomAlphaOfLength(5); FieldHitExtractor fe = new FieldHitExtractor(fieldName, randomBoolean() ? GEO_SHAPE : SHAPE, UTC, false); - Map map = new HashMap<>(); - map.put(fieldName, "POINT (1 2)"); - assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); - - map = new HashMap<>(); - assertNull(fe.extractFromSource(map)); - - Map map2 = new HashMap<>(); - map2.put(fieldName, Arrays.asList("POINT (1 2)", "POINT (3 4)")); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extractFromSource(map2)); - assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); - - FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, - randomBoolean() ? GEO_SHAPE : SHAPE, UTC, false, true); - assertEquals(new GeoShape(1, 2), lenientFe.extractFromSource(map2)); - } - - public void testGeoPointExtractionFromSource() throws IOException { - int layers = randomIntBetween(1, 3); - String pathCombined = ""; - double lat = randomDoubleBetween(-90, 90, true); - double lon = randomDoubleBetween(-180, 180, true); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - boolean[] arrayWrap = new boolean[layers - 1]; - source.startObject(); { - for (int i = 0; i < layers - 1; i++) { - arrayWrap[i] = randomBoolean(); - String name = randomAlphaOfLength(10); - source.field(name); - if (arrayWrap[i]) { - source.startArray(); - } - source.startObject(); - pathCombined = pathCombined + name + "."; - } - String name = randomAlphaOfLength(10); - pathCombined = pathCombined + name; - source.field(name, randomPoint(lat, lon)); - for (int i = layers - 2; i >= 0; i--) { - source.endObject(); - if (arrayWrap[i]) { - source.endArray(); - } - } - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - - FieldHitExtractor fe = new FieldHitExtractor(pathCombined, GEO_POINT, UTC, false); - assertEquals(new GeoShape(lon, lat), fe.extract(hit)); - } - - public void testMultipleGeoPointExtractionFromSource() throws IOException { - double lat = randomDoubleBetween(-90, 90, true); - double lon = randomDoubleBetween(-180, 180, true); - SearchHit hit = new SearchHit(1); - String fieldName = randomAlphaOfLength(5); - int arraySize = randomIntBetween(2, 4); - XContentBuilder source = JsonXContent.contentBuilder(); - source.startObject(); { - source.startArray(fieldName); - source.value(randomPoint(lat, lon)); - for (int i = 1; i < arraySize; i++) { - source.value(randomPoint(lat, lon)); - } - source.endArray(); - } - source.endObject(); - BytesReference sourceRef = BytesReference.bytes(source); - hit.sourceRef(sourceRef); - - FieldHitExtractor fe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, false); - QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); - assertThat(ex.getMessage(), is("Arrays (returned by [" 
+ fieldName + "]) are not supported")); - - FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, false, true); - assertEquals(new GeoShape(lon, lat), lenientFe.extract(hit)); - } - - public void testGeoPointExtractionFromDocValues() { - String fieldName = randomAlphaOfLength(5); - FieldHitExtractor fe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, true); - DocumentField field = new DocumentField(fieldName, singletonList("2, 1")); + + Map map1 = new HashMap<>(2); + map1.put("coordinates", asList(1d, 2d)); + map1.put("type", "Point"); + Map map2 = new HashMap<>(2); + map2.put("coordinates", asList(3d, 4d)); + map2.put("type", "Point"); + DocumentField field = new DocumentField(fieldName, asList(map1, map2)); SearchHit hit = new SearchHit(1, null, singletonMap(fieldName, field), null); - assertEquals(new GeoShape(1, 2), fe.extract(hit)); - hit = new SearchHit(1); - assertNull(fe.extract(hit)); - } - public void testGeoPointExtractionFromMultipleDocValues() { - String fieldName = randomAlphaOfLength(5); - FieldHitExtractor fe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, true); - SearchHit hit = new SearchHit(1, null, singletonMap(fieldName, - new DocumentField(fieldName, Arrays.asList("2,1", "3,4"))), null); QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); - - FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, true, true); - assertEquals(new GeoShape(1, 2), lenientFe.extract(hit)); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, randomBoolean() ? GEO_SHAPE : SHAPE, UTC, true); + assertEquals(new GeoShape(3, 4), lenientFe.extract(new SearchHit(1, null, null, singletonMap(fieldName, + new DocumentField(fieldName, singletonList(map2))), null))); } - private FieldHitExtractor getFieldHitExtractor(String fieldName, boolean useDocValue) { - return new FieldHitExtractor(fieldName, null, UTC, useDocValue); + private FieldHitExtractor getFieldHitExtractor(String fieldName) { + return new FieldHitExtractor(fieldName, null, UTC); } private Object randomValue() { - Supplier value = randomFrom(Arrays.asList( + Supplier value = randomFrom( + Arrays.asList( () -> randomAlphaOfLength(10), ESTestCase::randomLong, ESTestCase::randomDouble, ESTestCase::randomInt, () -> BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), () -> new BigDecimal("20012312345621343256123456254.20012312345621343256123456254"), - () -> null)); - return value.get(); - } - - private Object randomNonNullValue() { - Supplier value = randomFrom(Arrays.asList( - () -> randomAlphaOfLength(10), - ESTestCase::randomLong, - ESTestCase::randomDouble, - ESTestCase::randomInt, - () -> BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), - () -> new BigDecimal("20012312345621343256123456254.20012312345621343256123456254"))); - return value.get(); - } - - private void assertFieldHitEquals(Object expected, Object actual) { - if (expected instanceof BigDecimal) { - // parsing will, by default, build a Double even if the initial value is BigDecimal - // Elasticsearch does this the same when returning the results - assertEquals(((BigDecimal) expected).doubleValue(), actual); - } else { - assertEquals(expected, actual); - } - } - - private Object randomPoint(double lat, double lon) { - Supplier value = randomFrom(Arrays.asList( - () -> lat + "," + lon, - () -> Arrays.asList(lon, lat), - () -> { - Map map1 = new 
HashMap<>();
-                map1.put("lat", lat);
-                map1.put("lon", lon);
-                return map1;
-            }
-        ));
+                () -> null
+            )
+        );
         return value.get();
     }
 }
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/sql/translate.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/sql/translate.yml
index 3e61e2ed0e9eb..6439b6f1be92b 100644
--- a/x-pack/plugin/src/test/resources/rest-api-spec/test/sql/translate.yml
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/sql/translate.yml
@@ -17,11 +17,8 @@
   - match:
       $body:
         size: 1000
-        _source:
-          includes:
-            - int
-            - str
-          excludes: []
+        _source: false
+        fields: [ {"field" : "int" }, {"field" : "str" } ]
         sort:
           - int:
               order: asc

From 71d43b598d7d65fe9c9a810f20e7c76fbaec45f2 Mon Sep 17 00:00:00 2001
From: Przemyslaw Gomulka
Date: Wed, 10 Feb 2021 10:22:34 +0100
Subject: [PATCH 11/24] Refactor usage of compatible version (#68648)

Compatible API version is at the moment represented by both Version and a
byte holding the major version. This can lead to confusion about which
representation to use, as well as to the incorrect assumption that minor
versions are supported (with the use of Version.V_7_0_0).
The current usage of XContentParser.useCompatible also does not allow
defining two compatible implementations. This is not about supporting N-2
compatibility, but about allowing development to continue once a major
release is performed.
This commit introduces the CompatibleVersion object, responsible for
wrapping the major version of the compatible API.

relates #68100
---
 .../RestApiCompatibleVersion.java             | 44 +++++++++
 .../xcontent/NamedXContentRegistry.java       |  4 +-
 .../common/xcontent/XContent.java             |  7 +-
 .../common/xcontent/XContentBuilder.java      | 22 +++--
 .../common/xcontent/XContentParser.java       |  3 +-
 .../common/xcontent/XContentSubParser.java    |  5 +-
 .../common/xcontent/cbor/CborXContent.java    |  6 +-
 .../xcontent/cbor/CborXContentParser.java     |  6 +-
 .../common/xcontent/json/JsonXContent.java    |  6 +-
 .../xcontent/json/JsonXContentParser.java     |  8 +-
 .../common/xcontent/smile/SmileXContent.java  |  6 +-
 .../xcontent/smile/SmileXContentParser.java   |  6 +-
 .../support/AbstractXContentParser.java       | 14 +--
 .../common/xcontent/yaml/YamlXContent.java    |  6 +-
 .../xcontent/yaml/YamlXContentParser.java     |  6 +-
 .../main/java/org/elasticsearch/Version.java  | 11 +--
 .../java/org/elasticsearch/node/Node.java     |  9 +-
 .../elasticsearch/rest/MethodHandlers.java    | 10 +-
 .../rest/RestCompatibleVersionHelper.java     | 34 +++----
 .../elasticsearch/rest/RestController.java    | 33 +++----
 .../org/elasticsearch/rest/RestHandler.java   |  6 +-
 .../org/elasticsearch/rest/RestRequest.java   | 18 ++--
 .../CompatibleNamedXContentRegistryTests.java |  3 +-
 .../org/elasticsearch/node/NodeTests.java     | 95 +++++++++++++++++--
 .../rest/MethodHandlersTests.java             | 20 ++--
 .../RestCompatibleVersionHelperTests.java     | 14 +--
 .../rest/RestControllerTests.java             | 54 ++++-------
 .../xcontent/WatcherXContentParser.java       |  5 +-
 .../security/rest/SecurityRestFilter.java     |  8 +-
 29 files changed, 302 insertions(+), 167 deletions(-)
 create mode 100644 libs/core/src/main/java/org/elasticsearch/common/compatibility/RestApiCompatibleVersion.java

diff --git a/libs/core/src/main/java/org/elasticsearch/common/compatibility/RestApiCompatibleVersion.java b/libs/core/src/main/java/org/elasticsearch/common/compatibility/RestApiCompatibleVersion.java
new file mode 100644
index 0000000000000..f78661aef1079
--- /dev/null
+++ b/libs/core/src/main/java/org/elasticsearch/common/compatibility/RestApiCompatibleVersion.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.common.compatibility;
+
+/**
+ * An enum representing the major versions used by the REST compatible API.
+ * The CURRENT instance represents the major version of Version.CURRENT from the server module.
+ *
+ * Only major versions are supported.
+ */
+public enum RestApiCompatibleVersion {
+
+    V_8(8),
+    V_7(7);
+
+    public final byte major;
+    private static final RestApiCompatibleVersion CURRENT = V_8;
+
+    RestApiCompatibleVersion(int major) {
+        this.major = (byte) major;
+    }
+
+    public RestApiCompatibleVersion previousMajor() {
+        return fromMajorVersion(major - 1);
+    }
+
+    public static RestApiCompatibleVersion fromMajorVersion(int majorVersion) {
+        return valueOf("V_" + majorVersion);
+    }
+
+    public static RestApiCompatibleVersion minimumSupported() {
+        return currentVersion().previousMajor();
+    }
+
+    public static RestApiCompatibleVersion currentVersion() {
+        return CURRENT;
+    }
+}
diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java
index 0d7a454ac1bab..6a7964883f05f 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java
@@ -10,6 +10,7 @@
 
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.compatibility.RestApiCompatibleVersion;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -138,7 +139,8 @@ private Map<Class<?>, Map<String, Entry>> getRegistry(List<Entry> entries){
      */
    public <T, C> T parseNamedObject(Class<T> categoryClass, String name, XContentParser parser, C context) throws IOException {
-        Map<String, Entry> parsers = parser.useCompatibility() ? compatibleRegistry.get(categoryClass) : registry.get(categoryClass);
+        Map<String, Entry> parsers = parser.getRestApiCompatibleVersion() == RestApiCompatibleVersion.minimumSupported() ?
+            compatibleRegistry.get(categoryClass) : registry.get(categoryClass);
         if (parsers == null) {
             if (registry.isEmpty()) {
                 // The "empty" registry will never work so we throw a better exception as a hint.
diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContent.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContent.java
index 49dbb5a0a99dc..839a9b13a84b1 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContent.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContent.java
@@ -8,6 +8,8 @@
 
 package org.elasticsearch.common.xcontent;
 
+import org.elasticsearch.common.compatibility.RestApiCompatibleVersion;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -76,9 +78,10 @@ XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler
 
     /**
      * Creates a parser over the provided input stream and with the indication that a request is using REST compatible API.
-     * Parses XContent using the N-1 compatible logic.
+     * Depending on the provided restApiCompatibleVersion, parses XContent using either the N-1 compatible or the current logic.
+     * @param restApiCompatibleVersion - indicates if the N-1 or N compatible XContent parsing logic will be used.
     */
    XContentParser createParserForCompatibility(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler,
-                                                InputStream is) throws IOException;
+                                                InputStream is, RestApiCompatibleVersion restApiCompatibleVersion) throws IOException;
 }
diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java
index f3ddff5b58746..708f42df752df 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java
@@ -8,6 +8,8 @@
 
 package org.elasticsearch.common.xcontent;
 
+import org.elasticsearch.common.compatibility.RestApiCompatibleVersion;
+
 import java.io.ByteArrayOutputStream;
 import java.io.Closeable;
 import java.io.Flushable;
@@ -155,7 +157,7 @@ public interface HumanReadableTransformer {
      */
     private boolean humanReadable = false;
 
-    private byte compatibleMajorVersion;
+    private RestApiCompatibleVersion restApiCompatibilityVersion;
 
     private ParsedMediaType responseContentType;
 
@@ -1006,21 +1008,21 @@ public XContentBuilder copyCurrentStructure(XContentParser parser) throws IOException
     /**
      * Sets a version used for serialising a response compatible with a previous version.
+     * @param restApiCompatibleVersion - indicates the requested API version for which the builder will be creating content
      */
-    public XContentBuilder withCompatibleMajorVersion(byte compatibleMajorVersion) {
-        assert this.compatibleMajorVersion == 0 : "Compatible version has already been set";
-        if (compatibleMajorVersion == 0) {
-            throw new IllegalArgumentException("Compatible major version must not be equal to 0");
-        }
-        this.compatibleMajorVersion = compatibleMajorVersion;
+    public XContentBuilder withCompatibleVersion(RestApiCompatibleVersion restApiCompatibleVersion) {
+        assert this.restApiCompatibilityVersion == null : "restApiCompatibleVersion has already been set";
+        Objects.requireNonNull(restApiCompatibleVersion, "restApiCompatibleVersion cannot be null");
+        this.restApiCompatibilityVersion = restApiCompatibleVersion;
         return this;
     }
 
     /**
-     * Returns a version used for serialising a response compatible with a previous version.
+     * Returns a version used for serialising a response.
+     * @return a compatible version
      */
-    public byte getCompatibleMajorVersion() {
-        return compatibleMajorVersion;
+    public RestApiCompatibleVersion getRestApiCompatibilityVersion() {
+        return restApiCompatibilityVersion;
     }
 
     @Override
diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java
index d653f8becfe3f..b7dea17ee5d70 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.common.xcontent;
 
 import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.compatibility.RestApiCompatibleVersion;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -251,7 +252,7 @@ Map<String, Object> map(
     boolean isClosed();
 
-    boolean useCompatibility();
+    RestApiCompatibleVersion getRestApiCompatibleVersion();
 
     /**
      * The callback to notify when parsing encounters a deprecated field.
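
To make the version arithmetic above concrete, a minimal usage sketch follows. It is an illustration only, not part of the patch series: the demo class name is invented, while the RestApiCompatibleVersion calls are exactly those added in this patch.

import org.elasticsearch.common.compatibility.RestApiCompatibleVersion;

// Hypothetical demo class; it only exercises the enum introduced in this patch.
public class RestApiCompatibleVersionDemo {
    public static void main(String[] args) {
        RestApiCompatibleVersion current = RestApiCompatibleVersion.currentVersion();   // V_8
        RestApiCompatibleVersion minimum = RestApiCompatibleVersion.minimumSupported(); // V_7, i.e. N-1

        // previousMajor() steps back one major version via valueOf("V_" + (major - 1)).
        assert minimum == current.previousMajor();
        assert RestApiCompatibleVersion.fromMajorVersion(7) == RestApiCompatibleVersion.V_7;

        // A parser created for minimumSupported() takes the compatible (N-1) code
        // paths guarded by getRestApiCompatibleVersion() checks, as in
        // NamedXContentRegistry.parseNamedObject above.
        System.out.println("current=" + current + ", minimumSupported=" + minimum);
    }
}
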
diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java index c9c214470426c..b318bc5ad343d 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java @@ -9,6 +9,7 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import java.io.IOException; import java.nio.CharBuffer; @@ -259,8 +260,8 @@ public boolean isClosed() { } @Override - public boolean useCompatibility() { - return parser.useCompatibility(); + public RestApiCompatibleVersion getRestApiCompatibleVersion() { + return parser.getRestApiCompatibleVersion(); } @Override diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java index 864e3089f2279..bad16abe7d216 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java @@ -12,6 +12,7 @@ import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -98,9 +99,10 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, @Override public XContentParser createParserForCompatibility(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, InputStream is) + DeprecationHandler deprecationHandler, InputStream is, + RestApiCompatibleVersion restApiCompatibleVersion) throws IOException { - return new CborXContentParser(xContentRegistry, deprecationHandler, cborFactory.createParser(is), true); + return new CborXContentParser(xContentRegistry, deprecationHandler, cborFactory.createParser(is), restApiCompatibleVersion); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java index 67e848cc5c368..a12b50efc7aac 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentParser.java @@ -9,6 +9,7 @@ package org.elasticsearch.common.xcontent.cbor; import com.fasterxml.jackson.core.JsonParser; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; @@ -22,8 +23,9 @@ public CborXContentParser(NamedXContentRegistry xContentRegistry, } public CborXContentParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, JsonParser parser, boolean useCompatibility) { - super(xContentRegistry, deprecationHandler, parser, useCompatibility); + DeprecationHandler deprecationHandler, JsonParser parser, + 
RestApiCompatibleVersion restApiCompatibleVersion) { + super(xContentRegistry, deprecationHandler, parser, restApiCompatibleVersion); } @Override diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java index c237169b7abe5..21fb8e9c3e4e5 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java @@ -12,6 +12,7 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -99,8 +100,9 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, @Override public XContentParser createParserForCompatibility(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, InputStream is) throws IOException { - return new JsonXContentParser(xContentRegistry, deprecationHandler, jsonFactory.createParser(is), true); + DeprecationHandler deprecationHandler, InputStream is, + RestApiCompatibleVersion restApiCompatibleVersion) throws IOException { + return new JsonXContentParser(xContentRegistry, deprecationHandler, jsonFactory.createParser(is), restApiCompatibleVersion); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java index 801c6058f3da4..20038b1dc3c1c 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonLocation; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentLocation; @@ -27,13 +28,14 @@ public class JsonXContentParser extends AbstractXContentParser { public JsonXContentParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, JsonParser parser) { - super(xContentRegistry, deprecationHandler, false); + super(xContentRegistry, deprecationHandler, RestApiCompatibleVersion.currentVersion()); this.parser = parser; } public JsonXContentParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, JsonParser parser, boolean useCompatibility) { - super(xContentRegistry, deprecationHandler, useCompatibility); + DeprecationHandler deprecationHandler, JsonParser parser, + RestApiCompatibleVersion restApiCompatibleVersion) { + super(xContentRegistry, deprecationHandler, restApiCompatibleVersion); this.parser = parser; } diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java index 0b240b49a79cf..c077ba41a4eb8 100644 --- 
a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java @@ -13,6 +13,7 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -100,7 +101,8 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, @Override public XContentParser createParserForCompatibility(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, InputStream is) throws IOException { - return new SmileXContentParser(xContentRegistry, deprecationHandler, smileFactory.createParser(is), true); + DeprecationHandler deprecationHandler, InputStream is, + RestApiCompatibleVersion restApiCompatibleVersion) throws IOException { + return new SmileXContentParser(xContentRegistry, deprecationHandler, smileFactory.createParser(is), restApiCompatibleVersion); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java index 68c424b73f890..12f901fb1c3c0 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentParser.java @@ -9,6 +9,7 @@ package org.elasticsearch.common.xcontent.smile; import com.fasterxml.jackson.core.JsonParser; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; @@ -22,8 +23,9 @@ public SmileXContentParser(NamedXContentRegistry xContentRegistry, } public SmileXContentParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, JsonParser parser, boolean useCompatibility) { - super(xContentRegistry, deprecationHandler, parser, useCompatibility); + DeprecationHandler deprecationHandler, JsonParser parser, + RestApiCompatibleVersion restApiCompatibleVersion) { + super(xContentRegistry, deprecationHandler, parser, restApiCompatibleVersion); } @Override diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java index 43a150f72db55..c2ae840dcc12e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParseException; @@ -46,16 +47,17 @@ private static void checkCoerceString(boolean coerce, Class cl private final NamedXContentRegistry 
xContentRegistry; private final DeprecationHandler deprecationHandler; - private final boolean useCompatibility; + private final RestApiCompatibleVersion restApiCompatibleVersion; - public AbstractXContentParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, boolean useCompatibility) { + public AbstractXContentParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, + RestApiCompatibleVersion restApiCompatibleVersion) { this.xContentRegistry = xContentRegistry; this.deprecationHandler = deprecationHandler; - this.useCompatibility = useCompatibility; + this.restApiCompatibleVersion = restApiCompatibleVersion; } public AbstractXContentParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler) { - this(xContentRegistry, deprecationHandler, false); + this(xContentRegistry, deprecationHandler, RestApiCompatibleVersion.currentVersion()); } // The 3rd party parsers we rely on are known to silently truncate fractions: see @@ -413,8 +415,8 @@ public NamedXContentRegistry getXContentRegistry() { public abstract boolean isClosed(); @Override - public boolean useCompatibility() { - return useCompatibility; + public RestApiCompatibleVersion getRestApiCompatibleVersion() { + return restApiCompatibleVersion; } @Override diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index f271851698aa2..8ce0c4144118e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -93,8 +94,9 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, @Override public XContentParser createParserForCompatibility(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, InputStream is) throws IOException { - return new YamlXContentParser(xContentRegistry, deprecationHandler, yamlFactory.createParser(is), true); + DeprecationHandler deprecationHandler, InputStream is, + RestApiCompatibleVersion restApiCompatibleVersion) throws IOException { + return new YamlXContentParser(xContentRegistry, deprecationHandler, yamlFactory.createParser(is), restApiCompatibleVersion); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java index 02f9b3b3f5907..35d85dca59fbb 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentParser.java @@ -9,6 +9,7 @@ package org.elasticsearch.common.xcontent.yaml; import com.fasterxml.jackson.core.JsonParser; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.common.xcontent.XContentType; @@ -22,8 +23,9 @@ public YamlXContentParser(NamedXContentRegistry xContentRegistry, } public YamlXContentParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, JsonParser parser, boolean useCompatibility) { - super(xContentRegistry, deprecationHandler, parser, useCompatibility); + DeprecationHandler deprecationHandler, JsonParser parser, + RestApiCompatibleVersion restApiCompatibleVersion) { + super(xContentRegistry, deprecationHandler, parser, restApiCompatibleVersion); } @Override diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 792fa066ac161..95ac300f66925 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -117,7 +118,9 @@ public class Version implements Comparable, ToXContentFragment { } assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to [" + org.apache.lucene.util.Version.LATEST + "] is still set to [" + CURRENT.luceneVersion + "]"; - + assert RestApiCompatibleVersion.currentVersion().major == CURRENT.major : "RestApiCompatibleVersion must be upgraded " + + "to reflect major from Version.CURRENT [" + CURRENT.major + "]" + + " but is still set to [" + RestApiCompatibleVersion.currentVersion().major + "]"; builder.put(V_EMPTY_ID, V_EMPTY); builderByString.put(V_EMPTY.toString(), V_EMPTY); idToVersion = builder.build(); @@ -364,12 +367,6 @@ public boolean isCompatible(Version version) { return compatible; } - /** - * Returns the minimum version that can be used for compatible REST API - */ - public Version minimumRestCompatibilityVersion() { - return this.previousMajor(); - } /** * Returns a first major version previous to the version stored in this object. 
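The `RestApiCompatibleVersion` class that these hunks depend on is introduced in a part of the patch not shown here. Inferred purely from its call sites (`currentVersion()`, `minimumSupported()`, `previousMajor()`, the public `major` field, and the `V_7`/`V_8` constants used in the tests), it behaves roughly like the sketch below; every detail is an approximation rather than the shipped class. Note that `Version#minimumRestCompatibilityVersion()`, removed in the hunk just above, is subsumed by `RestApiCompatibleVersion.minimumSupported()`.

    // Approximate sketch of RestApiCompatibleVersion, inferred from the call
    // sites in this patch; the real class is added elsewhere in the series.
    public class RestApiCompatibleVersion {
        public static final RestApiCompatibleVersion V_7 = new RestApiCompatibleVersion((byte) 7);
        public static final RestApiCompatibleVersion V_8 = new RestApiCompatibleVersion((byte) 8);

        public final byte major;

        private RestApiCompatibleVersion(byte major) {
            this.major = major;
        }

        // the major version the server speaks natively
        public static RestApiCompatibleVersion currentVersion() {
            return V_8;
        }

        // the oldest major for which compatible REST handling is offered, i.e. N-1
        public static RestApiCompatibleVersion minimumSupported() {
            return currentVersion().previousMajor();
        }

        public RestApiCompatibleVersion previousMajor() {
            if (major == 8) {
                return V_7;
            }
            // a sketch need not support going further back than N-1
            throw new IllegalArgumentException("no compatible version before major [" + major + "]");
        }
    }

The point of the type, compared with the `boolean useCompatibility` flag it replaces, is that parsers, handlers and response builders all carry the same explicit notion of which major's REST dialect is in effect, instead of a hard-coded `true`/`false` at each call site.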
diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index db2b89104cba3..f0ecdf761a8d6 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -454,8 +454,7 @@ protected Node(final Environment initialEnvironment, .flatMap(p -> p.getNamedXContent().stream()), ClusterModule.getNamedXWriteables().stream()) .flatMap(Function.identity()).collect(toList()), - pluginsService.filterPlugins(Plugin.class).stream() - .flatMap(p -> p.getNamedXContentForCompatibility().stream()).collect(toList()) + getCompatibleNamedXContents() ); final MetaStateService metaStateService = new MetaStateService(nodeEnvironment, xContentRegistry); final PersistedClusterStateService lucenePersistedStateFactory @@ -730,6 +729,12 @@ protected Node(final Environment initialEnvironment, } } + // package scope for testing + List getCompatibleNamedXContents() { + return pluginsService.filterPlugins(Plugin.class).stream() + .flatMap(p -> p.getNamedXContentForCompatibility().stream()).collect(toList()); + } + protected TransportService newTransportService(Settings settings, Transport transport, ThreadPool threadPool, TransportInterceptor interceptor, Function localNodeFactory, diff --git a/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java b/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java index e893c5fa639c6..aa18c8ce43c1e 100644 --- a/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java +++ b/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java @@ -8,7 +8,7 @@ package org.elasticsearch.rest; -import org.elasticsearch.Version; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import java.util.HashMap; import java.util.Map; @@ -20,7 +20,7 @@ final class MethodHandlers { private final String path; - private final Map> methodHandlers; + private final Map> methodHandlers; MethodHandlers(String path, RestHandler handler, RestRequest.Method... methods) { this.path = path; @@ -54,13 +54,13 @@ MethodHandlers addMethods(RestHandler handler, RestRequest.Method... methods) { * (as opposed to non-compatible/breaking) * or {@code null} if none exists. */ - RestHandler getHandler(RestRequest.Method method, Version version) { - Map versionToHandlers = methodHandlers.get(method); + RestHandler getHandler(RestRequest.Method method, RestApiCompatibleVersion version) { + Map versionToHandlers = methodHandlers.get(method); if (versionToHandlers == null) { return null; //method not found } final RestHandler handler = versionToHandlers.get(version); - return handler == null ? versionToHandlers.get(Version.CURRENT) : handler; + return handler == null ? 
versionToHandlers.get(RestApiCompatibleVersion.currentVersion()) : handler; } diff --git a/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java b/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java index fe53ae212939f..24f88cc6ff00d 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java +++ b/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java @@ -15,8 +15,8 @@ package org.elasticsearch.rest; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.MediaType; import org.elasticsearch.common.xcontent.ParsedMediaType; @@ -27,23 +27,25 @@ */ class RestCompatibleVersionHelper { - static Version getCompatibleVersion( + static RestApiCompatibleVersion getCompatibleVersion( @Nullable ParsedMediaType acceptHeader, @Nullable ParsedMediaType contentTypeHeader, boolean hasContent ) { Byte aVersion = parseVersion(acceptHeader); - byte acceptVersion = aVersion == null ? Version.CURRENT.major : Integer.valueOf(aVersion).byteValue(); + byte acceptVersion = aVersion == null ? RestApiCompatibleVersion.currentVersion().major : Integer.valueOf(aVersion).byteValue(); Byte cVersion = parseVersion(contentTypeHeader); - byte contentTypeVersion = cVersion == null ? Version.CURRENT.major : Integer.valueOf(cVersion).byteValue(); + byte contentTypeVersion = cVersion == null ? + RestApiCompatibleVersion.currentVersion().major : Integer.valueOf(cVersion).byteValue(); // accept version must be current or prior - if (acceptVersion > Version.CURRENT.major || acceptVersion < Version.CURRENT.minimumRestCompatibilityVersion().major) { + if (acceptVersion > RestApiCompatibleVersion.currentVersion().major || + acceptVersion < RestApiCompatibleVersion.minimumSupported().major) { throw new ElasticsearchStatusException( "Accept version must be either version {} or {}, but found {}. Accept={}", RestStatus.BAD_REQUEST, - Version.CURRENT.major, - Version.CURRENT.minimumRestCompatibilityVersion().major, + RestApiCompatibleVersion.currentVersion().major, + RestApiCompatibleVersion.minimumSupported().major, acceptVersion, acceptHeader ); @@ -51,13 +53,13 @@ static Version getCompatibleVersion( if (hasContent) { // content-type version must be current or prior - if (contentTypeVersion > Version.CURRENT.major - || contentTypeVersion < Version.CURRENT.minimumRestCompatibilityVersion().major) { + if (contentTypeVersion > RestApiCompatibleVersion.currentVersion().major + || contentTypeVersion < RestApiCompatibleVersion.minimumSupported().major) { throw new ElasticsearchStatusException( "Content-Type version must be either version {} or {}, but found {}. 
Content-Type={}", RestStatus.BAD_REQUEST, - Version.CURRENT.major, - Version.CURRENT.minimumRestCompatibilityVersion().major, + RestApiCompatibleVersion.currentVersion().major, + RestApiCompatibleVersion.minimumSupported().major, contentTypeVersion, contentTypeHeader ); @@ -83,16 +85,16 @@ static Version getCompatibleVersion( contentTypeHeader ); } - if (contentTypeVersion < Version.CURRENT.major) { - return Version.CURRENT.previousMajor(); + if (contentTypeVersion < RestApiCompatibleVersion.currentVersion().major) { + return RestApiCompatibleVersion.minimumSupported(); } } - if (acceptVersion < Version.CURRENT.major) { - return Version.CURRENT.previousMajor(); + if (acceptVersion < RestApiCompatibleVersion.currentVersion().major) { + return RestApiCompatibleVersion.minimumSupported(); } - return Version.CURRENT; + return RestApiCompatibleVersion.currentVersion(); } static Byte parseVersion(ParsedMediaType parsedMediaType) { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index b10ab89f6ada0..fa2d533d99f06 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -12,13 +12,13 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.path.PathTrie; @@ -158,9 +158,9 @@ protected void registerHandler(RestRequest.Method method, String path, RestHandl } private void registerHandlerNoWrap(RestRequest.Method method, String path, RestHandler maybeWrappedHandler) { - final Version version = maybeWrappedHandler.compatibleWithVersion(); - assert Version.CURRENT.minimumRestCompatibilityVersion() == version || Version.CURRENT == version - : "REST API compatibility is only supported for version " + Version.CURRENT.minimumRestCompatibilityVersion().major; + final RestApiCompatibleVersion version = maybeWrappedHandler.compatibleWithVersion(); + assert RestApiCompatibleVersion.minimumSupported() == version || RestApiCompatibleVersion.currentVersion() == version + : "REST API compatibility is only supported for version " + RestApiCompatibleVersion.minimumSupported().major; handlers.insertOrUpdate(path, new MethodHandlers(path, maybeWrappedHandler, method), (mHandlers, newMHandler) -> mHandlers.addMethods(maybeWrappedHandler, method)); @@ -214,7 +214,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } } - private void dispatchRequest(RestRequest request, RestChannel channel, RestHandler handler, Version compatibleVersion) + private void dispatchRequest(RestRequest request, RestChannel channel, RestHandler handler, + RestApiCompatibleVersion restApiCompatibleVersion) throws Exception { final int contentLength = request.contentLength(); if (contentLength > 0) { @@ -239,7 +240,7 @@ private void dispatchRequest(RestRequest request, RestChannel channel, 
RestHandl inFlightRequestsBreaker(circuitBreakerService).addWithoutBreaking(contentLength); } // iff we could reserve bytes for the request we need to send the response also over this channel - responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, compatibleVersion); + responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, restApiCompatibleVersion); // TODO: Count requests double in the circuit breaker if they need copying? if (handler.allowsUnsafeBuffers() == false) { request.ensureSafeBuffers(); @@ -316,7 +317,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel final String uri = request.uri(); final RestRequest.Method requestMethod; - Version compatibleVersion = request.getCompatibleVersion(); + RestApiCompatibleVersion restApiCompatibleVersion = request.getRestApiCompatibleVersion(); try { // Resolves the HTTP method and fails if the method is invalid requestMethod = request.method(); @@ -328,14 +329,14 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel if (handlers == null) { handler = null; } else { - handler = handlers.getHandler(requestMethod, compatibleVersion); + handler = handlers.getHandler(requestMethod, restApiCompatibleVersion); } if (handler == null) { if (handleNoHandlerFound(rawPath, requestMethod, uri, channel)) { return; } } else { - dispatchRequest(request, channel, handler, compatibleVersion); + dispatchRequest(request, channel, handler, restApiCompatibleVersion); return; } } @@ -453,40 +454,40 @@ private static final class ResourceHandlingHttpChannel implements RestChannel { private final RestChannel delegate; private final CircuitBreakerService circuitBreakerService; private final int contentLength; - private final Version compatibleVersion; + private final RestApiCompatibleVersion restApiCompatibleVersion; private final AtomicBoolean closed = new AtomicBoolean(); ResourceHandlingHttpChannel(RestChannel delegate, CircuitBreakerService circuitBreakerService, int contentLength, - Version compatibleVersion) { + RestApiCompatibleVersion restApiCompatibleVersion) { this.delegate = delegate; this.circuitBreakerService = circuitBreakerService; this.contentLength = contentLength; - this.compatibleVersion = compatibleVersion; + this.restApiCompatibleVersion = restApiCompatibleVersion; } @Override public XContentBuilder newBuilder() throws IOException { return delegate.newBuilder() - .withCompatibleMajorVersion(compatibleVersion.major); + .withCompatibleVersion(restApiCompatibleVersion); } @Override public XContentBuilder newErrorBuilder() throws IOException { return delegate.newErrorBuilder() - .withCompatibleMajorVersion(compatibleVersion.major); + .withCompatibleVersion(restApiCompatibleVersion); } @Override public XContentBuilder newBuilder(@Nullable XContentType xContentType, boolean useFiltering) throws IOException { return delegate.newBuilder(xContentType, useFiltering) - .withCompatibleMajorVersion(compatibleVersion.major); + .withCompatibleVersion(restApiCompatibleVersion); } @Override public XContentBuilder newBuilder(XContentType xContentType, XContentType responseContentType, boolean useFiltering) throws IOException { return delegate.newBuilder(xContentType, responseContentType, useFiltering) - .withCompatibleMajorVersion(compatibleVersion.major); + .withCompatibleVersion(restApiCompatibleVersion); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java 
b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index 88e271b63e919..9b1902e27a85d 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest; -import org.elasticsearch.Version; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.MediaType; import org.elasticsearch.common.xcontent.MediaTypeRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -102,8 +102,8 @@ default MediaTypeRegistry validAcceptMediaTypes() { * If no version is specified, handler is assumed to be compatible with Version.CURRENT * @return a version */ - default Version compatibleWithVersion() { - return Version.CURRENT; + default RestApiCompatibleVersion compatibleWithVersion() { + return RestApiCompatibleVersion.currentVersion(); } class Route { diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index a1caa2d0c7535..8895fcf9ad023 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.Nullable; @@ -18,6 +17,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -62,7 +62,7 @@ public class RestRequest implements ToXContent.Params { private final HttpChannel httpChannel; private final ParsedMediaType parsedAccept; private final ParsedMediaType parsedContentType; - private final Version compatibleVersion; + private final RestApiCompatibleVersion restApiCompatibleVersion; private HttpRequest httpRequest; private boolean contentConsumed = false; @@ -100,7 +100,7 @@ private RestRequest(NamedXContentRegistry xContentRegistry, Map this.rawPath = path; this.headers = Collections.unmodifiableMap(headers); this.requestId = requestId; - this.compatibleVersion = RestCompatibleVersionHelper.getCompatibleVersion(parsedAccept, parsedContentType, hasContent()); + this.restApiCompatibleVersion = RestCompatibleVersionHelper.getCompatibleVersion(parsedAccept, parsedContentType, hasContent()); } private static @Nullable ParsedMediaType parseHeaderWithMediaType(Map> headers, String headerName) { @@ -439,11 +439,9 @@ public NamedXContentRegistry getXContentRegistry() { public final XContentParser contentParser() throws IOException { BytesReference content = requiredContent(); // will throw exception if body or content type missing XContent xContent = xContentType.get().xContent(); - if (compatibleVersion == Version.CURRENT.minimumRestCompatibilityVersion()) { - return xContent.createParserForCompatibility(xContentRegistry, LoggingDeprecationHandler.INSTANCE, content.streamInput()); - } else { - return xContent.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, content.streamInput()); - } + return 
xContent.createParserForCompatibility(xContentRegistry, LoggingDeprecationHandler.INSTANCE, content.streamInput(), + restApiCompatibleVersion); + } /** @@ -551,8 +549,8 @@ public static XContentType parseContentType(List header) { throw new IllegalArgumentException("empty Content-Type header"); } - public Version getCompatibleVersion() { - return compatibleVersion; + public RestApiCompatibleVersion getRestApiCompatibleVersion() { + return restApiCompatibleVersion; } public static class MediaTypeHeaderException extends RuntimeException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java index 5792ddbe95c9b..25ca050a848f0 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -138,7 +139,7 @@ public void testCompatibleRequest() throws IOException { b.endObject(); String mediaType = XContentType.VND_JSON.toParsedMediaType() .responseContentTypeHeader(Map.of(MediaType.COMPATIBLE_WITH_PARAMETER_NAME, - String.valueOf(Version.CURRENT.minimumRestCompatibilityVersion().major))); + String.valueOf(RestApiCompatibleVersion.minimumSupported().major))); List mediaTypeList = Collections.singletonList(mediaType); RestRequest restRequest2 = new FakeRestRequest.Builder(compatibleRegistry) diff --git a/server/src/test/java/org/elasticsearch/node/NodeTests.java b/server/src/test/java/org/elasticsearch/node/NodeTests.java index fd0865061e377..408d7ba19a0b8 100644 --- a/server/src/test/java/org/elasticsearch/node/NodeTests.java +++ b/server/src/test/java/org/elasticsearch/node/NodeTests.java @@ -12,10 +12,14 @@ import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.bootstrap.BootstrapContext; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.xcontent.ContextParser; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine.Searcher; @@ -29,6 +33,7 @@ import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.threadpool.ThreadPool; +import org.mockito.Mockito; import java.io.IOException; import java.nio.file.Path; @@ -44,6 +49,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.NodeRoles.dataNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; 
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -142,10 +148,10 @@ public void testServerNameNodeAttribute() throws IOException { private static Settings.Builder baseSettings() { final Path tempDir = createTempDir(); return Settings.builder() - .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) - .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) - .put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()) - .put(dataNode()); + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) + .put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()) + .put(dataNode()); } public void testCloseOnOutstandingTask() throws Exception { @@ -156,7 +162,7 @@ public void testCloseOnOutstandingTask() throws Exception { final CountDownLatch threadRunning = new CountDownLatch(1); threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> { threadRunning.countDown(); - while (shouldRun.get()); + while (shouldRun.get()) ; }); threadRunning.await(); node.close(); @@ -179,7 +185,7 @@ public void testCloseRaceWithTaskExecution() throws Exception { } try { threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> { - while (shouldRun.get()); + while (shouldRun.get()) ; }); } catch (RejectedExecutionException e) { assertThat(e.getMessage(), containsString("[Terminated,")); @@ -218,7 +224,7 @@ public void testAwaitCloseTimeoutsOnNonInterruptibleTask() throws Exception { final CountDownLatch threadRunning = new CountDownLatch(1); threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> { threadRunning.countDown(); - while (shouldRun.get()); + while (shouldRun.get()) ; }); threadRunning.await(); node.close(); @@ -261,7 +267,7 @@ public void testCloseOnLeakedIndexReaderReference() throws Exception { node.start(); IndicesService indicesService = node.injector().getInstance(IndicesService.class); assertAcked(node.client().admin().indices().prepareCreate("test") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))); + .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))); IndexService indexService = indicesService.iterator().next(); IndexShard shard = indexService.getShard(0); Searcher searcher = shard.acquireSearcher("test"); @@ -277,7 +283,7 @@ public void testCloseOnLeakedStoreReference() throws Exception { node.start(); IndicesService indicesService = node.injector().getInstance(IndicesService.class); assertAcked(node.client().admin().indices().prepareCreate("test") - .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))); + .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))); IndexService indexService = indicesService.iterator().next(); IndexShard shard = indexService.getShard(0); shard.store().incRef(); @@ -300,7 +306,7 @@ public void testCreateWithCircuitBreakerPlugins() throws IOException { CircuitBreakerPlugin breakerPlugin = node.getPluginsService().filterPlugins(CircuitBreakerPlugin.class).get(0); assertTrue(breakerPlugin instanceof MockCircuitBreakerPlugin); assertSame("plugin circuit breaker instance is not the same as breaker service's instance", - ((MockCircuitBreakerPlugin)breakerPlugin).myCircuitBreaker.get(), + ((MockCircuitBreakerPlugin) breakerPlugin).myCircuitBreaker.get(), 
service.getBreaker("test_breaker")); } } @@ -328,4 +334,73 @@ public void setCircuitBreaker(CircuitBreaker circuitBreaker) { myCircuitBreaker.set(circuitBreaker); } } + + + interface MockRestApiCompatibleVersion { + RestApiCompatibleVersion minimumRestCompatibilityVersion(); + } + + static MockRestApiCompatibleVersion MockCompatibleVersion = Mockito.mock(MockRestApiCompatibleVersion.class); + + static NamedXContentRegistry.Entry v7CompatibleEntries = new NamedXContentRegistry.Entry(Integer.class, + new ParseField("name"), Mockito.mock(ContextParser.class)); + static NamedXContentRegistry.Entry v8CompatibleEntries = new NamedXContentRegistry.Entry(Integer.class, + new ParseField("name2"), Mockito.mock(ContextParser.class)); + + public static class TestRestCompatibility1 extends Plugin { + + @Override + public List getNamedXContentForCompatibility() { + // real plugin will use CompatibleVersion.minimumRestCompatibilityVersion() + if (/*CompatibleVersion.minimumRestCompatibilityVersion()*/ + MockCompatibleVersion.minimumRestCompatibilityVersion().equals(RestApiCompatibleVersion.V_7)) { + //return set of N-1 entries + return List.of(v7CompatibleEntries); + } + // after major release, new compatible apis can be added before the old ones are removed. + if (/*CompatibleVersion.minimumRestCompatibilityVersion()*/ + MockCompatibleVersion.minimumRestCompatibilityVersion().equals(RestApiCompatibleVersion.V_8)) { + return List.of(v8CompatibleEntries); + + } + return super.getNamedXContentForCompatibility(); + } + } + + // This test shows an example on how multiple compatible namedxcontent can be present at the same time. + public void testLoadingMultipleRestCompatibilityPlugins() throws IOException { + + Mockito.when(MockCompatibleVersion.minimumRestCompatibilityVersion()) + .thenReturn(RestApiCompatibleVersion.V_7); + + { + Settings.Builder settings = baseSettings(); + + // throw an exception when two plugins are registered + List> plugins = basePlugins(); + plugins.add(TestRestCompatibility1.class); + + try (Node node = new MockNode(settings.build(), plugins)) { + List compatibleNamedXContents = node.getCompatibleNamedXContents(); + assertThat(compatibleNamedXContents, contains(v7CompatibleEntries)); + } + } + // after version bump CompatibleVersion.minimumRestCompatibilityVersion() will return V_8 + Mockito.when(MockCompatibleVersion.minimumRestCompatibilityVersion()) + .thenReturn(RestApiCompatibleVersion.V_8); + { + Settings.Builder settings = baseSettings(); + + // throw an exception when two plugins are registered + List> plugins = basePlugins(); + plugins.add(TestRestCompatibility1.class); + + try (Node node = new MockNode(settings.build(), plugins)) { + List compatibleNamedXContents = node.getCompatibleNamedXContents(); + assertThat(compatibleNamedXContents, contains(v8CompatibleEntries)); + } + } + } + + } diff --git a/server/src/test/java/org/elasticsearch/rest/MethodHandlersTests.java b/server/src/test/java/org/elasticsearch/rest/MethodHandlersTests.java index 312f26ea272ff..a827eaa523060 100644 --- a/server/src/test/java/org/elasticsearch/rest/MethodHandlersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/MethodHandlersTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest; -import org.elasticsearch.Version; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.sameInstance; @@ -22,7 +22,7 @@ public void 
testLookupForDifferentMethodsSameVersion() { MethodHandlers methodHandlers = new MethodHandlers("path", putHandler, RestRequest.Method.PUT); methodHandlers.addMethods(postHandler, RestRequest.Method.POST); - RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, Version.CURRENT); + RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, RestApiCompatibleVersion.currentVersion()); assertThat(handler, sameInstance(putHandler)); } @@ -30,10 +30,10 @@ public void testLookupForHandlerUnderMultipleMethods() { RestHandler handler = new CurrentVersionHandler(); MethodHandlers methodHandlers = new MethodHandlers("path", handler, RestRequest.Method.PUT, RestRequest.Method.POST); - RestHandler handlerFound = methodHandlers.getHandler(RestRequest.Method.PUT, Version.CURRENT); + RestHandler handlerFound = methodHandlers.getHandler(RestRequest.Method.PUT, RestApiCompatibleVersion.currentVersion()); assertThat(handlerFound, sameInstance(handler)); - handlerFound = methodHandlers.getHandler(RestRequest.Method.POST, Version.CURRENT); + handlerFound = methodHandlers.getHandler(RestRequest.Method.POST, RestApiCompatibleVersion.currentVersion()); assertThat(handlerFound, sameInstance(handler)); } @@ -43,10 +43,10 @@ public void testLookupForHandlersUnderDifferentVersions() { MethodHandlers methodHandlers = new MethodHandlers("path", currentVersionHandler, RestRequest.Method.PUT); methodHandlers.addMethods(previousVersionHandler, RestRequest.Method.PUT); - RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, Version.CURRENT); + RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, RestApiCompatibleVersion.currentVersion()); assertThat(handler, sameInstance(currentVersionHandler)); - handler = methodHandlers.getHandler(RestRequest.Method.PUT, Version.CURRENT.previousMajor()); + handler = methodHandlers.getHandler(RestRequest.Method.PUT, RestApiCompatibleVersion.currentVersion().previousMajor()); assertThat(handler, sameInstance(previousVersionHandler)); } @@ -60,14 +60,14 @@ public void testExceptionOnOverride() { public void testMissingCurrentHandler(){ RestHandler previousVersionHandler = new PreviousVersionHandler(); MethodHandlers methodHandlers = new MethodHandlers("path", previousVersionHandler, RestRequest.Method.PUT, RestRequest.Method.POST); - RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, Version.CURRENT); + RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, RestApiCompatibleVersion.currentVersion()); assertNull(handler); } public void testMissingPriorHandlerReturnsCurrentHandler(){ RestHandler currentVersionHandler = new CurrentVersionHandler(); MethodHandlers methodHandlers = new MethodHandlers("path", currentVersionHandler, RestRequest.Method.PUT, RestRequest.Method.POST); - RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, Version.CURRENT.previousMajor()); + RestHandler handler = methodHandlers.getHandler(RestRequest.Method.PUT, RestApiCompatibleVersion.currentVersion().previousMajor()); assertThat(handler, sameInstance(currentVersionHandler)); } @@ -85,8 +85,8 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } @Override - public Version compatibleWithVersion() { - return Version.CURRENT.previousMajor(); + public RestApiCompatibleVersion compatibleWithVersion() { + return RestApiCompatibleVersion.currentVersion().previousMajor(); } } } diff --git 
a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java index 2548b2dfd5873..ae19cb7bde33a 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.rest; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.Version; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.ParsedMediaType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchMatchers; @@ -20,9 +20,9 @@ import static org.hamcrest.Matchers.nullValue; public class RestCompatibleVersionHelperTests extends ESTestCase { - int CURRENT_VERSION = Version.CURRENT.major; - int PREVIOUS_VERSION = Version.CURRENT.major - 1; - int OBSOLETE_VERSION = Version.CURRENT.major - 2; + int CURRENT_VERSION = RestApiCompatibleVersion.currentVersion().major; + int PREVIOUS_VERSION = RestApiCompatibleVersion.currentVersion().major - 1; + int OBSOLETE_VERSION = RestApiCompatibleVersion.currentVersion().major - 2; public void testAcceptAndContentTypeCombinations() { assertThat(requestWith(acceptHeader(PREVIOUS_VERSION), contentTypeHeader(PREVIOUS_VERSION), bodyPresent()), isCompatible()); @@ -322,11 +322,11 @@ public void testVersionParsing() { } - private Matcher isCompatible() { + private Matcher isCompatible() { return requestHasVersion(PREVIOUS_VERSION); } - private Matcher requestHasVersion(int version) { + private Matcher requestHasVersion(int version) { return ElasticsearchMatchers.HasPropertyLambdaMatcher.hasProperty(v -> (int) v.major, equalTo(version)); } @@ -361,7 +361,7 @@ private String mediaType(String version) { return null; } - private Version requestWith(String accept, String contentType, String body) { + private RestApiCompatibleVersion requestWith(String accept, String contentType, String body) { ParsedMediaType parsedAccept = ParsedMediaType.parseMediaType(accept); ParsedMediaType parsedContentType = ParsedMediaType.parseMediaType(contentType); return RestCompatibleVersionHelper.getCompatibleVersion(parsedAccept, parsedContentType, body.isEmpty() == false); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index fa3d5c37c3802..301132a233f99 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -233,7 +234,7 @@ public void testRegisterSecondMethodWithDifferentNamedWildcard() { private RestHandler v8mockHandler() { RestHandler mock = mock(RestHandler.class); - Mockito.when(mock.compatibleWithVersion()).thenReturn(Version.CURRENT); + Mockito.when(mock.compatibleWithVersion()).thenReturn(RestApiCompatibleVersion.currentVersion()); return mock; } @@ -368,7 +369,7 @@ public void 
testDispatchWorksWithNewlineDelimitedJson() { restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { - assertThat(request.contentParser().useCompatibility(), is(false)); + assertThat(request.contentParser().getRestApiCompatibleVersion(), is(RestApiCompatibleVersion.currentVersion())); channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); } @@ -395,7 +396,7 @@ public void testDispatchWithContentStream() { restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { - assertThat(request.contentParser().useCompatibility(), is(false)); + assertThat(request.contentParser().getRestApiCompatibleVersion(), is(RestApiCompatibleVersion.currentVersion())); channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); } @@ -627,7 +628,7 @@ public void testDispatchCompatibleHandler() { RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); - final byte version = Version.CURRENT.minimumRestCompatibilityVersion().major; + final byte version = RestApiCompatibleVersion.minimumSupported().major; final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); @@ -636,15 +637,16 @@ public void testDispatchCompatibleHandler() { restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { - assertThat(request.contentParser().useCompatibility(), is(true)); + // in real use case we will use exact version RestApiCompatibleVersion.V_7 XContentBuilder xContentBuilder = channel.newBuilder(); - assertThat(xContentBuilder.getCompatibleMajorVersion(), equalTo(version)); + assertThat(xContentBuilder.getRestApiCompatibilityVersion(), equalTo(RestApiCompatibleVersion.minimumSupported())); + assertThat(request.contentParser().getRestApiCompatibleVersion(), equalTo(RestApiCompatibleVersion.minimumSupported())); channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); } @Override - public Version compatibleWithVersion() { - return Version.CURRENT.minimumRestCompatibilityVersion(); + public RestApiCompatibleVersion compatibleWithVersion() { + return RestApiCompatibleVersion.minimumSupported(); } }); @@ -657,7 +659,7 @@ public void testDispatchCompatibleRequestToNewlyAddedHandler() { RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); - final byte version = Version.CURRENT.minimumRestCompatibilityVersion().major; + final byte version = RestApiCompatibleVersion.minimumSupported().major; final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); @@ -667,19 +669,20 @@ public void testDispatchCompatibleRequestToNewlyAddedHandler() { restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { - 
assertThat(request.contentParser().useCompatibility(), is(true)); XContentBuilder xContentBuilder = channel.newBuilder(); // even though the handler is CURRENT, the xContentBuilder has the version requested by a client. // This allows to implement the compatible logic within the serialisation without introducing V7 (compatible) handler // when only response shape has changed - assertThat(xContentBuilder.getCompatibleMajorVersion(), equalTo(version)); + assertThat(xContentBuilder.getRestApiCompatibilityVersion(), equalTo(RestApiCompatibleVersion.minimumSupported())); + assertThat(request.contentParser().getRestApiCompatibleVersion(), equalTo(RestApiCompatibleVersion.minimumSupported())); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); } @Override - public Version compatibleWithVersion() { - return Version.CURRENT; + public RestApiCompatibleVersion compatibleWithVersion() { + return RestApiCompatibleVersion.currentVersion(); } }); @@ -713,16 +716,16 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { // the media type is in application/vnd.elasticsearch form but with compatible-with=CURRENT. // Hence compatibility is not used. - assertThat(request.contentParser().useCompatibility(), is(false)); XContentBuilder xContentBuilder = channel.newBuilder(); - assertThat(xContentBuilder.getCompatibleMajorVersion(), equalTo(version)); + assertThat(request.contentParser().getRestApiCompatibleVersion(), equalTo(RestApiCompatibleVersion.currentVersion())); + assertThat(xContentBuilder.getRestApiCompatibilityVersion(), equalTo(RestApiCompatibleVersion.currentVersion())); channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); } @Override - public Version compatibleWithVersion() { - return Version.CURRENT; + public RestApiCompatibleVersion compatibleWithVersion() { + return RestApiCompatibleVersion.currentVersion(); } }); @@ -731,23 +734,6 @@ public Version compatibleWithVersion() { assertTrue(channel.getSendResponseCalled()); } - public void testRegisterIncompatibleVersionHandler() { - //using restController which uses a compatible version function returning always Version.CURRENT - final byte version = (byte) (Version.CURRENT.major - 2); - - expectThrows(AssertionError.class, - () -> restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { - @Override - public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { - } - - @Override - public Version compatibleWithVersion() { - return Version.fromString(version + ".0.0"); - } - })); - } - private static final class TestHttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java index c06f3a2af84a4..8d76a1b7b312b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.CheckedFunction; 
import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentLocation; @@ -288,8 +289,8 @@ public void close() throws IOException { } @Override - public boolean useCompatibility() { - return false; + public RestApiCompatibleVersion getRestApiCompatibleVersion() { + return RestApiCompatibleVersion.currentVersion(); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java index 32b0003dff6cc..2f567c5e53c4c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java @@ -11,9 +11,9 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.compatibility.RestApiCompatibleVersion; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.MediaType; @@ -25,15 +25,13 @@ import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestRequestFilter; - +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.transport.SSLEngineUtils; import java.io.IOException; - import java.util.List; import java.util.Map; @@ -163,7 +161,7 @@ public MediaTypeRegistry validAcceptMediaTypes() { } @Override - public Version compatibleWithVersion() { + public RestApiCompatibleVersion compatibleWithVersion() { return restHandler.compatibleWithVersion(); } } From 1e29fb34102b34cb3e0612b39e471812f4737ebf Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Feb 2021 09:47:25 +0000 Subject: [PATCH 12/24] Fix testListenersNotifiedOnCorrectThreads (#68805) This test assumed, incorrectly, that `future#done()` completes before `future#set()` returns, but this isn't true if there are multiple threads racing to complete the future. In other words listeners added before calling `onResponse()` are not necessarily notified by the time `onResponse()` returns. This commit fixes the test to account for this subtle point. 
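To make the race concrete, the following minimal sketch (invented for illustration, not Elasticsearch's `ListenableActionFuture`) shows how a call to `complete()` can return on one thread before listeners registered earlier have been notified by another.

    import java.util.ArrayList;
    import java.util.List;

    // A deliberately simple listenable future with the same subtlety: when two
    // threads race to complete it, the losing thread returns immediately while
    // the winning thread may still be notifying the listeners.
    class RacyListenableFuture {
        private final List<Runnable> listeners = new ArrayList<>();
        private boolean done;

        synchronized void addListener(Runnable listener) {
            if (done) {
                listener.run(); // already complete: notify on the calling thread
            } else {
                listeners.add(listener); // notified later by whichever thread completes first
            }
        }

        void complete() {
            final List<Runnable> toNotify;
            synchronized (this) {
                if (done) {
                    return; // lost the race: the winning thread notifies the listeners
                }
                done = true;
                toNotify = new ArrayList<>(listeners);
                listeners.clear();
            }
            // Notification happens outside the lock, so a losing complete() call
            // can return while this loop is still running on the winning thread.
            for (Runnable listener : toNotify) {
                listener.run();
            }
        }
    }

With several completer threads, "my complete() returned" therefore does not imply "the listeners ran"; only with a single completer thread does observing completion guarantee the listeners were notified, which is exactly the `completerThreads == 1` guard the fixed test adds.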
Closes #68772 --- .../action/support/ListenableActionFutureTests.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java b/server/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java index a04035564d536..8954fd6ede9da 100644 --- a/server/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java @@ -68,7 +68,6 @@ protected void doRun() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/68772") public void testListenersNotifiedOnCorrectThreads() throws InterruptedException { final int adderThreads = between(1, 5); @@ -90,7 +89,10 @@ public void testListenersNotifiedOnCorrectThreads() throws InterruptedException awaitSafe(barrier); final AtomicBoolean isComplete = new AtomicBoolean(); - if (postComplete.get()) { + if (completerThreads == 1 && postComplete.get()) { + // If there are multiple completer threads then onResponse might return on one thread, and hence postComplete is + // set, before the other completer thread notifies all the listeners. OTOH with one completer thread we know that + // postComplete indicates that the listeners were already notified. future.addListener(new ActionListener<>() { @Override public void onResponse(Void response) { From 0c1d7998c5855f62a4faea7207b0a9eea1416abc Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Wed, 10 Feb 2021 10:05:53 +0000 Subject: [PATCH 13/24] Fix testDeleteActionDeletesSearchableSnapshot (#68751) ILM could run so fast that the test did not get to pick up the snapshot name from the ILM execution state. The test therefore no longer relies on that snapshot name; instead it asserts that the test repository is empty after ILM completes the cycle for the first generation backing index.
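Compressed to its essence, the change contrasts the two patterns below (`explainIndex` is the test helper visible in the diff that follows; `snapshotsIn` is a hypothetical stand-in for the `GET _snapshot/{repo}/_all` call the new code makes):

    // Before (flaky): reads a value that exists only while ILM is passing
    // through a particular step, which a fast ILM run may already have left.
    String snapshotName = (String) explainIndex(client(), backingIndexName).get("snapshot_name");

    // After (robust): polls the terminal state, which holds no matter how
    // quickly ILM progressed: the delete phase must leave the repository empty.
    assertBusy(() -> assertThat(snapshotsIn(snapshotRepo), empty()), 30, TimeUnit.SECONDS);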
--- .../actions/SearchableSnapshotActionIT.java | 50 ++++++++----------- 1 file changed, 21 insertions(+), 29 deletions(-) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index 7798130057137..a3b657e3cb9fb 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -9,8 +9,6 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Client; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.DataStream; @@ -22,7 +20,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ilm.DeleteAction; import org.elasticsearch.xpack.core.ilm.ForceMergeAction; @@ -30,7 +27,6 @@ import org.elasticsearch.xpack.core.ilm.LifecycleAction; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; -import org.elasticsearch.xpack.core.ilm.MountSnapshotStep; import org.elasticsearch.xpack.core.ilm.Phase; import org.elasticsearch.xpack.core.ilm.PhaseCompleteStep; import org.elasticsearch.xpack.core.ilm.RolloverAction; @@ -38,7 +34,6 @@ import org.elasticsearch.xpack.core.ilm.SetPriorityAction; import org.elasticsearch.xpack.core.ilm.ShrinkAction; import org.elasticsearch.xpack.core.ilm.Step; -import org.elasticsearch.xpack.core.ilm.StepKeyTests; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest; import org.junit.Before; @@ -163,8 +158,8 @@ public void testSearchableSnapshotForceMergesIndexToOneSegment() throws Exceptio TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/54433") - public void testDeleteActionDeletesSearchableSnapshot() throws Exception { + @SuppressWarnings("unchecked") + public void testDeleteActionDeletesSearchableSnapshot() throws Exception { createSnapshotRepo(client(), snapshotRepo, randomBoolean()); // create policy with cold and delete phases @@ -192,32 +187,29 @@ public void testDeleteActionDeletesSearchableSnapshot() throws Exception { // rolling over the data stream so we can apply the searchable snapshot policy to a backing index that's not the write index rolloverMaxOneDocCondition(client(), dataStream); - String[] snapshotName = new String[1]; String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, 1L); String restoredIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + backingIndexName; - assertTrue(waitUntil(() -> { - try { - Map explainIndex = explainIndex(client(), backingIndexName); - if (explainIndex == null) { - // in case we missed the original index and it was deleted - explainIndex = explainIndex(client(), restoredIndexName); - } - snapshotName[0] = (String) explainIndex.get("snapshot_name"); - return snapshotName[0] != null; - } catch (IOException e) { - return false; - } - }, 
30, TimeUnit.SECONDS)); - assertBusy(() -> assertFalse(indexExists(restoredIndexName))); + + // let's wait for ILM to finish + assertBusy(() -> assertFalse(indexExists(backingIndexName)), 60, TimeUnit.SECONDS); + assertBusy(() -> assertFalse(indexExists(restoredIndexName)), 60, TimeUnit.SECONDS); assertTrue("the snapshot we generate in the cold phase should be deleted by the delete phase", waitUntil(() -> { - try { - Request getSnapshotsRequest = new Request("GET", "_snapshot/" + snapshotRepo + "/" + snapshotName[0]); - Response getSnapshotsResponse = client().performRequest(getSnapshotsRequest); - return EntityUtils.toString(getSnapshotsResponse.getEntity()).contains("snapshot_missing_exception"); - } catch (IOException e) { - return false; - } + try { + Request getSnapshotsRequest = new Request("GET", "_snapshot/" + snapshotRepo + "/_all"); + Response getSnapshotsResponse = client().performRequest(getSnapshotsRequest); + + Map responseMap; + try (InputStream is = getSnapshotsResponse.getEntity().getContent()) { + responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); + } + List responses = (List) responseMap.get("responses"); + Object snapshots = ((Map) responses.get(0)).get("snapshots"); + return ((List>) snapshots).size() == 0; + } catch (Exception e) { + logger.error(e.getMessage(), e); + return false; + } }, 30, TimeUnit.SECONDS)); } From 114c39625bd10c0c2b1263d40df06abfca64465d Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Feb 2021 12:23:51 +0000 Subject: [PATCH 14/24] Make GET _cluster/stats cancellable (#68676) Today `GET _cluster/stats` can be quite expensive, and is typically retrieved periodically by monitoring systems (e.g. Metricbeat) that implement a client-side timeout. When the client times out it closes the HTTP connection in use. With this commit we react to the close of the HTTP connection by cancelling the ongoing stats request, avoiding unnecessary duplicated work. Relates #55550 --- .../http/ClusterStatsRestCancellationIT.java | 189 ++++++++++++ .../admin/cluster/stats/AnalysisStats.java | 3 +- .../cluster/stats/ClusterStatsRequest.java | 9 + .../admin/cluster/stats/MappingStats.java | 3 +- .../stats/TransportClusterStatsAction.java | 131 +++++--- .../support/nodes/TransportNodesAction.java | 38 ++- .../util/CancellableSingleObjectCache.java | 283 +++++++++++++++++ .../admin/cluster/RestClusterStatsAction.java | 4 +- .../cluster/stats/AnalysisStatsTests.java | 21 +- .../cluster/stats/MappingStatsTests.java | 21 +- .../nodes/TransportNodesActionTests.java | 14 +- .../CancellableSingleObjectCacheTests.java | 292 ++++++++++++++++++ .../ClusterStatsMonitoringDocTests.java | 4 +- 13 files changed, 937 insertions(+), 75 deletions(-) create mode 100644 qa/smoke-test-http/src/test/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java create mode 100644 server/src/main/java/org/elasticsearch/common/util/CancellableSingleObjectCache.java create mode 100644 server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java new file mode 100644 index 0000000000000..0faf470658301 --- /dev/null +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.http; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Cancellable; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.engine.ReadOnlyEngine; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.index.translog.TranslogStats; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.EnginePlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CancellationException; +import java.util.concurrent.Semaphore; +import java.util.function.Function; + +import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.not; + +public class ClusterStatsRestCancellationIT extends HttpSmokeTestCase { + + public static final Setting BLOCK_STATS_SETTING = Setting.boolSetting("index.block_stats", false, Setting.Property.IndexScope); + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), ClusterStatsRestCancellationIT.StatsBlockingPlugin.class); + } + + @Override + protected boolean addMockInternalEngine() { + return false; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + // disable internal cluster info service to avoid internal cluster stats calls + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), false) + .build(); + } + + public void testClusterStateRestCancellation() throws Exception { + + createIndex("test", Settings.builder().put(BLOCK_STATS_SETTING.getKey(), true).build()); + ensureGreen("test"); + + final List statsBlocks = new ArrayList<>(); + for (final IndicesService indicesService : internalCluster().getInstances(IndicesService.class)) { + for (final IndexService indexService : indicesService) { + for (final IndexShard indexShard : indexService) { + final Engine engine = IndexShardTestCase.getEngine(indexShard); + if (engine instanceof 
StatsBlockingEngine) { + statsBlocks.add(((StatsBlockingEngine) engine).statsBlock); + } + } + } + } + assertThat(statsBlocks, not(empty())); + + final List releasables = new ArrayList<>(); + try { + for (final Semaphore statsBlock : statsBlocks) { + statsBlock.acquire(); + releasables.add(statsBlock::release); + } + + final Request clusterStatsRequest = new Request(HttpGet.METHOD_NAME, "/_cluster/stats"); + + final PlainActionFuture future = new PlainActionFuture<>(); + logger.info("--> sending cluster state request"); + final Cancellable cancellable = getRestClient().performRequestAsync(clusterStatsRequest, new ResponseListener() { + @Override + public void onSuccess(Response response) { + future.onResponse(null); + } + + @Override + public void onFailure(Exception exception) { + future.onFailure(exception); + } + }); + + logger.info("--> waiting for task to start"); + assertBusy(() -> { + final List tasks = client().admin().cluster().prepareListTasks().get().getTasks(); + assertTrue(tasks.toString(), tasks.stream().anyMatch(t -> t.getAction().startsWith(ClusterStatsAction.NAME))); + }); + + logger.info("--> waiting for at least one task to hit a block"); + assertBusy(() -> assertTrue(statsBlocks.stream().anyMatch(Semaphore::hasQueuedThreads))); + + logger.info("--> cancelling cluster stats request"); + cancellable.cancel(); + expectThrows(CancellationException.class, future::actionGet); + + logger.info("--> checking that all cluster stats tasks are marked as cancelled"); + assertBusy(() -> { + boolean foundTask = false; + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + for (CancellableTask cancellableTask : transportService.getTaskManager().getCancellableTasks().values()) { + if (cancellableTask.getAction().startsWith(ClusterStatsAction.NAME)) { + foundTask = true; + assertTrue(cancellableTask.isCancelled()); + } + } + } + assertTrue(foundTask); + }); + } finally { + Releasables.close(releasables); + } + + logger.info("--> checking that all cluster stats tasks have finished"); + assertBusy(() -> { + final List tasks = client().admin().cluster().prepareListTasks().get().getTasks(); + assertTrue(tasks.toString(), tasks.stream().noneMatch(t -> t.getAction().startsWith(ClusterStatsAction.NAME))); + }); + } + + public static class StatsBlockingPlugin extends Plugin implements EnginePlugin { + + @Override + public Optional getEngineFactory(IndexSettings indexSettings) { + if (BLOCK_STATS_SETTING.get(indexSettings.getSettings())) { + return Optional.of(StatsBlockingEngine::new); + } + return Optional.empty(); + } + + @Override + public List> getSettings() { + return singletonList(BLOCK_STATS_SETTING); + } + } + + private static class StatsBlockingEngine extends ReadOnlyEngine { + + final Semaphore statsBlock = new Semaphore(1); + + StatsBlockingEngine(EngineConfig config) { + super(config, null, new TranslogStats(), true, Function.identity(), true); + } + + @Override + public SeqNoStats getSeqNoStats(long globalCheckpoint) { + try { + statsBlock.acquire(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + statsBlock.release(); + return super.getSeqNoStats(globalCheckpoint); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java index ba5d3bc3261f3..94cee3f6f475a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java @@ -41,7 +41,7 @@ public final class AnalysisStats implements ToXContentFragment, Writeable { /** * Create {@link AnalysisStats} from the given cluster state. */ - public static AnalysisStats of(Metadata metadata) { + public static AnalysisStats of(Metadata metadata, Runnable ensureNotCancelled) { final Map usedCharFilterTypes = new HashMap<>(); final Map usedTokenizerTypes = new HashMap<>(); final Map usedTokenFilterTypes = new HashMap<>(); @@ -52,6 +52,7 @@ public static AnalysisStats of(Metadata metadata) { final Map usedBuiltInAnalyzers = new HashMap<>(); for (IndexMetadata indexMetadata : metadata) { + ensureNotCancelled.run(); if (indexMetadata.isSystem()) { // Don't include system indices in statistics about analysis, // we care about the user's indices. diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java index cbe6539fff186..ca2ec4e5607e3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java @@ -11,8 +11,12 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; +import java.util.Map; /** * A request to get cluster level stats. @@ -31,6 +35,11 @@ public ClusterStatsRequest(String... nodesIds) { super(nodesIds); } + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java index 4faecdd537f6b..0f9d5d85946a2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java @@ -38,9 +38,10 @@ public final class MappingStats implements ToXContentFragment, Writeable { /** * Create {@link MappingStats} from the given cluster state. */ - public static MappingStats of(Metadata metadata) { + public static MappingStats of(Metadata metadata, Runnable ensureNotCancelled) { Map fieldTypes = new HashMap<>(); for (IndexMetadata indexMetadata : metadata) { + ensureNotCancelled.run(); if (indexMetadata.isSystem()) { // Don't include system indices in statistics about mappings, // we care about the user's indices. 
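[Illustrative sketch, not part of the patch: the ensureNotCancelled argument threaded through AnalysisStats.of and MappingStats.of above is a plain Runnable that throws to abort the per-index loop early. A minimal wiring, mirroring the CancellableTask handling in TransportClusterStatsAction below; all class names are taken from this patch.]

    // Derive the polling hook from the request's cancellable task. The stats
    // computations run it once per index, so a cancelled request fails fast with
    // a TaskCancelledException instead of walking the entire cluster metadata.
    final CancellableTask cancellableTask = (CancellableTask) task;
    final Runnable ensureNotCancelled = () -> {
        if (cancellableTask.isCancelled()) {
            throw new TaskCancelledException("task cancelled");
        }
    };
    MappingStats mappingStats = MappingStats.of(metadata, ensureNotCancelled);
    AnalysisStats analysisStats = AnalysisStats.of(metadata, ensureNotCancelled);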
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 6760e0219bc66..21d4747fa9f4e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -8,10 +8,10 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.StepListener; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.CancellableSingleObjectCache; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.seqno.RetentionLeaseStats; @@ -34,7 +35,10 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportService; @@ -43,12 +47,12 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; public class TransportClusterStatsAction extends TransportNodesAction { - private static final Logger logger = LogManager.getLogger(TransportClusterStatsAction.class); - private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store, CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments); @@ -56,12 +60,8 @@ public class TransportClusterStatsAction extends TransportNodesAction mappingStatsCache = new MetadataStatsCache<>(MappingStats::of); + private final MetadataStatsCache analysisStatsCache = new MetadataStatsCache<>(AnalysisStats::of); @Inject public TransportClusterStatsAction(ThreadPool threadPool, ClusterService clusterService, TransportService transportService, @@ -73,49 +73,45 @@ public TransportClusterStatsAction(ThreadPool threadPool, ClusterService cluster } @Override - protected ClusterStatsResponse newResponse(ClusterStatsRequest request, - List responses, List failures) { - assert Transports.assertNotTransportThread("Constructor of ClusterStatsResponse runs expensive computations on mappings found in" + - " the cluster state that are too slow for a transport thread"); + protected void newResponseAsync( + final Task task, + final ClusterStatsRequest request, + final List responses, + final List failures, + final ActionListener listener) { + assert 
Transports.assertNotTransportThread("Computation of mapping/analysis stats runs expensive computations on mappings found in " + + "the cluster state that are too slow for a transport thread"); + assert Thread.currentThread().getName().contains("[" + ThreadPool.Names.MANAGEMENT + "]") : Thread.currentThread().getName(); + assert task instanceof CancellableTask; + final CancellableTask cancellableTask = (CancellableTask) task; final ClusterState state = clusterService.state(); final Metadata metadata = state.metadata(); - MappingStats currentMappingStats = null; - AnalysisStats currentAnalysisStats = null; - // check if we already served a stats request for the current metadata version and have the stats cached - synchronized (statsMutex) { - if (metadata.version() == metaVersion) { - logger.trace("Found cached mapping and analysis stats for metadata version [{}]", metadata.version()); - currentMappingStats = this.mappingStats; - currentAnalysisStats = this.analysisStats; - } - } - if (currentMappingStats == null) { - // we didn't find any cached stats so we recompute them outside the mutex since the computation might be expensive for larger - // cluster states - logger.trace("Computing mapping and analysis stats for metadata version [{}]", metadata.version()); - currentMappingStats = MappingStats.of(metadata); - currentAnalysisStats = AnalysisStats.of(metadata); - synchronized (statsMutex) { - // cache the computed stats unless they became outdated because of a concurrent cluster state update and a concurrent - // stats request has already cached a newer version - if (metadata.version() > metaVersion) { - logger.trace("Caching mapping and analysis stats for metadata version [{}]", metadata.version()); - metaVersion = metadata.version(); - this.mappingStats = currentMappingStats; - this.analysisStats = currentAnalysisStats; - } - } - } - VersionStats versionStats = VersionStats.of(metadata, responses); - return new ClusterStatsResponse( - System.currentTimeMillis(), - state.metadata().clusterUUID(), - clusterService.getClusterName(), - responses, - failures, - currentMappingStats, - currentAnalysisStats, - versionStats); + + final StepListener mappingStatsStep = new StepListener<>(); + final StepListener analysisStatsStep = new StepListener<>(); + mappingStatsCache.get(metadata, cancellableTask::isCancelled, mappingStatsStep); + analysisStatsCache.get(metadata, cancellableTask::isCancelled, analysisStatsStep); + mappingStatsStep.whenComplete(mappingStats -> analysisStatsStep.whenComplete(analysisStats -> ActionListener.completeWith( + listener, + () -> new ClusterStatsResponse( + System.currentTimeMillis(), + metadata.clusterUUID(), + clusterService.getClusterName(), + responses, + failures, + mappingStats, + analysisStats, + VersionStats.of(metadata, responses)) + ), listener::onFailure), listener::onFailure); + } + + @Override + protected ClusterStatsResponse newResponse( + ClusterStatsRequest request, + List responses, + List failures) { + assert false; + throw new UnsupportedOperationException("use newResponseAsync instead"); } @Override @@ -130,12 +126,17 @@ protected ClusterStatsNodeResponse newNodeResponse(StreamInput in) throws IOExce @Override protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest, Task task) { + assert task instanceof CancellableTask; + final CancellableTask cancellableTask = (CancellableTask) task; NodeInfo nodeInfo = nodeService.info(true, true, false, true, false, true, false, true, false, false, false); NodeStats nodeStats = 
nodeService.stats(CommonStatsFlags.NONE, true, true, true, false, true, false, false, false, false, false, true, false, false, false); List shardsStats = new ArrayList<>(); for (IndexService indexService : indicesService) { for (IndexShard indexShard : indexService) { + if (cancellableTask.isCancelled()) { + throw new TaskCancelledException("task cancelled"); + } if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) { // only report on fully started shards CommitStats commitStats; @@ -186,10 +187,38 @@ public ClusterStatsNodeRequest(StreamInput in) throws IOException { this.request = request; } + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); request.writeTo(out); } } + + private static class MetadataStatsCache extends CancellableSingleObjectCache { + private final BiFunction function; + + MetadataStatsCache(BiFunction function) { + this.function = function; + } + + @Override + protected void refresh(Metadata metadata, Runnable ensureNotCancelled, ActionListener listener) { + ActionListener.completeWith(listener, () -> function.apply(metadata, ensureNotCancelled)); + } + + @Override + protected Long getKey(Metadata indexMetadata) { + return indexMetadata.version(); + } + + @Override + protected boolean isFresh(Long currentKey, Long newKey) { + return newKey <= currentKey; + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 95422969710df..7f83785178331 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -101,15 +100,22 @@ protected void doExecute(Task task, NodesRequest request, ActionListener nodesResponses) { + // exposed for tests + void newResponse(Task task, NodesRequest request, AtomicReferenceArray nodesResponses, ActionListener listener) { + + if (nodesResponses == null) { + listener.onFailure(new NullPointerException("nodesResponses")); + return; + } + final List responses = new ArrayList<>(); final List failures = new ArrayList<>(); @@ -123,7 +129,7 @@ protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses, List failures); + /** + * Create a new {@link NodesResponse}, possibly asynchronously. 
The default implementation is synchronous and calls + * {@link #newResponse(BaseNodesRequest, List, List)} + */ + protected void newResponseAsync( + Task task, + NodesRequest request, + List responses, + List failures, + ActionListener listener) { + ActionListener.completeWith(listener, () -> newResponse(request, responses, failures)); + } + protected abstract NodeRequest newNodeRequest(NodesRequest request); protected abstract NodeResponse newNodeResponse(StreamInput in) throws IOException; @@ -181,8 +200,9 @@ class AsyncAction { void start() { final DiscoveryNode[] nodes = request.concreteNodes(); if (nodes.length == 0) { - // nothing to notify - threadPool.generic().execute(() -> listener.onResponse(newResponse(request, responses))); + // nothing to notify, so respond immediately, but always fork even if finalExecutor == SAME + final String executor = finalExecutor.equals(ThreadPool.Names.SAME) ? ThreadPool.Names.GENERIC : finalExecutor; + threadPool.executor(executor).execute(() -> newResponse(task, request, responses, listener)); return; } final TransportRequestOptions transportRequestOptions = TransportRequestOptions.timeout(request.timeout()); @@ -237,7 +257,7 @@ private void onFailure(int idx, String nodeId, Throwable t) { } private void finishHim() { - threadPool.executor(finalExecutor).execute(ActionRunnable.supply(listener, () -> newResponse(request, responses))); + threadPool.executor(finalExecutor).execute(() -> newResponse(task, request, responses, listener)); } } diff --git a/server/src/main/java/org/elasticsearch/common/util/CancellableSingleObjectCache.java b/server/src/main/java/org/elasticsearch/common/util/CancellableSingleObjectCache.java new file mode 100644 index 0000000000000..e994f3e531bd0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/CancellableSingleObjectCache.java @@ -0,0 +1,283 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ListenableActionFuture; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; +import org.elasticsearch.tasks.TaskCancelledException; + +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BooleanSupplier; + +/** + * A cache of a single object whose refresh process can be cancelled. The cached value is computed lazily on the first retrieval, and + * associated with a key which is used to determine its freshness for subsequent retrievals. + *
+ * This is useful for things like computing stats over cluster metadata: the first time stats are requested they are computed, but + * subsequent calls re-use the computed value as long as they pertain to the same metadata version. If stats are requested for a different + * metadata version then the cached value is dropped and a new one is computed. + *
+ * Retrievals happen via the async {@link #get} method. If a retrieval is cancelled (e.g. the channel on which to return the stats is + * closed) then the computation carries on running in case another retrieval for the same key arrives in future. However if all of the + * retrievals for a key are cancelled and a retrieval occurs for a fresher key then the computation itself is cancelled. + *
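+ * <p>
+ * An illustrative sketch of a concrete subclass (the class name here is hypothetical; compare the very similar
+ * {@code MetadataStatsCache} in {@code TransportClusterStatsAction} earlier in this patch, which keys the cached
+ * stats by cluster metadata version):
+ * <pre>{@code
+ * class MappingStatsCache extends CancellableSingleObjectCache<Metadata, Long, MappingStats> {
+ *     @Override
+ *     protected void refresh(Metadata metadata, Runnable ensureNotCancelled, ActionListener<MappingStats> listener) {
+ *         ActionListener.completeWith(listener, () -> MappingStats.of(metadata, ensureNotCancelled));
+ *     }
+ *     @Override
+ *     protected Long getKey(Metadata metadata) {
+ *         return metadata.version();
+ *     }
+ * }
+ * }</pre>
+ *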
+ * Cancellation is based on polling: the {@link #refresh} method checks whether it should abort whenever it is convenient to do so, which in
+ * turn checks all the pending retrievals to see whether they have been cancelled.
+ *
+ * @param <Input> The type of the input to the computation of the cached value.
+ * @param <Key> The key type. The cached value is associated with a key, and subsequent {@link #get} calls compare keys of the given input
+ * value to determine whether the cached value is fresh or not. See {@link #isFresh}.
+ * @param <Value> The type of the cached value.
+ */
+public abstract class CancellableSingleObjectCache<Input, Key, Value> {
+
+ private final AtomicReference<CachedItem> currentCachedItemRef = new AtomicReference<>();
+
+ /**
+ * Compute a new value for the cache.
+ *
+ * If an exception is thrown, or passed to the {@code listener}, then it is passed on to all waiting listeners but it is not cached so + * that subsequent retrievals will trigger subsequent calls to this method. + *
+ * Implementations of this method should poll for cancellation by running {@code ensureNotCancelled} whenever appropriate. The
+ * computation is cancelled if all of the corresponding retrievals have been cancelled and a retrieval has since happened for a
+ * fresher key.
+ *
+ * @param input The input to this computation, which will be converted to a key and used to determine whether it is
+ * suitably fresh for future requests too.
+ * @param ensureNotCancelled A {@link Runnable} which throws a {@link TaskCancelledException} if the result of the computation is no
+ * longer needed. On cancellation, notifying the {@code listener} is optional.
+ * @param listener An {@link ActionListener} which should be notified when the computation completes. If the computation fails
+ * by calling {@link ActionListener#onFailure} then the result is returned to the pending listeners but is not
+ * cached.
+ */
+ protected abstract void refresh(Input input, Runnable ensureNotCancelled, ActionListener<Value> listener);
+
+ /**
+ * Compute the key for the given input value.
+ */
+ protected abstract Key getKey(Input input);
+
+ /**
+ * Compute whether the {@code currentKey} is fresh enough for a retrieval associated with {@code newKey}.
+ *
+ * @param currentKey The key of the current (cached or pending) value.
+ * @param newKey The key associated with a new retrieval.
+ * @return {@code true} if a value computed for {@code currentKey} is fresh enough to satisfy a retrieval for {@code newKey}.
+ */
+ protected boolean isFresh(Key currentKey, Key newKey) {
+ return currentKey.equals(newKey);
+ }
+
+ /**
+ * Start a retrieval for the value associated with the given {@code input}, and pass it to the given {@code listener}.
+ *
+ * If a fresh-enough result is available when this method is called then the {@code listener} is notified immediately, on this thread. + * If a fresh-enough result is already being computed then the {@code listener} is captured and will be notified when the result becomes + * available, on the thread on which the refresh completes. If no fresh-enough result is either pending or available then this method + * starts to compute one by calling {@link #refresh} on this thread. + * + * @param input The input to compute the desired value, converted to a {@link Key} to determine if the value that's currently + * cached or pending is fresh enough. + * @param isCancelled Returns {@code true} if the listener no longer requires the value being computed. + * @param listener The listener to notify when the desired value becomes available. + */ + public final void get(Input input, BooleanSupplier isCancelled, ActionListener listener) { + + final Key key = getKey(input); + + CachedItem newCachedItem = null; + + do { + if (isCancelled.getAsBoolean()) { + listener.onFailure(new TaskCancelledException("task cancelled")); + return; + } + + final CachedItem currentCachedItem = currentCachedItemRef.get(); + if (currentCachedItem != null && isFresh(currentCachedItem.getKey(), key)) { + final boolean listenerAdded = currentCachedItem.addListener(listener, isCancelled); + if (listenerAdded) { + return; + } + + assert currentCachedItem.refCount() == 0 : currentCachedItem.refCount(); + assert currentCachedItemRef.get() != currentCachedItem; + + // Our item was only just released, possibly cancelled, by another get() with a fresher key. We don't simply retry + // since that would evict the new item. Instead let's see if it was cancelled or whether it completed properly. + if (currentCachedItem.getFuture().isDone()) { + try { + listener.onResponse(currentCachedItem.getFuture().actionGet(0L)); + return; + } catch (TaskCancelledException e) { + // previous task was cancelled before completion, therefore we must perform our own one-shot refresh + } catch (Exception e) { + // either the refresh completed exceptionally or the listener threw an exception; call onFailure() either way + listener.onFailure(e); + return; + } + } // else it's just about to be cancelled, so we can just retry knowing that it will be removed very soon + + continue; + } + + if (newCachedItem == null) { + newCachedItem = new CachedItem(key); + } + + if (currentCachedItemRef.compareAndSet(currentCachedItem, newCachedItem)) { + if (currentCachedItem != null) { + currentCachedItem.decRef(); + } + startRefresh(input, newCachedItem); + final boolean listenerAdded = newCachedItem.addListener(listener, isCancelled); + assert listenerAdded; + newCachedItem.decRef(); + return; + } + // else the CAS failed because we lost a race to a concurrent retrieval; try again from the top since we expect the race winner + // to be fresh enough for us and therefore we can just wait for its result. + } while (true); + } + + private void startRefresh(Input input, CachedItem cachedItem) { + try { + refresh(input, cachedItem::ensureNotCancelled, cachedItem.getFuture()); + } catch (Exception e) { + cachedItem.getFuture().onFailure(e); + } + } + + /** + * An item in the cache, representing a single invocation of {@link #refresh}. + *
+ * This item is ref-counted so that it can be cancelled if it becomes irrelevant. References are held by:
+ *
+ * <ul>
+ * <li>Every listener that is waiting for the result, released on cancellation. There's no need to release on completion because
+ * there's nothing to cancel once the refresh has completed.</li>
+ *
+ * <li>The cache itself, released once this item is no longer the current one in the cache, either because it failed or because a
+ * fresher computation was started.</li>
+ *
+ * <li>The process that adds the first listener, released once the first listener is added.</li>
+ *
+ * </ul>
+ */ + private final class CachedItem extends AbstractRefCounted { + + private final Key key; + private final ListenableActionFuture future = new ListenableActionFuture<>(); + private final CancellationChecks cancellationChecks = new CancellationChecks(); + + CachedItem(Key key) { + super("cached item"); + this.key = key; + incRef(); // start with a refcount of 2 so we're not closed while adding the first listener + this.future.addListener(new ActionListener<>() { + @Override + public void onResponse(Value value) { + cancellationChecks.clear(); + } + + @Override + public void onFailure(Exception e) { + cancellationChecks.clear(); + // Do not cache this failure + if (currentCachedItemRef.compareAndSet(CachedItem.this, null)) { + // Release reference held by the cache, so that concurrent calls to addListener() fail and retry. Not totally + // necessary, we could also fail those listeners as if they'd been added slightly sooner, but it makes the ref + // counting easier to document. + decRef(); + } + } + }); + } + + Key getKey() { + return key; + } + + ListenableActionFuture getFuture() { + return future; + } + + boolean addListener(ActionListener listener, BooleanSupplier isCancelled) { + if (tryIncRef()) { + if (future.isDone()) { + // No need to bother with ref counting & cancellation any more, just complete the listener. + // We know it wasn't cancelled because there are still references. + ActionListener.completeWith(listener, () -> future.actionGet(0L)); + } else { + // Refresh is still pending; it's not cancelled because there are still references. + future.addListener(listener); + final AtomicBoolean released = new AtomicBoolean(); + cancellationChecks.add(() -> { + if (released.get() == false && isCancelled.getAsBoolean() && released.compareAndSet(false, true)) { + decRef(); + } + }); + } + return true; + } else { + return false; + } + } + + void ensureNotCancelled() { + cancellationChecks.runAll(); + if (refCount() == 0) { + throw new TaskCancelledException("task cancelled"); + } + } + + @Override + protected void closeInternal() { + // Complete the future (and hence all its listeners) with an exception if it hasn't already been completed. + future.onFailure(new TaskCancelledException("task cancelled")); + } + } + + private static final class CancellationChecks { + @Nullable // if cleared + private ArrayList checks = new ArrayList<>(); + + synchronized void clear() { + checks = null; + } + + synchronized void add(Runnable check) { + if (checks != null) { + checks.add(check); + } + } + + void runAll() { + // It's ok not to run all the checks so there's no need for a completely synchronized iteration. 
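+ // Instead the size is read once under the lock, each element is re-fetched under the lock, and the check itself runs
+ // outside it: a concurrent clear() simply truncates the iteration, and a check added after the size was read is
+ // picked up by a later runAll() call.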
+ final int count; + synchronized (this) { + if (checks == null) { + return; + } + count = checks.size(); + } + for (int i = 0; i < count; i++) { + final Runnable cancellationCheck; + synchronized (this) { + if (checks == null) { + return; + } + cancellationCheck = checks.get(i); + } + cancellationCheck.run(); + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java index bc9221a5c1b69..1fee2db8429e1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; +import org.elasticsearch.rest.action.RestCancellableNodeClient; import java.io.IOException; import java.util.List; @@ -37,7 +38,8 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterStatsRequest clusterStatsRequest = new ClusterStatsRequest().nodesIds(request.paramAsStringArray("nodeId", null)); clusterStatsRequest.timeout(request.param("timeout")); - return channel -> client.admin().cluster().clusterStats(clusterStatsRequest, new NodesResponseRestListener<>(channel)); + return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()) + .admin().cluster().clusterStats(clusterStatsRequest, new NodesResponseRestListener<>(channel)); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStatsTests.java index e14718c83d378..ed08a51ca5840 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStatsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.io.IOException; @@ -175,7 +176,7 @@ public void testAccountsRegularIndices() { Metadata metadata = new Metadata.Builder() .put(indexMetadata) .build(); - AnalysisStats analysisStats = AnalysisStats.of(metadata); + AnalysisStats analysisStats = AnalysisStats.of(metadata, () -> {}); IndexFeatureStats expectedStats = new IndexFeatureStats("german"); expectedStats.count = 1; expectedStats.indexCount = 1; @@ -198,7 +199,23 @@ public void testIgnoreSystemIndices() { Metadata metadata = new Metadata.Builder() .put(indexMetadata) .build(); - AnalysisStats analysisStats = AnalysisStats.of(metadata); + AnalysisStats analysisStats = AnalysisStats.of(metadata, () -> {}); assertEquals(Collections.emptySet(), analysisStats.getUsedBuiltInAnalyzers()); } + + public void testChecksForCancellation() { + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 4) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + IndexMetadata.Builder indexMetadata = new IndexMetadata.Builder("foo") + 
.settings(settings); + Metadata metadata = new Metadata.Builder() + .put(indexMetadata) + .build(); + expectThrows(TaskCancelledException.class, () -> AnalysisStats.of(metadata, () -> { + throw new TaskCancelledException("task cancelled"); + })); + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java index f01c85c16f47f..262bd251bd207 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.io.IOException; @@ -75,7 +76,7 @@ public void testAccountsRegularIndices() { Metadata metadata = new Metadata.Builder() .put(indexMetadata) .build(); - MappingStats mappingStats = MappingStats.of(metadata); + MappingStats mappingStats = MappingStats.of(metadata, () -> {}); IndexFeatureStats expectedStats = new IndexFeatureStats("long"); expectedStats.count = 1; expectedStats.indexCount = 1; @@ -98,7 +99,23 @@ public void testIgnoreSystemIndices() { Metadata metadata = new Metadata.Builder() .put(indexMetadata) .build(); - MappingStats mappingStats = MappingStats.of(metadata); + MappingStats mappingStats = MappingStats.of(metadata, () -> {}); assertEquals(Collections.emptySet(), mappingStats.getFieldTypeStats()); } + + public void testChecksForCancellation() { + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 4) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + IndexMetadata.Builder indexMetadata = new IndexMetadata.Builder("foo") + .settings(settings); + Metadata metadata = new Metadata.Builder() + .put(indexMetadata) + .build(); + expectThrows(TaskCancelledException.class, () -> MappingStats.of(metadata, () -> { + throw new TaskCancelledException("task cancelled"); + })); + } } diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 4764da06eab5a..48f044d509a6d 100644 --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -46,6 +46,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; +import static java.util.Collections.emptyMap; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.mockito.Mockito.mock; @@ -90,8 +91,10 @@ public void testNodesSelectors() { } public void testNewResponseNullArray() { - TransportNodesAction action = getTestTransportNodesAction(); - expectThrows(NullPointerException.class, () -> action.newResponse(new TestNodesRequest(), null)); + TransportNodesAction action = getTestTransportNodesAction(); + final PlainActionFuture future = new PlainActionFuture<>(); + action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), new 
TestNodesRequest(), null, future); + expectThrows(NullPointerException.class, future::actionGet); } public void testNewResponse() { @@ -99,9 +102,6 @@ public void testNewResponse() { TestNodesRequest request = new TestNodesRequest(); List expectedNodeResponses = mockList(TestNodeResponse::new, randomIntBetween(0, 2)); expectedNodeResponses.add(new TestNodeResponse()); - List nodeResponses = new ArrayList<>(expectedNodeResponses); - // This should be ignored: - nodeResponses.add(new OtherNodeResponse()); List failures = mockList( () -> new FailedNodeException( randomAlphaOfLength(8), @@ -116,7 +116,9 @@ public void testNewResponse() { AtomicReferenceArray atomicArray = new AtomicReferenceArray<>(allResponses.toArray()); - TestNodesResponse response = action.newResponse(request, atomicArray); + final PlainActionFuture future = new PlainActionFuture<>(); + action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), request, atomicArray, future); + TestNodesResponse response = future.actionGet(); assertSame(request, response.request); // note: I shuffled the overall list, so it's not possible to guarantee that it's in the right order diff --git a/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java b/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java new file mode 100644 index 0000000000000..521e03cc8aac6 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java @@ -0,0 +1,292 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.StepListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.LinkedList; +import java.util.Objects; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; + +public class CancellableSingleObjectCacheTests extends ESTestCase { + + public void testNoPendingRefreshIfAlreadyCancelled() { + final TestCache testCache = new TestCache(); + final TestFuture future = new TestFuture(); + testCache.get("foo", () -> true, future); + testCache.assertPendingRefreshes(0); + assertTrue(future.isDone()); + expectThrows(ExecutionException.class, TaskCancelledException.class, future::get); + } + + public void testListenersCompletedByRefresh() { + final TestCache testCache = new TestCache(); + + // The first get() calls the refresh function + final TestFuture future0 = new TestFuture(); + testCache.get("foo", () -> false, future0); + testCache.assertPendingRefreshes(1); + + // The second get() with a matching key does not refresh again + final TestFuture future1 = new TestFuture(); + testCache.get("foo", () -> false, future1); + assertFalse(future0.isDone()); + assertFalse(future1.isDone()); + testCache.assertPendingRefreshes(1); + testCache.completeNextRefresh("foo", 1); + assertThat(future0.actionGet(0L), equalTo(1)); + assertThat(future0.actionGet(0L), sameInstance(future1.actionGet(0L))); + + // A further get() call with a matching key re-uses the cached value + final TestFuture future2 = new TestFuture(); + testCache.get("foo", () -> false, future2); + testCache.assertNoPendingRefreshes(); + assertThat(future2.actionGet(0L), sameInstance(future1.actionGet(0L))); + + // A call with a different key triggers another refresh + final TestFuture future3 = new TestFuture(); + testCache.get("bar", () -> false, future3); + assertFalse(future3.isDone()); + testCache.assertPendingRefreshes(1); + testCache.completeNextRefresh("bar", 2); + assertThat(future3.actionGet(0L), equalTo(2)); + } + + public void testListenerCompletedByRefreshEvenIfDiscarded() { + final TestCache testCache = new TestCache(); + + // This computation is discarded before it completes. + final TestFuture future1 = new TestFuture(); + final AtomicBoolean future1Cancelled = new AtomicBoolean(); + testCache.get("foo", future1Cancelled::get, future1); + future1Cancelled.set(true); + testCache.assertPendingRefreshes(1); + assertFalse(future1.isDone()); + + // However the refresh continues and makes its result available to a later get() call for the same value. 
+ final TestFuture future2 = new TestFuture(); + testCache.get("foo", () -> false, future2); + testCache.assertPendingRefreshes(1); + testCache.completeNextRefresh("foo", 1); + assertThat(future2.actionGet(0L), equalTo(1)); + + // ... and the original listener is also completed successfully + assertThat(future1.actionGet(0L), sameInstance(future2.actionGet(0L))); + } + + public void testListenerCompletedWithCancellationExceptionIfRefreshCancelled() { + final TestCache testCache = new TestCache(); + + // This computation is discarded before it completes. + final TestFuture future1 = new TestFuture(); + final AtomicBoolean future1Cancelled = new AtomicBoolean(); + testCache.get("foo", future1Cancelled::get, future1); + future1Cancelled.set(true); + testCache.assertPendingRefreshes(1); + + assertFalse(future1.isDone()); + + // A second get() call with a non-matching key cancels the original refresh and starts another one + final TestFuture future2 = new TestFuture(); + testCache.get("bar", () -> false, future2); + testCache.assertPendingRefreshes(2); + testCache.assertNextRefreshCancelled(); + expectThrows(TaskCancelledException.class, () -> future1.actionGet(0L)); + testCache.completeNextRefresh("bar", 2); + assertThat(future2.actionGet(0L), equalTo(2)); + } + + public void testExceptionCompletesListenersButIsNotCached() { + final TestCache testCache = new TestCache(); + + // If a refresh results in an exception then all the pending get() calls complete exceptionally + final TestFuture future0 = new TestFuture(); + final TestFuture future1 = new TestFuture(); + testCache.get("foo", () -> false, future0); + testCache.get("foo", () -> false, future1); + testCache.assertPendingRefreshes(1); + final ElasticsearchException exception = new ElasticsearchException("simulated"); + testCache.completeNextRefresh(exception); + assertSame(exception, expectThrows(ElasticsearchException.class, () -> future0.actionGet(0L))); + assertSame(exception, expectThrows(ElasticsearchException.class, () -> future1.actionGet(0L))); + + testCache.assertNoPendingRefreshes(); + // The exception is not cached, however, so a subsequent get() call with a matching key performs another refresh + final TestFuture future2 = new TestFuture(); + testCache.get("foo", () -> false, future2); + testCache.assertPendingRefreshes(1); + testCache.completeNextRefresh("foo", 1); + assertThat(future2.actionGet(0L), equalTo(1)); + } + + public void testConcurrentRefreshesAndCancellation() throws InterruptedException { + final ThreadPool threadPool = new TestThreadPool("test"); + try { + final CancellableSingleObjectCache testCache = new CancellableSingleObjectCache<>() { + @Override + protected void refresh(String s, Runnable ensureNotCancelled, ActionListener listener) { + threadPool.generic().execute(() -> ActionListener.completeWith(listener, () -> { + ensureNotCancelled.run(); + if (s.equals("FAIL")) { + throw new ElasticsearchException("simulated"); + } + return s.length(); + })); + } + + @Override + protected String getKey(String s) { + return s; + } + }; + final int count = 1000; + final CountDownLatch startLatch = new CountDownLatch(1); + final CountDownLatch finishLatch = new CountDownLatch(count); + final BlockingQueue queue = ConcurrentCollections.newBlockingQueue(); + + for (int i = 0; i < count; i++) { + final boolean cancel = randomBoolean(); + final String input = randomFrom("FAIL", "foo", "barbaz", "quux", "gruly"); + queue.offer(() -> { + try { + assertTrue(startLatch.await(10, TimeUnit.SECONDS)); + } catch 
(InterruptedException e) { + throw new AssertionError(e); + } + + final StepListener stepListener = new StepListener<>(); + final AtomicBoolean isComplete = new AtomicBoolean(); + final AtomicBoolean isCancelled = new AtomicBoolean(); + testCache.get(input, isCancelled::get, ActionListener.runBefore(stepListener, + () -> assertTrue(isComplete.compareAndSet(false, true)))); + + final Runnable next = queue.poll(); + if (next != null) { + threadPool.generic().execute(next); + } + + if (cancel) { + isCancelled.set(true); + } + + stepListener.whenComplete(len -> { + finishLatch.countDown(); + assertThat(len, equalTo(input.length())); + assertNotEquals("FAIL", input); + }, e -> { + finishLatch.countDown(); + if (e instanceof TaskCancelledException) { + assertTrue(cancel); + } else { + assertEquals("FAIL", input); + } + }); + }); + } + + for (int i = 0; i < 10; i++) { + threadPool.generic().execute(Objects.requireNonNull(queue.poll())); + } + + startLatch.countDown(); + assertTrue(finishLatch.await(10, TimeUnit.SECONDS)); + } finally { + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } + } + + private static class TestCache extends CancellableSingleObjectCache { + + private final LinkedList>> pendingRefreshes = new LinkedList<>(); + + @Override + protected void refresh(String input, Runnable ensureNotCancelled, ActionListener listener) { + final StepListener> stepListener = new StepListener<>(); + pendingRefreshes.offer(stepListener); + stepListener.whenComplete(f -> ActionListener.completeWith(listener, () -> { + ensureNotCancelled.run(); + return f.apply(input); + }), listener::onFailure); + } + + @Override + protected String getKey(String s) { + return s; + } + + void assertPendingRefreshes(int expected) { + assertThat(pendingRefreshes.size(), equalTo(expected)); + } + + void assertNoPendingRefreshes() { + assertThat(pendingRefreshes, empty()); + } + + void completeNextRefresh(String key, int value) { + nextRefresh().onResponse(k -> { + assertThat(k, equalTo(key)); + return value; + }); + } + + void completeNextRefresh(Exception e) { + nextRefresh().onFailure(e); + } + + void assertNextRefreshCancelled() { + nextRefresh().onResponse(k -> { + throw new AssertionError("should not be called"); + }); + } + + private StepListener> nextRefresh() { + assertThat(pendingRefreshes, not(empty())); + final StepListener> nextRefresh = pendingRefreshes.poll(); + assertNotNull(nextRefresh); + return nextRefresh; + } + + } + + private static class TestFuture extends PlainActionFuture { + + private final AtomicBoolean isCompleted = new AtomicBoolean(); + + @Override + public void onResponse(Integer result) { + assertTrue(isCompleted.compareAndSet(false, true)); + super.onResponse(result); + } + + @Override + public void onFailure(Exception e) { + assertTrue(isCompleted.compareAndSet(false, true)); + super.onFailure(e); + } + } + +} diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index 4f8ec94c126c4..95ce27d6c56f1 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -336,8 +336,8 @@ public void testToXContent() throws IOException { clusterName, 
singletonList(mockNodeResponse), emptyList(), - MappingStats.of(metadata), - AnalysisStats.of(metadata), + MappingStats.of(metadata, () -> {}), + AnalysisStats.of(metadata, () -> {}), VersionStats.of(metadata, singletonList(mockNodeResponse))); final MonitoringDoc.Node node = new MonitoringDoc.Node("_uuid", "_host", "_addr", "_ip", "_name", 1504169190855L); From 3b249fa124534f3907db915eb502142171e54d97 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 10 Feb 2021 14:26:55 +0100 Subject: [PATCH 15/24] Use JNA to Speed up Snapshot Cache File Creation (#68687) Use JNA to speed up snapshot cache file creation. Do this in `:server` to bypass the security filter and move necessary bits of code to `:server` to enable the logic. Fall back to trying to create the file by writing zeros if anything except for the step determining free disk space fails. --- .../elasticsearch/bootstrap/Bootstrap.java | 13 ++++++ .../elasticsearch/bootstrap/JNACLibrary.java | 3 ++ .../elasticsearch/bootstrap/JNANatives.java | 43 +++++++++++++++++++ .../org/elasticsearch/bootstrap/Natives.java | 18 ++++++++ .../org/elasticsearch/env/Environment.java | 10 +++++ .../snapshots/SnapshotUtils.java | 30 +++++++++++++ .../snapshots/SnapshotsService.java | 21 +++++++++ .../ml/process/NativeStorageProvider.java | 9 +--- .../BaseSearchableSnapshotsIntegTestCase.java | 7 +-- .../SearchableSnapshots.java | 7 +-- .../cache/FrozenCacheService.java | 34 ++++----------- .../cache/SharedBytes.java | 37 ++++------------ .../store/cache/FrozenIndexInputTests.java | 11 ++--- .../AbstractSearchableSnapshotsTestCase.java | 11 ++--- .../cache/FrozenCacheServiceTests.java | 17 ++++---- 15 files changed, 187 insertions(+), 84 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index a7a12f880b49a..3e27ce8460907 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -40,6 +40,7 @@ import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.snapshots.SnapshotsService; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -168,6 +169,18 @@ private void setup(boolean addShutdownHook, Environment environment) throws Boot BootstrapSettings.SYSTEM_CALL_FILTER_SETTING.get(settings), BootstrapSettings.CTRLHANDLER_SETTING.get(settings)); + final long cacheSize = SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.get(settings).getBytes(); + final long regionSize = SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.get(settings).getBytes(); + final int numRegions = Math.toIntExact(cacheSize / regionSize); + final long fileSize = numRegions * regionSize; + if (fileSize > 0) { + try { + Natives.tryCreateCacheFile(environment, fileSize); + } catch (Exception e) { + throw new BootstrapException(e); + } + } + // initialize probes before the security manager is installed initializeProbes(); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java b/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java index 52dcd5438ed58..1b8ca51815aa2 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java @@ -61,6 +61,9 @@ protected List getFieldOrder() { static native String strerror(int errno); + // TODO: Bind POSIX fallocate as 
well to support non-Linux? (this would only apply to OSX in practice?) + static native int fallocate(int fd, int mode, long offset, long length); + private JNACLibrary() { } } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 90d576b29fdd8..ed402e642a98f 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -14,9 +14,17 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Constants; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.snapshots.SnapshotUtils; +import java.io.FileOutputStream; +import java.io.IOException; +import java.lang.reflect.Field; +import java.nio.file.Files; import java.nio.file.Path; import static org.elasticsearch.bootstrap.JNAKernel32Library.SizeT; @@ -260,4 +268,39 @@ static void tryInstallSystemCallFilter(Path tmpFile) { logger.warn("unable to install syscall filter: ", e); } } + + @SuppressForbidden(reason = "need access to fd on FileOutputStream") + static void fallocateSnapshotCacheFile(Environment environment, long fileSize) throws IOException { + if (Constants.LINUX == false) { + logger.debug("not trying to create a shared cache file using fallocate on non-Linux platform"); + return; + } + Path cacheFile = SnapshotUtils.findCacheSnapshotCacheFilePath(environment, fileSize); + if (cacheFile == null) { + throw new IOException("could not find a directory with adequate free space for cache file"); + } + boolean success = false; + try (FileOutputStream fileChannel = new FileOutputStream(cacheFile.toFile())) { + long currentSize = fileChannel.getChannel().size(); + if (currentSize < fileSize) { + final Field field = fileChannel.getFD().getClass().getDeclaredField("fd"); + field.setAccessible(true); + final int result = JNACLibrary.fallocate((int) field.get(fileChannel.getFD()), 0, currentSize, fileSize - currentSize); + final int errno = result == 0 ? 
0 : Native.getLastError(); + if (errno == 0) { + success = true; + logger.info("allocated cache file [{}] using fallocate", cacheFile); + } else { + logger.warn("failed to initialize cache file [{}] using fallocate errno [{}]", cacheFile, errno); + } + } + } catch (Exception e) { + logger.warn(new ParameterizedMessage("failed to initialize cache file [{}] using fallocate", cacheFile), e); + } finally { + if (success == false) { + // if anything goes wrong, delete the potentially created file to not waste disk space + Files.deleteIfExists(cacheFile); + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java index b9f072a52e817..e91116a5508ec 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -10,7 +10,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.env.Environment; +import java.io.IOException; import java.nio.file.Path; /** @@ -132,4 +134,20 @@ static boolean isSystemCallFilterInstalled() { } return JNANatives.LOCAL_SYSTEM_CALL_FILTER; } + + /** + * On Linux, this method tries to create the searchable snapshot frozen cache file using fallocate if JNA is available. This enables + * a much faster creation of the file than the fallback mechanism in the searchable snapshots plugin that will pre-allocate the cache + * file by writing zeros to the file. + * + * @throws IOException on failure to determine free disk space for a data path + */ + public static void tryCreateCacheFile(Environment environment, long fileSize) throws IOException { + if (JNA_AVAILABLE == false) { + logger.warn("cannot use fallocate to create cache file because JNA is not available"); + return; + } + JNANatives.fallocateSnapshotCacheFile(environment, fileSize); + } + } diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java index 18b509c6f58bf..8bf2a5fbbd241 100644 --- a/server/src/main/java/org/elasticsearch/env/Environment.java +++ b/server/src/main/java/org/elasticsearch/env/Environment.java @@ -300,6 +300,16 @@ public static FileStore getFileStore(final Path path) throws IOException { return new ESFileStore(Files.getFileStore(path)); } + public static long getUsableSpace(Path path) throws IOException { + long freeSpaceInBytes = Environment.getFileStore(path).getUsableSpace(); + + /* See: https://bugs.openjdk.java.net/browse/JDK-8162520 */ + if (freeSpaceInBytes < 0) { + freeSpaceInBytes = Long.MAX_VALUE; + } + return freeSpaceInBytes; + } + /** * asserts that the two environments are equivalent for all things the environment cares about (i.e., all but the setting * object which may contain different setting) diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotUtils.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotUtils.java index 58f41ffd9b306..16d479d0f94f8 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotUtils.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotUtils.java @@ -9,9 +9,14 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexNotFoundException; +import 
java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Arrays; import java.util.HashSet; import java.util.List; @@ -107,4 +112,29 @@ public static List filterIndices(List availableIndices, String[] } return List.copyOf(result); } + + /** + * Tries to find a suitable path to a searchable snapshots shared cache file in the data paths founds in the environment. + * + * @return path for the cache file or {@code null} if none could be found + */ + @Nullable + public static Path findCacheSnapshotCacheFilePath(Environment environment, long fileSize) throws IOException { + Path cacheFile = null; + for (Path path : environment.dataFiles()) { + Files.createDirectories(path); + // TODO: be resilient to this check failing and try next path? + long usableSpace = Environment.getUsableSpace(path); + Path p = path.resolve(SnapshotsService.CACHE_FILE_NAME); + if (Files.exists(p)) { + usableSpace += Files.size(p); + } + // TODO: leave some margin for error here + if (usableSpace > fileSize) { + cacheFile = p; + break; + } + } + return cacheFile; + } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 7fe2ce8caca49..913e99736323e 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.SnapshotDeletionsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.repositories.RepositoryShardId; import org.elasticsearch.cluster.SnapshotsInProgress.ShardSnapshotStatus; @@ -132,6 +133,26 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus public static final String UPDATE_SNAPSHOT_STATUS_ACTION_NAME = "internal:cluster/snapshot/update_snapshot_status"; + public static final String SHARED_CACHE_SETTINGS_PREFIX = "xpack.searchable.snapshot.shared_cache."; + + public static final Setting SHARED_CACHE_RANGE_SIZE_SETTING = Setting.byteSizeSetting( + SHARED_CACHE_SETTINGS_PREFIX + "range_size", + ByteSizeValue.ofMb(16), // default + Setting.Property.NodeScope + ); + public static final Setting SNAPSHOT_CACHE_REGION_SIZE_SETTING = Setting.byteSizeSetting( + SHARED_CACHE_SETTINGS_PREFIX + "region_size", + SHARED_CACHE_RANGE_SIZE_SETTING, + Setting.Property.NodeScope + ); + public static final Setting SNAPSHOT_CACHE_SIZE_SETTING = Setting.byteSizeSetting( + SHARED_CACHE_SETTINGS_PREFIX + "size", + ByteSizeValue.ZERO, + Setting.Property.NodeScope + ); + + public static final String CACHE_FILE_NAME = "shared_snapshot_cache"; + private final ClusterService clusterService; private final IndexNameExpressionResolver indexNameExpressionResolver; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java index d5e2edf479060..23307d4452856 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/NativeStorageProvider.java @@ -134,13 +134,8 @@ public ByteSizeValue getMinLocalStorageAvailable() { return minLocalStorageAvailable; } + // non-static 
indirection to enable mocking in tests long getUsableSpace(Path path) throws IOException { - long freeSpaceInBytes = Environment.getFileStore(path).getUsableSpace(); - - /* See: https://bugs.openjdk.java.net/browse/JDK-8162520 */ - if (freeSpaceInBytes < 0) { - freeSpaceInBytes = Long.MAX_VALUE; - } - return freeSpaceInBytes; + return Environment.getUsableSpace(path); } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java index 62efbc0ac72f7..8bab23714d365 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotAction; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest; import org.elasticsearch.xpack.searchablesnapshots.cache.CacheService; @@ -72,7 +73,7 @@ protected Settings nodeSettings(int nodeOrdinal) { ); } builder.put( - FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), + SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), rarely() ? randomBoolean() ? new ByteSizeValue(randomIntBetween(0, 10), ByteSizeUnit.KB) @@ -80,14 +81,14 @@ protected Settings nodeSettings(int nodeOrdinal) { : new ByteSizeValue(randomIntBetween(1, 10), ByteSizeUnit.MB) ); builder.put( - FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), + SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), rarely() ? new ByteSizeValue(randomIntBetween(4, 1024), ByteSizeUnit.KB) : new ByteSizeValue(randomIntBetween(1, 10), ByteSizeUnit.MB) ); if (randomBoolean()) { builder.put( - FrozenCacheService.FROZEN_CACHE_RANGE_SIZE_SETTING.getKey(), + SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), rarely() ? 
new ByteSizeValue(randomIntBetween(4, 1024), ByteSizeUnit.KB) : new ByteSizeValue(randomIntBetween(1, 10), ByteSizeUnit.MB) diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index d3ad5807c7b23..1b76d6b3312b8 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -57,6 +57,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.snapshots.SourceOnlySnapshotRepository; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; @@ -250,9 +251,9 @@ public List> getSettings() { CacheService.SNAPSHOT_CACHE_MAX_FILES_TO_SYNC_AT_ONCE_SETTING, CacheService.SNAPSHOT_CACHE_SYNC_SHUTDOWN_TIMEOUT, SearchableSnapshotEnableAllocationDecider.SEARCHABLE_SNAPSHOTS_ALLOCATE_ON_ROLLING_RESTART, - FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING, - FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING, - FrozenCacheService.FROZEN_CACHE_RANGE_SIZE_SETTING, + SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING, + SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING, + SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING, FrozenCacheService.FROZEN_CACHE_RECOVERY_RANGE_SIZE_SETTING, FrozenCacheService.SNAPSHOT_CACHE_MAX_FREQ_SETTING, FrozenCacheService.SNAPSHOT_CACHE_DECAY_INTERVAL_SETTING, diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheService.java index f9c498545a5e9..94d192cb11973 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheService.java @@ -46,35 +46,19 @@ import java.util.function.LongSupplier; import java.util.function.Predicate; +import static org.elasticsearch.snapshots.SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING; +import static org.elasticsearch.snapshots.SnapshotsService.SHARED_CACHE_SETTINGS_PREFIX; +import static org.elasticsearch.snapshots.SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING; +import static org.elasticsearch.snapshots.SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsUtils.toIntBytes; public class FrozenCacheService implements Releasable { - private static final String SETTINGS_PREFIX = "xpack.searchable.snapshot.shared_cache."; - - public static final Setting SNAPSHOT_CACHE_SIZE_SETTING = Setting.byteSizeSetting( - SETTINGS_PREFIX + "size", - ByteSizeValue.ZERO, - Setting.Property.NodeScope - ); - public static final ByteSizeValue MIN_SNAPSHOT_CACHE_RANGE_SIZE = new ByteSizeValue(4, ByteSizeUnit.KB); public static final ByteSizeValue MAX_SNAPSHOT_CACHE_RANGE_SIZE = new ByteSizeValue(Integer.MAX_VALUE, ByteSizeUnit.BYTES); - public static final Setting FROZEN_CACHE_RANGE_SIZE_SETTING = Setting.byteSizeSetting( - 
SETTINGS_PREFIX + "range_size", - ByteSizeValue.ofMb(16), // default - Setting.Property.NodeScope - ); - - public static final Setting SNAPSHOT_CACHE_REGION_SIZE_SETTING = Setting.byteSizeSetting( - SETTINGS_PREFIX + "region_size", - FROZEN_CACHE_RANGE_SIZE_SETTING, - Setting.Property.NodeScope - ); - public static final Setting FROZEN_CACHE_RECOVERY_RANGE_SIZE_SETTING = Setting.byteSizeSetting( - SETTINGS_PREFIX + "recovery_range_size", + SHARED_CACHE_SETTINGS_PREFIX + "recovery_range_size", new ByteSizeValue(128, ByteSizeUnit.KB), // default MIN_SNAPSHOT_CACHE_RANGE_SIZE, // min MAX_SNAPSHOT_CACHE_RANGE_SIZE, // max @@ -83,7 +67,7 @@ public class FrozenCacheService implements Releasable { public static final TimeValue MIN_SNAPSHOT_CACHE_DECAY_INTERVAL = TimeValue.timeValueSeconds(1L); public static final Setting SNAPSHOT_CACHE_DECAY_INTERVAL_SETTING = Setting.timeSetting( - SETTINGS_PREFIX + "decay.interval", + SHARED_CACHE_SETTINGS_PREFIX + "decay.interval", TimeValue.timeValueSeconds(60L), // default MIN_SNAPSHOT_CACHE_DECAY_INTERVAL, // min Setting.Property.NodeScope, @@ -91,14 +75,14 @@ public class FrozenCacheService implements Releasable { ); public static final Setting SNAPSHOT_CACHE_MAX_FREQ_SETTING = Setting.intSetting( - SETTINGS_PREFIX + "max_freq", + SHARED_CACHE_SETTINGS_PREFIX + "max_freq", 100, // default 1, // min Setting.Property.NodeScope ); public static final Setting SNAPSHOT_CACHE_MIN_TIME_DELTA_SETTING = Setting.timeSetting( - SETTINGS_PREFIX + "min_time_delta", + SHARED_CACHE_SETTINGS_PREFIX + "min_time_delta", TimeValue.timeValueSeconds(60L), // default TimeValue.timeValueSeconds(0L), // min Setting.Property.NodeScope @@ -157,7 +141,7 @@ public FrozenCacheService(Environment environment, ThreadPool threadPool) { } decayTask = new CacheDecayTask(threadPool, SNAPSHOT_CACHE_DECAY_INTERVAL_SETTING.get(settings)); decayTask.rescheduleIfNecessary(); - this.rangeSize = FROZEN_CACHE_RANGE_SIZE_SETTING.get(settings); + this.rangeSize = SHARED_CACHE_RANGE_SIZE_SETTING.get(settings); this.recoveryRangeSize = FROZEN_CACHE_RECOVERY_RANGE_SIZE_SETTING.get(settings); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/SharedBytes.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/SharedBytes.java index 82bcb1082c3a4..4dfd9ba195b85 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/SharedBytes.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/SharedBytes.java @@ -15,6 +15,8 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; +import org.elasticsearch.snapshots.SnapshotUtils; +import org.elasticsearch.snapshots.SnapshotsService; import java.io.IOException; import java.nio.ByteBuffer; @@ -33,8 +35,6 @@ public class SharedBytes extends AbstractRefCounted { StandardOpenOption.WRITE, StandardOpenOption.CREATE }; - private static final String CACHE_FILE_NAME = "snap_cache"; - final int numRegions; final long regionSize; @@ -51,29 +51,21 @@ public class SharedBytes extends AbstractRefCounted { final long fileSize = numRegions * regionSize; Path cacheFile = null; if (fileSize > 0) { - for (Path path : environment.dataFiles()) { - // TODO: be resilient to this check failing and try next path? 
- long usableSpace = getUsableSpace(path); - Path p = path.resolve(CACHE_FILE_NAME); - if (Files.exists(p)) { - usableSpace += Files.size(p); - } - // TODO: leave some margin for error here - if (usableSpace > fileSize) { - cacheFile = p; - break; - } - } + cacheFile = SnapshotUtils.findCacheSnapshotCacheFilePath(environment, fileSize); if (cacheFile == null) { throw new IOException("Could not find a directory with adequate free space for cache file"); } // TODO: maybe make this faster by allocating a larger direct buffer if this is too slow for very large files // We fill either the full file or the bytes between its current size and the desired size once with zeros to fully allocate // the file up front - logger.info("creating shared snapshot cache file [size={}, path={}]", fileSize, cacheFile); final ByteBuffer fillBytes = ByteBuffer.allocate(Channels.WRITE_CHUNK_SIZE); this.fileChannel = FileChannel.open(cacheFile, OPEN_OPTIONS); long written = fileChannel.size(); + if (fileSize < written) { + logger.info("creating shared snapshot cache file [size={}, path={}]", fileSize, cacheFile); + } else if (fileSize == written) { + logger.debug("reusing existing shared snapshot cache file [size={}, path={}]", fileSize, cacheFile); + } fileChannel.position(written); while (written < fileSize) { final int toWrite = Math.toIntExact(Math.min(fileSize - written, Channels.WRITE_CHUNK_SIZE)); @@ -87,23 +79,12 @@ public class SharedBytes extends AbstractRefCounted { } else { this.fileChannel = null; for (Path path : environment.dataFiles()) { - Files.deleteIfExists(path.resolve(CACHE_FILE_NAME)); + Files.deleteIfExists(path.resolve(SnapshotsService.CACHE_FILE_NAME)); } } this.path = cacheFile; } - // TODO: dry up against MLs usage of the same method - private static long getUsableSpace(Path path) throws IOException { - long freeSpaceInBytes = Environment.getFileStore(path).getUsableSpace(); - - /* See: https://bugs.openjdk.java.net/browse/JDK-8162520 */ - if (freeSpaceInBytes < 0) { - freeSpaceInBytes = Long.MAX_VALUE; - } - return freeSpaceInBytes; - } - @Override protected void closeInternal() { try { diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/FrozenIndexInputTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/FrozenIndexInputTests.java index 6e55d51e920ae..d943a83c8664f 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/FrozenIndexInputTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/index/store/cache/FrozenIndexInputTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.store.StoreFileMetadata; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsTestCase; import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; import org.elasticsearch.xpack.searchablesnapshots.cache.CacheService; @@ -55,7 +56,7 @@ public void testRandomReads() throws IOException { final ByteSizeValue rangeSize; if (rarely()) { - rangeSize = FrozenCacheService.FROZEN_CACHE_RANGE_SIZE_SETTING.get(Settings.EMPTY); + rangeSize = SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING.get(Settings.EMPTY); } else if (randomBoolean()) { rangeSize = new ByteSizeValue( randomLongBetween(CacheService.MIN_SNAPSHOT_CACHE_RANGE_SIZE.getBytes(), ByteSizeValue.ofKb(8L).getBytes()) @@ 
-68,7 +69,7 @@ public void testRandomReads() throws IOException { final ByteSizeValue regionSize; if (rarely()) { - regionSize = FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.get(Settings.EMPTY); + regionSize = SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.get(Settings.EMPTY); } else if (randomBoolean()) { regionSize = new ByteSizeValue(randomLongBetween(ByteSizeValue.ofKb(1L).getBytes(), ByteSizeValue.ofKb(8L).getBytes())); } else { @@ -83,9 +84,9 @@ public void testRandomReads() throws IOException { } final Settings settings = Settings.builder() - .put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), regionSize) - .put(FrozenCacheService.FROZEN_CACHE_RANGE_SIZE_SETTING.getKey(), rangeSize) - .put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), cacheSize) + .put(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), regionSize) + .put(SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), rangeSize) + .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), cacheSize) .put("path.home", createTempDir()) .build(); final Environment environment = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index 1b8c5d5c5dfd5..6044db3f31dab 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -33,6 +33,7 @@ import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -141,13 +142,13 @@ protected FrozenCacheService defaultFrozenCacheService() { protected FrozenCacheService randomFrozenCacheService() { final Settings.Builder cacheSettings = Settings.builder(); if (randomBoolean()) { - cacheSettings.put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), randomFrozenCacheSize()); + cacheSettings.put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), randomFrozenCacheSize()); } if (randomBoolean()) { - cacheSettings.put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), randomFrozenCacheSize()); + cacheSettings.put(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), randomFrozenCacheSize()); } if (randomBoolean()) { - cacheSettings.put(FrozenCacheService.FROZEN_CACHE_RANGE_SIZE_SETTING.getKey(), randomCacheRangeSize()); + cacheSettings.put(SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), randomCacheRangeSize()); } if (randomBoolean()) { cacheSettings.put(FrozenCacheService.FROZEN_CACHE_RECOVERY_RANGE_SIZE_SETTING.getKey(), randomCacheRangeSize()); @@ -174,8 +175,8 @@ protected FrozenCacheService createFrozenCacheService(final ByteSizeValue cacheS return new FrozenCacheService( newEnvironment( Settings.builder() - .put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), cacheSize) - .put(FrozenCacheService.FROZEN_CACHE_RANGE_SIZE_SETTING.getKey(), cacheRangeSize) + .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), 
cacheSize) + .put(SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), cacheRangeSize) .build() ), threadPool diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheServiceTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheServiceTests.java index 769b924a24238..bda1258f3bee2 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheServiceTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/FrozenCacheServiceTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.cache.CacheKey; +import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.searchablesnapshots.cache.FrozenCacheService.CacheFileRegion; @@ -28,8 +29,8 @@ public class FrozenCacheServiceTests extends ESTestCase { public void testBasicEviction() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") - .put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "500b") - .put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") + .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "500b") + .put(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") .put("path.home", createTempDir()) .build(); final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(settings, random()); @@ -74,8 +75,8 @@ public void testBasicEviction() throws IOException { public void testAutoEviction() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") - .put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "200b") - .put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") + .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "200b") + .put(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") .put("path.home", createTempDir()) .build(); final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(settings, random()); @@ -111,8 +112,8 @@ public void testAutoEviction() throws IOException { public void testForceEviction() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") - .put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "500b") - .put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") + .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "500b") + .put(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") .put("path.home", createTempDir()) .build(); final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(settings, random()); @@ -140,8 +141,8 @@ public void testForceEviction() throws IOException { public void testDecay() throws IOException { Settings settings = Settings.builder() .put(NODE_NAME_SETTING.getKey(), "node") - .put(FrozenCacheService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "500b") - .put(FrozenCacheService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") + .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "500b") + .put(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.getKey(), "100b") .put("path.home", createTempDir()) .build(); 
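These tests all derive the cache geometry from the relocated `SnapshotsService` constants. A minimal standalone sketch of how the three shared-cache settings relate; the sizes are arbitrary examples, not defaults:

```
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.snapshots.SnapshotsService;

public class SharedCacheSettingsExample {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            // xpack.searchable.snapshot.shared_cache.size: total size of the shared cache file
            .put(SnapshotsService.SNAPSHOT_CACHE_SIZE_SETTING.getKey(), "1gb")
            // xpack.searchable.snapshot.shared_cache.range_size: granularity of cached reads
            .put(SnapshotsService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(), "16mb")
            .build();
        // region_size falls back to range_size when unset, so this prints 16mb;
        // at bootstrap the cache file is then sized to (size / region_size) whole regions
        System.out.println(SnapshotsService.SNAPSHOT_CACHE_REGION_SIZE_SETTING.get(settings));
    }
}
```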
final DeterministicTaskQueue taskQueue = new DeterministicTaskQueue(settings, random()); From 414fabbbc3ea61c09f965446af86fa36012ca5ad Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Wed, 10 Feb 2021 16:02:54 +0200 Subject: [PATCH 16/24] Remove the version 8.0.0 "restriction" (#68822) --- .../org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java index 7c3c304e1c025..267ae786c3667 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java @@ -69,8 +69,7 @@ public void createIndex() throws IOException { bwcNodes = new ArrayList<>(nodes.getBWCNodes()); bwcVersion = nodes.getBWCNodes().get(0).getVersion(); newVersion = nodes.getNewNodes().get(0).getVersion(); - // TODO: remove the 8.0.0 version check after the code reaches 7.x as well - isBwcNodeBeforeFieldsApiInQL = newVersion == Version.V_8_0_0 || bwcVersion.before(FIELDS_API_QL_INTRODUCTION); + isBwcNodeBeforeFieldsApiInQL = bwcVersion.before(FIELDS_API_QL_INTRODUCTION); isBwcNodeBeforeFieldsApiInES = bwcVersion.before(SWITCH_TO_FIELDS_API_VERSION); String mappings = readResource(SqlSearchIT.class.getResourceAsStream("/all_field_types.json")); From 2b9585880c517a6f19d6e09c62af903c81181dfb Mon Sep 17 00:00:00 2001 From: Andras Palinkas Date: Wed, 10 Feb 2021 09:24:12 -0500 Subject: [PATCH 17/24] SQL: Fix the MINUTE_OF_DAY() function that throws exception when used in comparisons (#68783) The `MINUTE_OF_DAY()` extraction function does not have an equivalent expressible using a datetime format pattern. `MinuteOfDay.dateTimeFormat()` is called during query translation and throws an exception, but its return value does not actually impact the translated query (binary comparisons with a `DateTimeFunction` on one side always turn into a script query). This change fixes the immediate issue raised as part of #67872 and adds integration tests covering the problem, but leaves the removal of the unnecessary `dateTimeFormat()` function to a separate PR.
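For illustration, the value this function extracts corresponds to `java.time`'s `ChronoField.MINUTE_OF_DAY`, a field with no dedicated `DateTimeFormatter` pattern letter, which is why there is no sensible format string to return. A minimal standalone sketch; the class name and sample timestamp are illustrative only, chosen to match the new csv-spec test:

```
import java.time.ZonedDateTime;
import java.time.temporal.ChronoField;

public class MinuteOfDayExample {
    public static void main(String[] args) {
        // 14:26 UTC is minute 14 * 60 + 26 = 866 of the day; this is the value
        // the new minuteOfDayFilterEquality csv-spec test filters on
        ZonedDateTime ts = ZonedDateTime.parse("2021-01-22T14:26:06.0Z");
        System.out.println(ts.getLong(ChronoField.MINUTE_OF_DAY)); // prints 866
    }
}
```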
--- .../sql/qa/server/src/main/resources/datetime.csv-spec | 10 ++++++++++ .../function/scalar/datetime/MinuteOfDay.java | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/datetime.csv-spec index ec80d4669c0d4..3263dc8f7f747 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/datetime.csv-spec @@ -153,6 +153,16 @@ SELECT WEEK(birth_date) week, birth_date FROM test_emp WHERE WEEK(birth_date) > 2 |1953-01-07T00:00:00.000Z ; +minuteOfDayFilterEquality +SELECT MINUTE_OF_DAY(CONCAT(CONCAT('2021-01-22T14:26:06.', (salary % 2)::text), 'Z')::datetime) AS min_of_day +FROM test_emp WHERE min_of_day = 866 LIMIT 2; + + min_of_day:i +--------------- +866 +866 +; + selectAddWithDateTime schema::dt_year:s|dt_quarter:s|dt_month:s|dt_week:s|dt_day:s|dt_hours:s|dt_min:s|dt_sec:s|dt_millis:s|dt_mcsec:s|dt_nsec:s SELECT DATE_ADD('year', 10, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_year, DATE_ADD('quarter', -10, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_quarter, DATE_ADD('month', 20, '2019-09-04T11:22:33.123Z'::datetime)::string as dt_month, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java index a9fb385547bb0..ef83c5c0a2471 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java @@ -34,6 +34,6 @@ protected MinuteOfDay replaceChild(Expression newChild) { @Override public String dateTimeFormat() { - throw new UnsupportedOperationException("is there a format for it?"); + return null; } } From b7c089a222f30aa2aac898f06d3b3ad0130134d9 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 10 Feb 2021 14:17:51 +0000 Subject: [PATCH 18/24] Add 7.11.1 to BWC versions --- .ci/bwcVersions | 1 + server/src/main/java/org/elasticsearch/Version.java | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 3abfe35e5ea63..6ba54163b8868 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,5 +30,6 @@ BWC_VERSION: - "7.10.2" - "7.10.3" - "7.11.0" + - "7.11.1" - "7.12.0" - "8.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 95ac300f66925..fabe5b4aad6a1 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -77,6 +77,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_7_10_2 = new Version(7100299, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_10_3 = new Version(7100399, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_11_0 = new Version(7110099, org.apache.lucene.util.Version.LUCENE_8_7_0); + public static final Version V_7_11_1 = new Version(7110199, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_12_0 = new Version(7120099, org.apache.lucene.util.Version.LUCENE_8_8_0); public static final Version V_8_0_0 = new Version(8000099, org.apache.lucene.util.Version.LUCENE_8_8_0); public static final Version 
CURRENT = V_8_0_0; From b7d178dccfb1485fee3a5c5f0302789ea2913dae Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 10 Feb 2021 14:45:06 +0000 Subject: [PATCH 19/24] Remove 7.10.3 after 7.11.0 release --- .ci/bwcVersions | 1 - server/src/main/java/org/elasticsearch/Version.java | 1 - 2 files changed, 2 deletions(-) diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 6ba54163b8868..ef936fe88588a 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -28,7 +28,6 @@ BWC_VERSION: - "7.10.0" - "7.10.1" - "7.10.2" - - "7.10.3" - "7.11.0" - "7.11.1" - "7.12.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index fabe5b4aad6a1..447cc954ae6a3 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -75,7 +75,6 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_7_10_0 = new Version(7100099, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_10_1 = new Version(7100199, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_10_2 = new Version(7100299, org.apache.lucene.util.Version.LUCENE_8_7_0); - public static final Version V_7_10_3 = new Version(7100399, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_11_0 = new Version(7110099, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_11_1 = new Version(7110199, org.apache.lucene.util.Version.LUCENE_8_7_0); public static final Version V_7_12_0 = new Version(7120099, org.apache.lucene.util.Version.LUCENE_8_8_0); From 0b4848244925045bdc95e7bf6073e51ad2ef7c2b Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Feb 2021 16:13:50 +0100 Subject: [PATCH 20/24] Replace forbidden apis - s/@Ignore/@AwaitsFix/ --- .../elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index fec41c302ae73..a864171be6035 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -734,7 +734,7 @@ public void testBasicQueryWithParameters() throws IOException { ); } - @Ignore("Test disabled while merging fields API in") + @AwaitsFix(bugUrl = "Test disabled while merging fields API in") public void testBasicQueryWithMultiValues() throws IOException { List values = randomList(1, 5, ESTestCase::randomLong); String field = randomAlphaOfLength(5); @@ -755,7 +755,7 @@ public void testBasicQueryWithMultiValues() throws IOException { ); } - @Ignore("Test disabled while merging fields API in") + @AwaitsFix(bugUrl = "Test disabled while merging fields API in") public void testBasicQueryWithMultiValuesAndMultiPathAndMultiDoc() throws IOException { // formatter will leave first argument as is, but fold the following on a line index( @@ -814,7 +814,7 @@ public void testBasicQueryWithMultiValuesAndMultiPathAndMultiDoc() throws IOExce ); } - @Ignore("Test disabled while merging fields API in") + @AwaitsFix(bugUrl = "Test disabled while merging fields API in") public void testFilteringQueryWithMultiValuesAndWithout() throws IOException { index("{\"a\": [2, 3, 4, 5]}", 
"{\"a\": 6}", "{\"a\": [7, 8]}"); String mode = randomMode(); @@ -1101,7 +1101,7 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri assertEquals(0, getNumberOfSearchContexts(client(), "test")); } - @Ignore("Test disabled while merging fields API in") + @AwaitsFix(bugUrl = "Test disabled while merging fields API in") public void testMultiValueQueryText() throws IOException { index( "{" From c35eebea9d352f7d15fb51fa0ccd2c6a10b8f497 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 10 Feb 2021 09:20:52 -0600 Subject: [PATCH 21/24] Scripting: capture structured javadoc from stdlib (#68782) Clean javadoc tags and strip html. Methods and constructors have an optional `javadoc` field. All fields under `javadoc` are optional but at least one will be present. Fields also have optional `javadoc` field which, if present, is a string. ``` "javadoc": { "description": "...", // from @param "parameters": { "p1": "", "p2": "" }, // from @return "return": "...", // from @throws "throws": [ [ "IndexOutOfBoundsException", "" ], [ "IOException", "" ] ] } ``` --- modules/lang-painless/build.gradle | 2 + .../painless/JavadocExtractor.java | 108 +++++++++++++++++- .../painless/PainlessInfoJson.java | 36 ++++-- 3 files changed, 129 insertions(+), 17 deletions(-) diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 1cad5898d2042..38ec117401490 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -77,6 +77,7 @@ dependencies { docImplementation project(':server') docImplementation project(':modules:lang-painless') docImplementation 'com.github.javaparser:javaparser-core:3.18.0' + docImplementation 'org.jsoup:jsoup:1.13.1' if (isEclipse) { /* * Eclipse isn't quite "with it" enough to understand the different @@ -84,6 +85,7 @@ dependencies { * can compile the doc java files. 
*/ implementation 'com.github.javaparser:javaparser-core:3.18.0' + implementation 'org.jsoup:jsoup:1.13.1' } } diff --git a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/JavadocExtractor.java b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/JavadocExtractor.java index 2b27c0cedb247..4e572ae65fb2b 100644 --- a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/JavadocExtractor.java +++ b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/JavadocExtractor.java @@ -18,9 +18,19 @@ import com.github.javaparser.ast.comments.Comment; import com.github.javaparser.ast.visitor.VoidVisitorAdapter; import com.github.javaparser.javadoc.Javadoc; +import com.github.javaparser.javadoc.JavadocBlockTag; +import com.github.javaparser.javadoc.description.JavadocDescription; +import com.github.javaparser.javadoc.description.JavadocDescriptionElement; +import com.github.javaparser.javadoc.description.JavadocInlineTag; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.jsoup.Jsoup; +import org.jsoup.safety.Whitelist; import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -94,7 +104,7 @@ public void putMethod(MethodDeclaration declaration) { methods.put( MethodSignature.fromDeclaration(declaration), new ParsedMethod( - declaration.getJavadoc().map(Javadoc::toText).orElse(""), + declaration.getJavadoc().map(JavadocExtractor::clean).orElse(null), declaration.getParameters() .stream() .map(p -> p.getName().asString()) @@ -110,7 +120,7 @@ public void putConstructor(ConstructorDeclaration declaration) { constructors.put( declaration.getParameters().stream().map(p -> stripTypeParameters(p.getType().asString())).collect(Collectors.toList()), new ParsedMethod( - declaration.getJavadoc().map(Javadoc::toText).orElse(""), + declaration.getJavadoc().map(JavadocExtractor::clean).orElse(null), declaration.getParameters() .stream() .map(p -> p.getName().asString()) @@ -152,7 +162,7 @@ public void putField(FieldDeclaration declaration) { return; } for (VariableDeclarator var : declaration.getVariables()) { - fields.put(var.getNameAsString(), declaration.getJavadoc().map(Javadoc::toText).orElse("")); + fields.put(var.getNameAsString(), declaration.getJavadoc().map(v -> JavadocExtractor.clean(v).description).orElse("")); } } } @@ -192,15 +202,103 @@ public int hashCode() { } public static class ParsedMethod { - public final String javadoc; + public final ParsedJavadoc javadoc; public final List parameterNames; - public ParsedMethod(String javadoc, List parameterNames) { + public ParsedMethod(ParsedJavadoc javadoc, List parameterNames) { this.javadoc = javadoc; this.parameterNames = parameterNames; } } + public static ParsedJavadoc clean(Javadoc javadoc) { + JavadocDescription description = javadoc.getDescription(); + List tags = javadoc.getBlockTags(); + List cleaned = new ArrayList<>(description.getElements().size() + tags.size()); + cleaned.addAll(stripInlineTags(description)); + ParsedJavadoc parsed = new ParsedJavadoc(cleaned(cleaned)); + for (JavadocBlockTag tag: tags) { + String tagName = tag.getTagName(); + // https://docs.oracle.com/en/java/javase/14/docs/specs/javadoc/doc-comment-spec.html#standard-tags + // ignore author, deprecated, hidden, provides, uses, see, serial*, since and version. 
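Each captured tag body below goes through the `cleaned` helper defined just after this method. A minimal standalone illustration of that jsoup pass; the input string is made up, and jsoup 1.13.1 from the build change above is assumed:

```
import org.jsoup.Jsoup;
import org.jsoup.safety.Whitelist;

public class CleanedExample {
    public static void main(String[] args) {
        // A made-up javadoc description fragment containing markup and a line break
        String raw = "Returns a <b>copy</b> of\nthe given <code>list</code>.";
        // Whitelist.none() strips every HTML tag; the regex then collapses javadoc
        // line breaks and the whitespace around them into single spaces
        String cleaned = Jsoup.clean(raw, Whitelist.none()).replaceAll("[\n\\s]*\n[\n\\s]*", " ");
        System.out.println(cleaned); // Returns a copy of the given list.
    }
}
```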
+ if ("param".equals(tagName)) { + tag.getName().ifPresent(t -> parsed.param.put(t, cleaned(stripInlineTags(tag.getContent())))); + } else if ("return".equals(tagName)) { + parsed.returns = cleaned(stripInlineTags(tag.getContent())); + } else if ("exception".equals(tagName) || "throws".equals(tagName)) { + if (tag.getName().isPresent() == false) { + throw new IllegalStateException("Missing tag " + tag.toText()); + } + parsed.thrws.add(List.of(tag.getName().get(), cleaned(stripInlineTags(tag.getContent())))); + } + } + return parsed; + } + + private static String cleaned(List segments) { + return Jsoup.clean(String.join("", segments), Whitelist.none()).replaceAll("[\n\\s]*\n[\n\\s]*", " "); + } + + private static List stripInlineTags(JavadocDescription description) { + List elements = description.getElements(); + List stripped = new ArrayList<>(elements.size()); + for (JavadocDescriptionElement element: elements) { + if (element instanceof JavadocInlineTag) { + stripped.add(((JavadocInlineTag)element).getContent()); + } else { + stripped.add(element.toText()); + } + } + return stripped; + } + + public static class ParsedJavadoc implements ToXContent { + public final Map param = new HashMap<>(); + public String returns; + public String description; + public List> thrws = new ArrayList<>(); + + public static final ParseField PARAMETERS = new ParseField("parameters"); + public static final ParseField RETURN = new ParseField("return"); + public static final ParseField THROWS = new ParseField("throws"); + public static final ParseField DESCRIPTION = new ParseField("description"); + + public ParsedJavadoc(String description) { + this.description = description; + } + + public boolean isEmpty() { + return param.size() == 0 && + (description == null || description.isEmpty()) && + (returns == null || returns.isEmpty()) && + thrws.size() == 0; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (description != null && description.isEmpty() == false) { + builder.field(DESCRIPTION.getPreferredName(), description); + } + if (param.isEmpty() == false) { + builder.field(PARAMETERS.getPreferredName(), param); + } + if (returns != null && returns.isEmpty() == false) { + builder.field(RETURN.getPreferredName(), returns); + } + if (thrws.isEmpty() == false) { + builder.field(THROWS.getPreferredName(), thrws); + } + builder.endObject(); + return builder; + } + + @Override + public boolean isFragment() { + return true; + } + } + private static class ClassFileVisitor extends VoidVisitorAdapter { @Override public void visit(CompilationUnit compilationUnit, ParsedJavaClass parsed) { diff --git a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/PainlessInfoJson.java b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/PainlessInfoJson.java index 0838d5dc4b7fe..eb98d256c8de5 100644 --- a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/PainlessInfoJson.java +++ b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/PainlessInfoJson.java @@ -173,13 +173,20 @@ public static class Method implements ToXContentObject { private final String declaring; private final String name; private final String rtn; - private final String javadoc; + private final JavadocExtractor.ParsedJavadoc javadoc; private final List parameters; private final List parameterNames; public static final ParseField PARAMETER_NAMES = new ParseField("parameter_names"); public static final ParseField JAVADOC = new 
ParseField("javadoc"); - private Method(String declaring, String name, String rtn, String javadoc, List parameters, List parameterNames) { + private Method( + String declaring, + String name, + String rtn, + JavadocExtractor.ParsedJavadoc javadoc, + List parameters, + List parameterNames + ) { this.declaring = declaring; this.name = name; this.rtn = rtn; @@ -210,11 +217,11 @@ public static List fromInfos( ) { List methods = new ArrayList<>(infos.size()); for (PainlessContextMethodInfo info: infos) { - String javadoc = null; + JavadocExtractor.ParsedJavadoc javadoc = null; List parameterNames = null; String name = info.getName(); - List parameterTypes = info.getParameters(); + List parameterTypes = toDisplayParameterTypes(info.getParameters(), javaNamesToDisplayNames); JavadocExtractor.ParsedMethod parsedMethod = parsed.getMethod(name, parameterTypes); if (parsedMethod != null) { @@ -227,7 +234,7 @@ public static List fromInfos( name, ContextGeneratorCommon.getType(javaNamesToDisplayNames, info.getRtn()), javadoc, - toDisplayParameterTypes(parameterTypes, javaNamesToDisplayNames), + parameterTypes, parameterNames )); } @@ -240,7 +247,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(PainlessContextMethodInfo.DECLARING.getPreferredName(), declaring); builder.field(PainlessContextMethodInfo.NAME.getPreferredName(), name); builder.field(PainlessContextMethodInfo.RTN.getPreferredName(), rtn); - if (javadoc != null && "".equals(javadoc) == false) { + if (javadoc != null && javadoc.isEmpty() == false) { builder.field(JAVADOC.getPreferredName(), javadoc); } builder.field(PainlessContextMethodInfo.PARAMETERS.getPreferredName(), parameters); @@ -257,12 +264,17 @@ public static class Constructor implements ToXContentObject { private final String declaring; private final List parameters; private final List parameterNames; - private final String javadoc; + private final JavadocExtractor.ParsedJavadoc javadoc; public static final ParseField JAVADOC = new ParseField("javadoc"); public static final ParseField PARAMETER_NAMES = new ParseField("parameter_names"); - private Constructor(String declaring, List parameters, List parameterNames, String javadoc) { + private Constructor( + String declaring, + List parameters, + List parameterNames, + JavadocExtractor.ParsedJavadoc javadoc + ) { this.declaring = declaring; this.parameters = parameters; this.parameterNames = parameterNames; @@ -289,9 +301,9 @@ private static List fromInfos( ) { List constructors = new ArrayList<>(infos.size()); for (PainlessContextConstructorInfo info: infos) { - List parameterTypes = info.getParameters(); + List parameterTypes = toDisplayParameterTypes(info.getParameters(), javaNamesToDisplayNames); List parameterNames = null; - String javadoc = null; + JavadocExtractor.ParsedJavadoc javadoc = null; JavadocExtractor.ParsedMethod parsed = pj.getConstructor(parameterTypes); if (parsed != null) { @@ -301,7 +313,7 @@ private static List fromInfos( constructors.add(new Constructor( javaNamesToDisplayNames.get(info.getDeclaring()), - toDisplayParameterTypes(parameterTypes, javaNamesToDisplayNames), + parameterTypes, parameterNames, javadoc )); @@ -317,7 +329,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (parameterNames != null && parameterNames.size() > 0) { builder.field(PARAMETER_NAMES.getPreferredName(), parameterNames); } - if (javadoc != null && "".equals(javadoc) == false) { + if (javadoc != null && javadoc.isEmpty() == false) { 
builder.field(JAVADOC.getPreferredName(), javadoc); } builder.endObject(); From b61556c347bb5c7ef649107432c3ac393af51dae Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 10 Feb 2021 16:21:52 +0100 Subject: [PATCH 22/24] Style fix --- .../org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index a864171be6035..5d9341e799377 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.sql.proto.StringUtils; import org.elasticsearch.xpack.sql.qa.ErrorsTestCase; import org.hamcrest.Matcher; -import org.junit.Ignore; import java.io.IOException; import java.io.InputStream; From 3c6437f72af9e5cb8539c58c66a09cd63aac7144 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 10 Feb 2021 11:25:10 -0500 Subject: [PATCH 23/24] Add max_single_primary_size to ResizeRequest's toXContent (#68793) Co-authored-by: bellengao --- .../action/admin/indices/shrink/ResizeRequest.java | 3 +++ .../action/admin/indices/shrink/ResizeRequestTests.java | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index 2b6de1bb47900..46e3a5fdc3e60 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -226,6 +226,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } builder.endObject(); + if (maxSinglePrimarySize != null) { + builder.field(MAX_SINGLE_PRIMARY_SIZE.getPreferredName(), maxSinglePrimarySize); + } } builder.endObject(); return builder; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java index c3bf3ed9aa9f2..99218d1545083 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; @@ -55,6 +57,12 @@ public void testToXContent() throws IOException { String actualRequestBody = Strings.toString(request); assertEquals("{\"settings\":{},\"aliases\":{}}", actualRequestBody); } + { + ResizeRequest request = new ResizeRequest("target", "source"); + request.setMaxSinglePrimarySize(new ByteSizeValue(100, ByteSizeUnit.MB)); + String actualRequestBody = Strings.toString(request); + assertEquals("{\"settings\":{},\"aliases\":{},\"max_single_primary_size\":\"100mb\"}", actualRequestBody); + } { ResizeRequest request 
= new ResizeRequest(); CreateIndexRequest target = new CreateIndexRequest("target"); From 6d5ab2d7c2fe30cc975b77581b3c023187db2b1b Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 10 Feb 2021 16:41:25 +0000 Subject: [PATCH 24/24] Reject remounting snapshot of a searchable snapshot (#68816) Today you can mount a snapshot of a searchable snapshot index, but the shard fails to allocate since the underlying snapshot is devoid of content. Doing this is a mistake, you probably meant to restore the index instead, so this commit rejects it earlier with a more helpful message. Closes #68792 --- .../org/elasticsearch/index/store/Store.java | 2 +- .../SearchableSnapshotsIntegTests.java | 31 +++++++++++++++++++ ...ransportMountSearchableSnapshotAction.java | 21 +++++++++++++ 3 files changed, 53 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index c43dd162bebc1..66757375a636b 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -1360,7 +1360,7 @@ public void markStoreCorrupted(IOException exception) throws IOException { BytesRef ref = bytes.toBytesRef(); output.writeBytes(ref.bytes, ref.offset, ref.length); CodecUtil.writeFooter(output); - } catch (IOException ex) { + } catch (IOException | ImmutableDirectoryException ex) { logger.warn("Can't mark store as corrupted", ex); } directory().sync(Collections.singleton(corruptionMarkerName)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index e81ad34dd6e5b..9af516afdaf6f 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.search.TotalHits; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus; @@ -96,7 +97,9 @@ import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.getDataTiersPreference; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants.SNAPSHOT_DIRECTORY_FACTORY_KEY; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants.SNAPSHOT_RECOVERY_STATE_FACTORY_KEY; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -1288,6 +1291,34 @@ public void testSnapshotOfSearchableSnapshotIncludesNoDataButCanBeRestored() thr logger.info("--> finished restoring snapshot-2"); assertTotalHits(restoredIndexName, originalAllHits, originalBarHits); + + final IllegalArgumentException 
remountException = expectThrows(IllegalArgumentException.class, () -> { + try { + mountSnapshot( + restoreRepositoryName, + snapshotTwo.getName(), + restoredIndexName, + randomAlphaOfLength(10).toLowerCase(Locale.ROOT), + Settings.EMPTY + ); + } catch (Exception e) { + final Throwable cause = ExceptionsHelper.unwrap(e, IllegalArgumentException.class); + throw cause == null ? e : cause; + } + }); + assertThat( + remountException.getMessage(), + allOf( + containsString("is a snapshot of a searchable snapshot index backed by index"), + containsString(repositoryName), + containsString(snapshotOne.getName()), + containsString(indexName), + containsString(restoreRepositoryName), + containsString(snapshotTwo.getName()), + containsString(restoredIndexName), + containsString("cannot be mounted; did you mean to restore it instead?") + ) + ); } private void assertTotalHits(String indexName, TotalHits originalAllHits, TotalHits originalBarHits) throws Exception { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java index 3fa05b4b89c07..7f31b53466125 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants; import java.util.Arrays; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -50,6 +51,7 @@ import static org.elasticsearch.index.IndexModule.INDEX_RECOVERY_TYPE_SETTING; import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.getDataTiersPreference; +import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants.isSearchableSnapshotStore; /** * Action that mounts a snapshot as a searchable snapshot, by converting the mount request into a restore request with specific settings @@ -179,6 +181,25 @@ protected void masterOperation( final String[] ignoreIndexSettings = Arrays.copyOf(request.ignoreIndexSettings(), request.ignoreIndexSettings().length + 1); ignoreIndexSettings[ignoreIndexSettings.length - 1] = IndexMetadata.SETTING_DATA_PATH; + final IndexMetadata indexMetadata = repository.getSnapshotIndexMetaData(repoData, snapshotId, indexId); + if (isSearchableSnapshotStore(indexMetadata.getSettings())) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "index [%s] in snapshot [%s/%s:%s] is a snapshot of a searchable snapshot index " + + "backed by index [%s] in snapshot [%s/%s:%s] and cannot be mounted; did you mean to restore it instead?", + indexName, + repoName, + repository.getMetadata().uuid(), + snapName, + SearchableSnapshots.SNAPSHOT_INDEX_NAME_SETTING.get(indexMetadata.getSettings()), + SearchableSnapshots.SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexMetadata.getSettings()), + SearchableSnapshots.SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexMetadata.getSettings()), + SearchableSnapshots.SNAPSHOT_SNAPSHOT_NAME_SETTING.get(indexMetadata.getSettings()) + ) + ); + } + client.admin() .cluster() .restoreSnapshot(
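For reference, the shape of the rejection message produced by the new check, as a runnable sketch; every name below is a made-up placeholder, not a value from any real repository or snapshot:

```
import java.util.Locale;

public class RemountRejectionMessageExample {
    public static void main(String[] args) {
        // Same format string as TransportMountSearchableSnapshotAction, with placeholder arguments
        String message = String.format(
            Locale.ROOT,
            "index [%s] in snapshot [%s/%s:%s] is a snapshot of a searchable snapshot index "
                + "backed by index [%s] in snapshot [%s/%s:%s] and cannot be mounted; did you mean to restore it instead?",
            "restored-index", "repo-two", "uuid-two", "snap-two",
            "original-index", "repo-one", "uuid-one", "snap-one"
        );
        System.out.println(message);
    }
}
```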