From 5af56b92c228cd1eab4a716f724aabf91549425c Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 5 Dec 2018 16:33:53 +0200 Subject: [PATCH 01/71] Add REST and Transport actions for oidc authentication --- .../oidc/OpenIdConnectAuthenticateAction.java | 17 +++++ .../OpenIdConnectAuthenticateRequest.java | 42 +++++++++++ .../OpenIdConnectAuthenticateResponse.java | 64 +++++++++++++++++ ...nIdConnectPrepareAuthenticationAction.java | 17 +++++ ...IdConnectPrepareAuthenticationRequest.java | 47 +++++++++++++ ...dConnectPrepareAuthenticationResponse.java | 49 +++++++++++++ .../oidc/OpenIdConnectRealmSettings.java | 11 +++ .../xpack/security/Security.java | 4 ++ ...nsportOpenIdConnectAuthenticateAction.java | 29 ++++++++ ...nIdConnectPrepareAuthenticationAction.java | 30 ++++++++ .../authc/oidc/OpenIdConnectRealm.java | 46 +++++++++++++ .../oidc/OpenIdConnectBaseRestHandler.java | 40 +++++++++++ .../RestOpenIdConnectAuthenticateAction.java | 65 +++++++++++++++++ ...nIdConnectPrepareAuthenticationAction.java | 69 +++++++++++++++++++ 14 files changed, 530 insertions(+) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java create mode 100644 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/OpenIdConnectBaseRestHandler.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java new file mode 100644 index 0000000000000..c81d502f09ab3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java @@ -0,0 +1,17 @@ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.Action; + +public final class OpenIdConnectAuthenticateAction extends Action { + + public static final OpenIdConnectAuthenticateAction INSTANCE = new OpenIdConnectAuthenticateAction(); + public static final String NAME = "cluster:admin/xpack/security/oidc/authenticate"; + + protected OpenIdConnectAuthenticateAction() { + super(NAME); + } + + public OpenIdConnectAuthenticateResponse newResponse() { + return new OpenIdConnectAuthenticateResponse(); + 
} +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java new file mode 100644 index 0000000000000..d6c6b37ad4bc6 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java @@ -0,0 +1,42 @@ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; + +/** + * Represents a request for authentication using an OAuth 2.0 Authorization Code + */ +public class OpenIdConnectAuthenticateRequest extends ActionRequest { + + /* + * OAuth 2.0 Authorization Code + */ + private String code; + + /* + * OAuth 2.0 state value. + */ + private String state; + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getState() { + return state; + } + + public void setState(String state) { + this.state = state; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java new file mode 100644 index 0000000000000..033d496139615 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; + +import java.io.IOException; + +public class OpenIdConnectAuthenticateResponse extends ActionResponse { + private String principal; + private String accessTokenString; + private String refreshTokenString; + private TimeValue expiresIn; + + public OpenIdConnectAuthenticateResponse(String principal, String accessTokenString, String refreshTokenString, TimeValue expiresIn) { + this.principal = principal; + this.accessTokenString = accessTokenString; + this.refreshTokenString = refreshTokenString; + this.expiresIn = expiresIn; + } + + public OpenIdConnectAuthenticateResponse() { + } + + public String getPrincipal() { + return principal; + } + + public String getAccessTokenString() { + return accessTokenString; + } + + public String getRefreshTokenString() { + return refreshTokenString; + } + + public TimeValue getExpiresIn() { + return expiresIn; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + principal = in.readString(); + accessTokenString = in.readString(); + refreshTokenString = in.readString(); + expiresIn = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(principal); + out.writeString(accessTokenString); + out.writeString(refreshTokenString); + out.writeTimeValue(expiresIn); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java new file mode 100644 index 0000000000000..a7356feae6191 --- /dev/null +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java @@ -0,0 +1,17 @@ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.Action; + +public class OpenIdConnectPrepareAuthenticationAction extends Action { + + public static final OpenIdConnectPrepareAuthenticationAction INSTANCE = new OpenIdConnectPrepareAuthenticationAction(); + public static final String NAME = "cluster:admin/xpack/security/oidc/prepare"; + + protected OpenIdConnectPrepareAuthenticationAction() { + super(NAME); + } + + public OpenIdConnectPrepareAuthenticationResponse newResponse() { + return new OpenIdConnectPrepareAuthenticationResponse(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java new file mode 100644 index 0000000000000..5cec9e79d3d59 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java @@ -0,0 +1,47 @@ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * Represents a request to prepare an OAuth 2.0 authentication request + */ +public class OpenIdConnectPrepareAuthenticationRequest extends ActionRequest { + + @Nullable + private String realmName; + + public String getRealmName() { + return realmName; + } + + public void setRealmName(String realmName) { + this.realmName = realmName; + } + + @Override + public 
ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(realmName); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + realmName = in.readOptionalString(); + } + + public String toString() { + return "{realmName=" + realmName + "}"; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java new file mode 100644 index 0000000000000..d80c43db06129 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + + +public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse { + + private String redirectUrl; + private String state; + + public OpenIdConnectPrepareAuthenticationResponse(String redirectUrl, String state) { + this.redirectUrl = redirectUrl; + this.state = state; + } + + public OpenIdConnectPrepareAuthenticationResponse() { + } + + public String getRedirectUrl() { + return redirectUrl; + } + + public String getState() { + return state; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + state = in.readString(); + redirectUrl = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(state); + out.writeString(redirectUrl); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java new file mode 100644 index 0000000000000..2438beb70605d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.authc.oidc; + +public class OpenIdConnectRealmSettings { + + public static final String TYPE = "oidc"; +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 7ebd50d19445f..f5bd75ae0ecf7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -202,6 +202,8 @@ import org.elasticsearch.xpack.security.rest.action.rolemapping.RestDeleteRoleMappingAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestGetRoleMappingsAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestPutRoleMappingAction; +import org.elasticsearch.xpack.security.rest.action.oidc.RestOpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.security.rest.action.oidc.RestOpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlInvalidateSessionAction; import org.elasticsearch.xpack.security.rest.action.saml.RestSamlLogoutAction; @@ -767,6 +769,8 @@ public List getRestHandlers(Settings settings, RestController restC new RestSamlAuthenticateAction(settings, restController, getLicenseState()), new RestSamlLogoutAction(settings, restController, getLicenseState()), new RestSamlInvalidateSessionAction(settings, restController, getLicenseState()), + new RestOpenIdConnectPrepareAuthenticationAction(settings, restController, getLicenseState()), + new RestOpenIdConnectAuthenticateAction(settings, restController, getLicenseState()), new RestGetPrivilegesAction(settings, restController, getLicenseState()), new RestPutPrivilegesAction(settings, restController, getLicenseState()), new 
RestDeletePrivilegesAction(settings, restController, getLicenseState()) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java new file mode 100644 index 0000000000000..1e544dc774ce4 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -0,0 +1,29 @@ +package org.elasticsearch.xpack.security.action.oidc; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateRequest; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateResponse; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.TokenService; + +public class TransportOpenIdConnectAuthenticateAction extends HandledTransportAction { + + @Inject + public TransportOpenIdConnectAuthenticateAction(ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, AuthenticationService authenticationService, + TokenService tokenService) { + super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectAuthenticateRequest::new); + } + @Override + protected void doExecute(Task task, OpenIdConnectAuthenticateRequest request, ActionListener listener) { + + } +} diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java new file mode 100644 index 0000000000000..1acc4ed903359 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -0,0 +1,30 @@ +package org.elasticsearch.xpack.security.action.oidc; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticateRequest; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.TokenService; + +public class TransportOpenIdConnectPrepareAuthenticationAction extends HandledTransportAction { + + @Inject + public TransportOpenIdConnectPrepareAuthenticationAction(ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, AuthenticationService authenticationService, + TokenService tokenService) { + super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectPrepareAuthenticateRequest::new); + } + + @Override + protected void doExecute(Task task, OpenIdConnectPrepareAuthenticateRequest request, ActionListener listener) { + + } +} diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java new file mode 100644 index 0000000000000..615b92c3d865e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -0,0 +1,46 @@ +package org.elasticsearch.xpack.security.authc.oidc; + +import org.apache.log4j.LogManager; +import org.apache.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; +import org.elasticsearch.xpack.core.security.authc.Realm; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.user.User; + +public class OpenIdConnectRealm extends Realm implements Releasable { + + private static final Logger logger = LogManager.getLogger(OpenIdConnectRealm.class); + + public OpenIdConnectRealm(RealmConfig config) { + super(config); + } + + @Override + public void close() { + + } + + @Override + public boolean supports(AuthenticationToken token) { + return false; + } + + @Override + public AuthenticationToken token(ThreadContext context) { + return null; + } + + @Override + public void authenticate(AuthenticationToken token, ActionListener listener) { + + } + + @Override + public void lookupUser(String username, ActionListener listener) { + + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/OpenIdConnectBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/OpenIdConnectBaseRestHandler.java new file mode 100644 index 0000000000000..008b5d0676e2c 
--- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/OpenIdConnectBaseRestHandler.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.rest.action.oidc; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; +import org.elasticsearch.xpack.security.authc.Realms; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +public abstract class OpenIdConnectBaseRestHandler extends SecurityBaseRestHandler { + + private static final String OIDC_REALM_TYPE = OpenIdConnectRealmSettings.TYPE; + + /** + * @param settings the node's settings + * @param licenseState the license state that will be used to determine if security is licensed + */ + protected OpenIdConnectBaseRestHandler(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + protected Exception checkFeatureAvailable(RestRequest request) { + Exception failedFeature = super.checkFeatureAvailable(request); + if (failedFeature != null) { + return failedFeature; + } else if (Realms.isRealmTypeAvailable(licenseState.allowedRealmType(), OIDC_REALM_TYPE)) { + return null; + } else { + logger.info("The '{}' realm is not available under the current license", OIDC_REALM_TYPE); + return LicenseUtils.newComplianceException(OIDC_REALM_TYPE); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java new file mode 100644 index 0000000000000..cd3205403f34f --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java @@ -0,0 +1,65 @@ +package org.elasticsearch.xpack.security.rest.action.oidc; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateRequest; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateResponse; +import org.elasticsearch.xpack.security.rest.action.oidc.OpenIdConnectBaseRestHandler; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestOpenIdConnectAuthenticateAction extends OpenIdConnectBaseRestHandler { + + static final ObjectParser PARSER = new ObjectParser<>("oidc_authn", + OpenIdConnectAuthenticateRequest::new); + + static { + PARSER.declareString(OpenIdConnectAuthenticateRequest::setCode, new ParseField("code")); + PARSER.declareString(OpenIdConnectAuthenticateRequest::setState, new ParseField("state")); + } + + public RestOpenIdConnectAuthenticateAction(Settings 
settings, RestController controller, XPackLicenseState licenseState) { + super(settings, licenseState); + controller.registerHandler(POST, "/_xpath/security/oidc/authenticate", this); + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + try (XContentParser parser = request.contentParser()) { + final OpenIdConnectAuthenticateRequest authenticateRequest = PARSER.parse(parser, null); + return channel -> client.execute(OpenIdConnectAuthenticateAction.INSTANCE, authenticateRequest, + new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(OpenIdConnectAuthenticateResponse response, XContentBuilder builder) throws Exception { + builder.startObject(); + builder.startObject() + .field("username", response.getPrincipal()) + .field("access_token", response.getAccessTokenString()) + .field("refresh_token", response.getRefreshTokenString()) + .field("expires_in", response.getExpiresIn().seconds()) + .endObject(); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + } + + @Override + public String getName() { + return "xpack_security_oidc_authenticate_action"; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java new file mode 100644 index 0000000000000..bb00b57ef172e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.rest.action.oidc; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationAction; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationRequest; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +/** + * Generates an oAuth 2.0 authentication request as a URL string and returns it to the REST client. 
+ */ +public class RestOpenIdConnectPrepareAuthenticationAction extends OpenIdConnectBaseRestHandler { + + static final ObjectParser PARSER = new ObjectParser<>("oidc_prepare_auithentication", + OpenIdConnectPrepareAuthenticationRequest::new); + + static { + PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setRealmName, new ParseField("realm")); + } + + public RestOpenIdConnectPrepareAuthenticationAction(Settings settings, RestController controller, XPackLicenseState licenseState) { + super(settings, licenseState); + controller.registerHandler(POST, "/_xpath/security/oidc/prepare", this); + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + try (XContentParser parser = request.contentParser()) { + final OpenIdConnectPrepareAuthenticationRequest authenticationRequest = PARSER.parse(parser, null); + return channel -> client.execute(OpenIdConnectPrepareAuthenticationAction.INSTANCE, authenticationRequest, + new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(OpenIdConnectPrepareAuthenticationResponse actionResponse, XContentBuilder builder) + throws Exception { + builder.startObject(); + builder.field("state"); + builder.field("redirect"); + builder.endObject(); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + } + + @Override + public String getName() { + return "xpack_security_oidc_prepare_authentication_action"; + } +} From c4154f68e9de8c94066f0ec3a57c6651f7c0e041 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 13 Dec 2018 11:38:12 +0200 Subject: [PATCH 02/71] support state --- ...IdConnectPrepareAuthenticationRequest.java | 31 +++++++++--- ...nIdConnectPrepareAuthenticationAction.java | 49 +++++++++++++++---- .../RestOpenIdConnectAuthenticateAction.java | 8 ++- ...nIdConnectPrepareAuthenticationAction.java | 2 +- 4 files changed, 71 insertions(+), 19 deletions(-) diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java index 5cec9e79d3d59..3f12523c77f3b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java @@ -2,46 +2,65 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import static org.elasticsearch.action.ValidateActions.addValidationError; + /** * Represents a request to prepare an OAuth 2.0 authentication request */ public class OpenIdConnectPrepareAuthenticationRequest extends ActionRequest { - @Nullable private String realmName; + private String state; public String getRealmName() { return realmName; } + public String getState() { + return state; + } + public void setRealmName(String realmName) { this.realmName = realmName; } + public void setState(String state) { + this.state = state; + } + @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + if (Strings.hasText(realmName) == false) { + validationException = addValidationError("realm name must be provided", null); + } + if (Strings.hasText(state) == false) { + validationException = addValidationError("state must be provided", validationException); + } + return validationException; } @Override public void writeTo(StreamOutput out) throws IOException { 
super.writeTo(out); - out.writeOptionalString(realmName); + out.writeString(realmName); + out.writeString(state); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - realmName = in.readOptionalString(); + realmName = in.readString(); + state = in.readString(); } public String toString() { - return "{realmName=" + realmName + "}"; + return "{realmName=" + realmName + ", state=" + state + "}"; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java index 1acc4ed903359..0a01203300976 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -1,30 +1,59 @@ package org.elasticsearch.xpack.security.action.oidc; +import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticateRequest; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationRequest; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; -import 
org.elasticsearch.xpack.security.authc.AuthenticationService; -import org.elasticsearch.xpack.security.authc.TokenService; +import org.elasticsearch.xpack.security.authc.Realms; +import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; -public class TransportOpenIdConnectPrepareAuthenticationAction extends HandledTransportAction { + private final Realms realms; + @Inject - public TransportOpenIdConnectPrepareAuthenticationAction(ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, AuthenticationService authenticationService, - TokenService tokenService) { - super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectPrepareAuthenticateRequest::new); + public TransportOpenIdConnectPrepareAuthenticationAction(TransportService transportService, + ActionFilters actionFilters, Realms realms) { + super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectPrepareAuthenticationRequest::new); + this.realms = realms; } @Override - protected void doExecute(Task task, OpenIdConnectPrepareAuthenticateRequest request, ActionListener listener) { + protected void doExecute(Task task, OpenIdConnectPrepareAuthenticationRequest request, + ActionListener listener) { + List realms = this.realms.stream() + .filter(r -> r instanceof OpenIdConnectRealm) + .map(r -> (OpenIdConnectRealm) r) + .filter(r -> r.name().equals(request.getRealmName())) + .collect(Collectors.toList()); + if (realms.isEmpty()) { + listener.onFailure(new ElasticsearchSecurityException("Cannot find OIDC realm with name [{}]", request.getRealmName())); + } else if (realms.size() > 1) { + // Can't define multiple realms with the same name in configuration, but check still. 
+ listener.onFailure(new ElasticsearchSecurityException("Found multiple ([{}]) OIDC realms with name [{}]", realms.size(), + request.getRealmName())); + } else if (Strings.isNullOrEmpty(request.getState())) { + listener.onFailure(new ElasticsearchSecurityException("State parameter cannot be empty")); + } else { + prepareAuthenticationResponse(realms.get(0), request.getState(), listener); + } + } + private void prepareAuthenticationResponse(OpenIdConnectRealm realm, String state, + ActionListener listener) { + //TODO: Generate the Authorization URL from the OP metadata and the configuration + final String authorizationEndpointURl = ""; + listener.onResponse(new OpenIdConnectPrepareAuthenticationResponse(authorizationEndpointURl, state)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java index cd3205403f34f..7ec7e8bebb5aa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.security.rest.action.oidc; import org.elasticsearch.client.node.NodeClient; @@ -16,7 +21,6 @@ import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateRequest; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateResponse; -import org.elasticsearch.xpack.security.rest.action.oidc.OpenIdConnectBaseRestHandler; import java.io.IOException; @@ -34,7 +38,7 @@ public class RestOpenIdConnectAuthenticateAction extends OpenIdConnectBaseRestHa public RestOpenIdConnectAuthenticateAction(Settings settings, RestController controller, XPackLicenseState licenseState) { super(settings, licenseState); - controller.registerHandler(POST, "/_xpath/security/oidc/authenticate", this); + controller.registerHandler(POST, "/_security/oidc/authenticate", this); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java index bb00b57ef172e..671cbeb3476c4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java @@ -40,7 +40,7 @@ public class RestOpenIdConnectPrepareAuthenticationAction extends OpenIdConnectB public RestOpenIdConnectPrepareAuthenticationAction(Settings settings, RestController controller, XPackLicenseState licenseState) { super(settings, licenseState); - controller.registerHandler(POST, "/_xpath/security/oidc/prepare", this); + controller.registerHandler(POST, "/_security/oidc/prepare", this); } @Override From 
8d54c7c24d9b78fbde9f0481bc3b6d11db12def4 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 21 Dec 2018 13:22:36 +0200 Subject: [PATCH 03/71] track current changes --- ...nIdConnectPrepareAuthenticationAction.java | 5 +++ ...dConnectPrepareAuthenticationResponse.java | 22 ++++------ .../authz/privilege/ClusterPrivilege.java | 3 ++ .../xpack/security/Security.java | 7 ++++ ...nsportOpenIdConnectAuthenticateAction.java | 41 +++++++++++++++++++ ...nIdConnectPrepareAuthenticationAction.java | 11 +++-- .../authc/oidc/OpenIdConnectRealm.java | 1 + .../authc/saml/OpenIdConnectToken.java | 39 ++++++++++++++++++ .../RestOpenIdConnectAuthenticateAction.java | 5 ++- ...nIdConnectPrepareAuthenticationAction.java | 7 ++-- 10 files changed, 118 insertions(+), 23 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java index a7356feae6191..0fe2b356e8313 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.Action; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index d80c43db06129..17c4472d6c0a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -14,36 +14,28 @@ public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse { - private String redirectUrl; - private String state; + private String authorizationEndpointUrl; - public OpenIdConnectPrepareAuthenticationResponse(String redirectUrl, String state) { - this.redirectUrl = redirectUrl; - this.state = state; + public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state) { + this.authorizationEndpointUrl = authorizationEndpointUrl; } public OpenIdConnectPrepareAuthenticationResponse() { } - public String getRedirectUrl() { - return redirectUrl; - } - - public String getState() { - return state; + public String getAuthorizationEndpointUrl() { + return authorizationEndpointUrl; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - state = in.readString(); - redirectUrl = in.readString(); + authorizationEndpointUrl = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(state); - out.writeString(redirectUrl); + out.writeString(authorizationEndpointUrl); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java index fba595e7a09e4..52c79a6050c2d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java @@ -33,6 +33,7 @@ public final class ClusterPrivilege extends Privilege { private static final Automaton MANAGE_SECURITY_AUTOMATON = patterns("cluster:admin/xpack/security/*"); private static final Automaton MANAGE_SAML_AUTOMATON = patterns("cluster:admin/xpack/security/saml/*", InvalidateTokenAction.NAME, RefreshTokenAction.NAME); + private static final Automaton MANAGE_OIDC_AUTOMATON = patterns("cluster:admin/xpack/security/oidc/*"); private static final Automaton MANAGE_TOKEN_AUTOMATON = patterns("cluster:admin/xpack/security/token/*"); private static final Automaton MONITOR_AUTOMATON = patterns("cluster:monitor/*"); private static final Automaton MONITOR_ML_AUTOMATON = patterns("cluster:monitor/xpack/ml/*"); @@ -70,6 +71,7 @@ public final class ClusterPrivilege extends Privilege { public static final ClusterPrivilege TRANSPORT_CLIENT = new ClusterPrivilege("transport_client", TRANSPORT_CLIENT_AUTOMATON); public static final ClusterPrivilege MANAGE_SECURITY = new ClusterPrivilege("manage_security", MANAGE_SECURITY_AUTOMATON); public static final ClusterPrivilege MANAGE_SAML = new ClusterPrivilege("manage_saml", MANAGE_SAML_AUTOMATON); + public static final ClusterPrivilege MANAGE_OIDC = new ClusterPrivilege("manage_oidc", MANAGE_OIDC_AUTOMATON); public static final ClusterPrivilege MANAGE_PIPELINE = new ClusterPrivilege("manage_pipeline", "cluster:admin/ingest/pipeline/*"); public static final ClusterPrivilege MANAGE_CCR = new ClusterPrivilege("manage_ccr", MANAGE_CCR_AUTOMATON); public static final ClusterPrivilege READ_CCR = new ClusterPrivilege("read_ccr", READ_CCR_AUTOMATON); @@ 
-94,6 +96,7 @@ public final class ClusterPrivilege extends Privilege { .put("transport_client", TRANSPORT_CLIENT) .put("manage_security", MANAGE_SECURITY) .put("manage_saml", MANAGE_SAML) + .put("manage_oidc", MANAGE_OIDC) .put("manage_pipeline", MANAGE_PIPELINE) .put("manage_rollup", MANAGE_ROLLUP) .put("manage_ccr", MANAGE_CCR) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index e5eed5d2980a3..3ecb6e5585640 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -84,6 +84,8 @@ import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.SecuritySettings; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; @@ -141,6 +143,8 @@ import org.elasticsearch.xpack.security.action.interceptor.ResizeRequestInterceptor; import org.elasticsearch.xpack.security.action.interceptor.SearchRequestInterceptor; import org.elasticsearch.xpack.security.action.interceptor.UpdateRequestInterceptor; +import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.security.action.privilege.TransportDeletePrivilegesAction; import 
org.elasticsearch.xpack.security.action.privilege.TransportGetPrivilegesAction; import org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction; @@ -720,6 +724,9 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(SamlAuthenticateAction.INSTANCE, TransportSamlAuthenticateAction.class), new ActionHandler<>(SamlLogoutAction.INSTANCE, TransportSamlLogoutAction.class), new ActionHandler<>(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class), + new ActionHandler<>(OpenIdConnectPrepareAuthenticationAction.INSTANCE, + TransportOpenIdConnectPrepareAuthenticationAction.class), + new ActionHandler<>(OpenIdConnectAuthenticateAction.INSTANCE, TransportOpenIdConnectAuthenticateAction.class), new ActionHandler<>(GetPrivilegesAction.INSTANCE, TransportGetPrivilegesAction.class), new ActionHandler<>(PutPrivilegesAction.INSTANCE, TransportPutPrivilegesAction.class), new ActionHandler<>(DeletePrivilegesAction.INSTANCE, TransportDeletePrivilegesAction.class) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java index 1e544dc774ce4..fc403d3574301 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -4,26 +4,67 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateRequest; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateResponse; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.TokenService; +import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; +import org.elasticsearch.xpack.security.authc.saml.OpenIdConnectToken; + +import java.util.Map; public class TransportOpenIdConnectAuthenticateAction extends HandledTransportAction { + private final ThreadPool threadPool; + private final AuthenticationService authenticationService; + private final TokenService tokenService; + @Inject public TransportOpenIdConnectAuthenticateAction(ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthenticationService authenticationService, TokenService tokenService) { super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectAuthenticateRequest::new); + this.threadPool = threadPool; + this.authenticationService = authenticationService; + this.tokenService = tokenService; } @Override protected void doExecute(Task task, OpenIdConnectAuthenticateRequest request, ActionListener listener) { + final OpenIdConnectToken token = new OpenIdConnectToken(request.getCode()); + final ThreadContext threadContext = threadPool.getThreadContext(); + Authentication originatingAuthentication = Authentication.getAuthentication(threadContext); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authenticationService.authenticate(OpenIdConnectAuthenticateAction.NAME, request, token, ActionListener.wrap( + 
authentication -> { + AuthenticationResult result = threadContext.getTransient(AuthenticationResult.THREAD_CONTEXT_KEY); + if (result == null) { + listener.onFailure(new IllegalStateException("Cannot find AuthenticationResult on thread context")); + return; + } + @SuppressWarnings("unchecked") final Map tokenMetadata = (Map) result.getMetadata() + .get(OpenIdConnectRealm.CONTEXT_TOKEN_DATA); + tokenService.createUserToken(authentication, originatingAuthentication, + ActionListener.wrap(tuple -> { + final String tokenString = tokenService.getUserTokenString(tuple.v1()); + final TimeValue expiresIn = tokenService.getExpirationDelay(); + listener.onResponse(new OpenIdConnectAuthenticateResponse(authentication.getUser().principal(), tokenString, tuple.v2(), expiresIn))) + ; + }, listener::onFailure), tokenMetadata, true); + }, e -> { + listener.onFailure(e); + } + )); + } } } + diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java index 0a01203300976..58ca3ca649b4a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.security.action.oidc; import org.elasticsearch.ElasticsearchSecurityException; @@ -8,9 +13,9 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationRequest; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; -import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; @@ -25,7 +30,7 @@ public class TransportOpenIdConnectPrepareAuthenticationAction extends HandledTr @Inject public TransportOpenIdConnectPrepareAuthenticationAction(TransportService transportService, ActionFilters actionFilters, Realms realms) { - super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectPrepareAuthenticationRequest::new); + super(OpenIdConnectPrepareAuthenticationAction.NAME, transportService, actionFilters, OpenIdConnectPrepareAuthenticationRequest::new); this.realms = realms; } @@ -40,7 +45,7 @@ protected void doExecute(Task task, OpenIdConnectPrepareAuthenticationRequest re if (realms.isEmpty()) { listener.onFailure(new ElasticsearchSecurityException("Cannot find OIDC realm with name [{}]", request.getRealmName())); } else if (realms.size() > 1) { - // Can't define multiple realms with the same name in configuration, but check still. + // Can't define multiple realms with the same name in configuration, but check, still. 
listener.onFailure(new ElasticsearchSecurityException("Found multiple ([{}]) OIDC realms with name [{}]", realms.size(), request.getRealmName())); } else if (Strings.isNullOrEmpty(request.getState())) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index 615b92c3d865e..c53657c53d8f8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -13,6 +13,7 @@ public class OpenIdConnectRealm extends Realm implements Releasable { + public static final String CONTEXT_TOKEN_DATA = "_oidc_tokendata"; private static final Logger logger = LogManager.getLogger(OpenIdConnectRealm.class); public OpenIdConnectRealm(RealmConfig config) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java new file mode 100644 index 0000000000000..9a6246246e94e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.saml; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; + +/** + * A {@link AuthenticationToken} to hold OpenID Connect related content. 
+ * Depending on the flow this can contain only a code ( oAuth2 authorization code + grant flow ) or an Identity Token ( oAuth2 implicit flow ) + */ +public class OpenIdConnectToken implements AuthenticationToken { + + @Nullable + private String code; + + public OpenIdConnectToken(String code) { + this.code = code; + } + + @Override + public String principal() { + return ""; + } + + @Override + public Object credentials() { + return code; + } + + @Override + public void clearCredentials() { + this.code = null; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java index 7ec7e8bebb5aa..d78c8e83e1cda 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java @@ -26,6 +26,9 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; +/** + * Rest handler that authenticates the user based on the provided oauth2 code + */ public class RestOpenIdConnectAuthenticateAction extends OpenIdConnectBaseRestHandler { static final ObjectParser PARSER = new ObjectParser<>("oidc_authn", @@ -64,6 +67,6 @@ public RestResponse buildResponse(OpenIdConnectAuthenticateResponse response, XC @Override public String getName() { - return "xpack_security_oidc_authenticate_action"; + return "security_oidc_authenticate_action"; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java index bb00b57ef172e..671cbeb3476c4 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java @@ -50,11 +50,10 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien return channel -> client.execute(OpenIdConnectPrepareAuthenticationAction.INSTANCE, authenticationRequest, new RestBuilderListener(channel) { @Override - public RestResponse buildResponse(OpenIdConnectPrepareAuthenticationResponse actionResponse, XContentBuilder builder) + public RestResponse buildResponse(OpenIdConnectPrepareAuthenticationResponse response, XContentBuilder builder) throws Exception { builder.startObject(); - builder.field("state"); - builder.field("redirect"); + builder.field("authorization_endpoint_url", response.getAuthorizationEndpointUrl()); builder.endObject(); return new BytesRestResponse(RestStatus.OK, builder); } @@ -64,6 +63,6 @@ public RestResponse buildResponse(OpenIdConnectPrepareAuthenticationResponse act @Override public String getName() { - return "xpack_security_oidc_prepare_authentication_action"; + return "security_oidc_prepare_authentication_action"; } } From cc68f5a269d47b906ab72eefd14f51c396c034f8 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 25 Dec 2018 00:24:52 +0200 Subject: [PATCH 04/71] Ongoing oidc work - Added Realm settings - Added RP/OP configuration classes - Added realm configuration tests - Completed the Prepare Authentication API --- ...IdConnectPrepareAuthenticationRequest.java | 17 ++- .../oidc/OpenIdConnectRealmSettings.java | 34 +++++ ...nsportOpenIdConnectAuthenticateAction.java | 4 +- ...nIdConnectPrepareAuthenticationAction.java | 14 +- .../security/authc/oidc/OPConfiguration.java | 50 +++++++ .../authc/oidc/OpenIdConnectRealm.java | 86 ++++++++++++ .../security/authc/oidc/RPConfiguration.java | 56 ++++++++
...nIdConnectPrepareAuthenticationAction.java | 2 + .../authc/oidc/OpenIdConnectRealmTests.java | 127 ++++++++++++++++++ 9 files changed, 382 insertions(+), 8 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OPConfiguration.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java index 3f12523c77f3b..3c69e2d1a5308 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java @@ -17,6 +17,7 @@ public class OpenIdConnectPrepareAuthenticationRequest extends ActionRequest { private String realmName; private String state; + private String nonce; public String getRealmName() { return realmName; @@ -26,6 +27,10 @@ public String getState() { return state; } + public String getNonce() { + return nonce; + } + public void setRealmName(String realmName) { this.realmName = realmName; } @@ -34,6 +39,10 @@ public void setState(String state) { this.state = state; } + public void setNonce(String nonce) { + this.nonce = nonce; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -43,6 +52,9 @@ public ActionRequestValidationException validate() { if (Strings.hasText(state) == false) { validationException = addValidationError("state must be provided", 
validationException); } + if (Strings.hasText(nonce) == false) { + validationException = addValidationError("nonce must be provided", validationException); + } return validationException; } @@ -51,6 +63,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(realmName); out.writeString(state); + out.writeString(nonce); } @Override @@ -58,9 +71,11 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); realmName = in.readString(); state = in.readString(); + nonce = in.readString(); } public String toString() { - return "{realmName=" + realmName + ", state=" + state + "}"; + return "{realmName=" + realmName + ", state=" + state + ", nonce=" + nonce + "}"; } + } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java index 2438beb70605d..0f3f400bd075f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -5,7 +5,41 @@ */ package org.elasticsearch.xpack.core.security.authc.oidc; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.xpack.core.security.authc.RealmSettings; + +import java.util.Collections; +import java.util.List; +import java.util.function.Function; + + public class OpenIdConnectRealmSettings { + private OpenIdConnectRealmSettings() { + } + public static final String TYPE = "oidc"; + + public static final Setting.AffixSetting OP_NAME + = RealmSettings.simpleString(TYPE, "op.name", Setting.Property.NodeScope); + public static final Setting.AffixSetting RP_CLIENT_ID + = 
RealmSettings.simpleString(TYPE, "rp.client_id", Setting.Property.NodeScope); + public static final Setting RP_CLIENT_SECRET = SecureSetting.secureString("rp.client_secret", null); + public static final Setting.AffixSetting RP_REDIRECT_URI + = RealmSettings.simpleString(TYPE, "rp.redirect_uri", Setting.Property.NodeScope); + public static final Setting.AffixSetting RP_RESPONSE_TYPE + = RealmSettings.simpleString(TYPE, "rp.response_type", Setting.Property.NodeScope); + public static final Setting.AffixSetting OP_AUTHORIZATION_ENDPOINT + = RealmSettings.simpleString(TYPE, "op.authorization_endpoint", Setting.Property.NodeScope); + public static final Setting.AffixSetting OP_TOKEN_ENDPOINT + = RealmSettings.simpleString(TYPE, "op.token_endpoint", Setting.Property.NodeScope); + public static final Setting.AffixSetting OP_USERINFO_ENDPOINT + = RealmSettings.simpleString(TYPE, "op.userinfo_endpoint", Setting.Property.NodeScope); + public static final Setting.AffixSetting OP_ISSUER + = RealmSettings.simpleString(TYPE, "op.issuer", Setting.Property.NodeScope); + public static final Setting.AffixSetting> RP_REQUESTED_SCOPES = Setting.affixKeySetting( + RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", + key -> Setting.listSetting(key, Collections.singletonList("openid"), Function.identity(), Setting.Property.NodeScope)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java index fc403d3574301..a399302914074 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -57,8 +57,8 @@ protected void doExecute(Task task, 
OpenIdConnectAuthenticateRequest request, Ac ActionListener.wrap(tuple -> { final String tokenString = tokenService.getUserTokenString(tuple.v1()); final TimeValue expiresIn = tokenService.getExpirationDelay(); - listener.onResponse(new OpenIdConnectAuthenticateResponse(authentication.getUser().principal(), tokenString, tuple.v2(), expiresIn))) - ; + listener.onResponse(new OpenIdConnectAuthenticateResponse(authentication.getUser().principal(), tokenString, + tuple.v2(), expiresIn)); }, listener::onFailure), tokenMetadata, true); }, e -> { listener.onFailure(e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java index 58ca3ca649b4a..6d23e48a7bc08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.action.oidc; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -51,14 +52,17 @@ protected void doExecute(Task task, OpenIdConnectPrepareAuthenticationRequest re } else if (Strings.isNullOrEmpty(request.getState())) { listener.onFailure(new ElasticsearchSecurityException("State parameter cannot be empty")); } else { - prepareAuthenticationResponse(realms.get(0), request.getState(), listener); + prepareAuthenticationResponse(realms.get(0), request.getState(), request.getNonce(), listener); } } - private void prepareAuthenticationResponse(OpenIdConnectRealm realm, String state, + 
private void prepareAuthenticationResponse(OpenIdConnectRealm realm, String state, String nonce, ActionListener listener) { - //TODO: Generate the Authorization URL from the OP metadata and the configuration - final String authorizationEndpointURl = ""; - listener.onResponse(new OpenIdConnectPrepareAuthenticationResponse(authorizationEndpointURl, state)); + try { + final String authorizationEndpointURl = realm.buildAuthenticationRequestUri(state, nonce); + listener.onResponse(new OpenIdConnectPrepareAuthenticationResponse(authorizationEndpointURl, state)); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OPConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OPConfiguration.java new file mode 100644 index 0000000000000..4df245be21102 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OPConfiguration.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.oidc; + +import org.elasticsearch.common.Nullable; + +import java.util.Objects; + +/** + * A Class that contains all the OpenID Connect Provider configuration + */ +public class OPConfiguration { + private final String providerName; + private final String authorizationEndpoint; + private final String tokenEndpoint; + private final String userinfoEndpoint; + private final String issuer; + + public OPConfiguration(String providerName, String issuer, String authorizationEndpoint, @Nullable String tokenEndpoint, + @Nullable String userinfoEndpoint) { + this.providerName = Objects.requireNonNull(providerName, "OP Name must be provided"); + this.authorizationEndpoint = Objects.requireNonNull(authorizationEndpoint, "Authorization Endpoint must be provided"); + this.tokenEndpoint = tokenEndpoint; + this.userinfoEndpoint = userinfoEndpoint; + this.issuer = Objects.requireNonNull(issuer, "OP Issuer must be provided"); + } + + public String getProviderName() { + return providerName; + } + + public String getAuthorizationEndpoint() { + return authorizationEndpoint; + } + + public String getTokenEndpoint() { + return tokenEndpoint; + } + + public String getUserinfoEndpoint() { + return userinfoEndpoint; + } + + public String getIssuer() { + return issuer; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index c53657c53d8f8..672aad9962320 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -1,23 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.xpack.security.authc.oidc; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.user.User; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_ISSUER; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_NAME; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_USERINFO_ENDPOINT; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_ID; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REDIRECT_URI; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_RESPONSE_TYPE; +import static 
org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES; + public class OpenIdConnectRealm extends Realm implements Releasable { public static final String CONTEXT_TOKEN_DATA = "_oidc_tokendata"; private static final Logger logger = LogManager.getLogger(OpenIdConnectRealm.class); + private final OPConfiguration opConfiguration; + private final RPConfiguration rpConfiguration; public OpenIdConnectRealm(RealmConfig config) { super(config); + this.rpConfiguration = buildRPConfiguration(config); + this.opConfiguration = buildOPConfiguration(config); } @Override @@ -44,4 +73,61 @@ public void authenticate(AuthenticationToken token, ActionListener listener) { } + + private RPConfiguration buildRPConfiguration(RealmConfig config) { + String redirectUri = require(config, RP_REDIRECT_URI); + String clientId = require(config, RP_CLIENT_ID); + String responseType = require(config, RP_RESPONSE_TYPE); + List requestedScopes = config.hasSetting(RP_REQUESTED_SCOPES) ? 
+ config.getSetting(RP_REQUESTED_SCOPES) : Collections.emptyList(); + + return new RPConfiguration(clientId, redirectUri, responseType, requestedScopes); + } + + private OPConfiguration buildOPConfiguration(RealmConfig config) { + String providerName = require(config, OP_NAME); + String authorizationEndpoint = require(config, OP_AUTHORIZATION_ENDPOINT); + String issuer = require(config, OP_ISSUER); + String tokenEndpoint = config.getSetting(OP_TOKEN_ENDPOINT, () -> null); + String userinfoEndpoint = config.getSetting(OP_USERINFO_ENDPOINT, () -> null); + + return new OPConfiguration(providerName, issuer, authorizationEndpoint, tokenEndpoint, userinfoEndpoint); + } + + static String require(RealmConfig config, Setting.AffixSetting setting) { + final String value = config.getSetting(setting); + if (value.isEmpty()) { + throw new IllegalArgumentException("The configuration setting [" + RealmSettings.getFullSettingKey(config, setting) + + "] is required"); + } + return value; + } + + /** + * Creates the URI for an OIDC Authentication Request from the realm configuration using URI Query String Serialization + * + * @param state The oAuth2 state parameter used for CSRF protection + * @return a URI at the OP where the user's browser should be redirected for authentication + */ + public String buildAuthenticationRequestUri(String state, String nonce) throws ElasticsearchException { + try { + StringBuilder builder = new StringBuilder(); + builder.append(opConfiguration.getAuthorizationEndpoint()); + addParameter(builder, "scope", Strings.collectionToDelimitedString(rpConfiguration.getRequestedScopes(), " ")); + addParameter(builder, "response_type", rpConfiguration.getResponseType()); + addParameter(builder, "client_id", rpConfiguration.getClientId()); + addParameter(builder, "redirect_uri", rpConfiguration.getRedirectUri()); + addParameter(builder, "state", state); + addParameter(builder, "nonce", nonce); + return builder.toString(); + } catch (UnsupportedEncodingException 
e) { + throw new ElasticsearchException("Cannot build OIDC Authentication Request", e); + } + } + + private StringBuilder addParameter(StringBuilder builder, String parameter, String value) throws UnsupportedEncodingException { + builder.append("&").append(parameter).append("="); + builder.append(URLEncoder.encode(value, StandardCharsets.UTF_8.name())); + return builder; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java new file mode 100644 index 0000000000000..20876ddb7bb6a --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.oidc; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A Class that contains all the OpenID Connect Relying Party configuration + */ +public class RPConfiguration { + private final String clientId; + private final String redirectUri; + private final String responseType; + private final List requestedScopes; + + public RPConfiguration(String clientId, String redirectUri, String responseType, @Nullable List requestedScopes) { + this.clientId = Objects.requireNonNull(clientId, "RP Client ID must be provided"); + this.redirectUri = Objects.requireNonNull(redirectUri, "RP Redirect URI must be provided"); + if (Strings.hasText(responseType) == false) { + throw new IllegalArgumentException("Response type must be provided"); + } else if (responseType.equals("code") == false && responseType.equals("implicit") == false) { + throw new IllegalArgumentException("Invalid response type provided. 
Only code or implicit are allowed"); + } else { + this.responseType = responseType; + } + if (null == requestedScopes || requestedScopes.isEmpty()) { + this.requestedScopes = Collections.singletonList("openid"); + } else { + this.requestedScopes = requestedScopes; + } + } + + public String getClientId() { + return clientId; + } + + public String getRedirectUri() { + return redirectUri; + } + + public String getResponseType() { + return responseType; + } + + public List getRequestedScopes() { + return requestedScopes; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java index 165699926c718..dae51a08ef204 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java @@ -36,6 +36,8 @@ public class RestOpenIdConnectPrepareAuthenticationAction extends OpenIdConnectB static { PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setRealmName, new ParseField("realm")); + PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setState, new ParseField("state")); + PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setNonce, new ParseField("nonce")); } public RestOpenIdConnectPrepareAuthenticationAction(Settings settings, RestController controller, XPackLicenseState licenseState) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java new file mode 100644 index 0000000000000..3b00b360fe6bf --- 
/dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.oidc; + + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; +import org.hamcrest.Matchers; +import org.junit.Before; + +import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; + +public class OpenIdConnectRealmTests extends ESTestCase { + + private final static String REALM_NAME = "oidc1-realm"; + private static final String REALM_SETTINGS_PREFIX = "xpack.security.authc.realms.oidc." 
+ REALM_NAME; + private Settings globalSettings; + private Environment env; + private ThreadContext threadContext; + + @Before + public void setupEnv() { + globalSettings = Settings.builder().put("path.home", createTempDir()).build(); + env = TestEnvironment.newEnvironment(globalSettings); + threadContext = new ThreadContext(globalSettings); + } + + public void testIncorrectResponseTypeThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "hybrid"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); + assertThat(exception.getMessage(), Matchers.containsString("Invalid response type provided")); + } + + public void testMissingAuthorizationEndpointThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); + IllegalArgumentException exception = 
expectThrows(IllegalArgumentException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); + assertThat(exception.getMessage(), + Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT))); + } + + public void testMissingIssuerThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); + assertThat(exception.getMessage(), + Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER))); + } + + public void testMissingNameTypeThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); + assertThat(exception.getMessage(), 
+ Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME))); + } + + public void testMissingRedirectUriThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); + assertThat(exception.getMessage(), + Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI))); + } + + public void testMissingClientIdThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); + assertThat(exception.getMessage(), + Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID))); + } + + private RealmConfig buildConfig(Settings realmSettings) { + 
final Settings settings = Settings.builder() + .put("path.home", createTempDir()) + .put(realmSettings).build(); + final Environment env = TestEnvironment.newEnvironment(settings); + return new RealmConfig(new RealmConfig.RealmIdentifier("oidc", REALM_NAME), settings, env, threadContext); + } +} From 72a2dd48fef3b8f629e5c23f4c68f95209750b1c Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 27 Dec 2018 22:02:45 +0200 Subject: [PATCH 05/71] Add functionality for building Authentication Requests --- .../oidc/OpenIdConnectAuthenticateAction.java | 8 ++ .../OpenIdConnectAuthenticateRequest.java | 44 +++++++--- ...enIdConnectAuthenticateRequestBuilder.java | 36 ++++++++ ...IdConnectPrepareAuthenticationRequest.java | 19 ++-- ...ctPrepareAuthenticationRequestBuilder.java | 35 ++++++++ ...dConnectPrepareAuthenticationResponse.java | 10 ++- ...nsportOpenIdConnectAuthenticateAction.java | 16 ++-- ...nIdConnectPrepareAuthenticationAction.java | 14 +-- .../authc/oidc/OpenIdConnectRealm.java | 51 ++++++++--- .../authc/saml/OpenIdConnectToken.java | 32 +++++-- .../RestOpenIdConnectAuthenticateAction.java | 6 +- ...nectPrepareAuthenticationRequestTests.java | 48 ++++++++++ .../authc/oidc/OpenIdConnectRealmTests.java | 87 +++++++++++++++++-- 13 files changed, 347 insertions(+), 59 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequestBuilder.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java index c81d502f09ab3..a0159d81a9220 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java @@ -1,7 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.Action; +/** + * Action for initiating an authentication process using OpenID Connect + */ public final class OpenIdConnectAuthenticateAction extends Action { public static final OpenIdConnectAuthenticateAction INSTANCE = new OpenIdConnectAuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java index d6c6b37ad4bc6..7fcab31edc816 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java @@ -1,29 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; /** - * Represents a request for authentication using an OAuth 2.0 Authorization Code + * Represents a request for authentication using OpenID Connect */ public class OpenIdConnectAuthenticateRequest extends ActionRequest { - /* - * OAuth 2.0 Authorization Code + /** + * The URI were the OP redirected the browser after the authentication attempt. This is passed as is from the + * facilitator entity (i.e. Kibana) */ - private String code; + private String redirectUri; - /* - * OAuth 2.0 state value. + /** + * The state value that either we or the facilitator generated for this specific flow and that was stored at the user's session with + * the facilitator */ private String state; - public String getCode() { - return code; + /** + * The nonce value that the facilitator generated for this specific flow and that was stored at the user's session with + * the facilitator + */ + private String nonce; + + public OpenIdConnectAuthenticateRequest() { + } + + public String getRedirectUri() { + return redirectUri; } - public void setCode(String code) { - this.code = code; + public void setRedirectUri(String redirectUri) { + this.redirectUri = redirectUri; } public String getState() { @@ -34,6 +50,14 @@ public void setState(String state) { this.state = state; } + public String getNonce() { + return nonce; + } + + public void setNonce(String nonce) { + this.nonce = nonce; + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequestBuilder.java new file mode 100644 index 0000000000000..cbdd13aec0463 --- /dev/null +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequestBuilder.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Request builder for populating a {@link OpenIdConnectAuthenticateRequest} + */ +public class OpenIdConnectAuthenticateRequestBuilder + extends ActionRequestBuilder { + + public OpenIdConnectAuthenticateRequestBuilder(ElasticsearchClient client) { + super(client, OpenIdConnectAuthenticateAction.INSTANCE, new OpenIdConnectAuthenticateRequest()); + } + + public OpenIdConnectAuthenticateRequestBuilder redirectUri(String redirectUri) { + request.setRedirectUri(redirectUri); + return this; + } + + public OpenIdConnectAuthenticateRequestBuilder state(String state) { + request.setState(state); + return this; + } + + public OpenIdConnectAuthenticateRequestBuilder nonce(String nonce) { + request.setNonce(nonce); + return this; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java index 3c69e2d1a5308..a64d690784eca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.ActionRequest; @@ -49,12 +54,6 @@ public ActionRequestValidationException validate() { if (Strings.hasText(realmName) == false) { validationException = addValidationError("realm name must be provided", null); } - if (Strings.hasText(state) == false) { - validationException = addValidationError("state must be provided", validationException); - } - if (Strings.hasText(nonce) == false) { - validationException = addValidationError("nonce must be provided", validationException); - } return validationException; } @@ -62,16 +61,16 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(realmName); - out.writeString(state); - out.writeString(nonce); + out.writeOptionalString(state); + out.writeOptionalString(nonce); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); realmName = in.readString(); - state = in.readString(); - nonce = in.readString(); + state = in.readOptionalString(); + nonce = in.readOptionalString(); } public String toString() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java new file mode 100644 index 0000000000000..1637d6f690bd4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.oidc; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Request builder for populating a {@link OpenIdConnectPrepareAuthenticationRequest} + */ +public class OpenIdConnectPrepareAuthenticationRequestBuilder + extends ActionRequestBuilder { + + public OpenIdConnectPrepareAuthenticationRequestBuilder(ElasticsearchClient client) { + super(client, OpenIdConnectPrepareAuthenticationAction.INSTANCE, new OpenIdConnectPrepareAuthenticationRequest()); + } + + public OpenIdConnectPrepareAuthenticationRequestBuilder realmName(String name) { + request.setRealmName(name); + return this; + } + + public OpenIdConnectPrepareAuthenticationRequestBuilder state(String state) { + request.setState(state); + return this; + } + + public OpenIdConnectPrepareAuthenticationRequestBuilder nonce(String nonce) { + request.setNonce(nonce); + return this; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index 17c4472d6c0a5..ce64ab84a7211 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -11,13 +11,17 @@ import java.io.IOException; - +/** + * A response containing the authorization endpoint URL and the appropriate request parameters as URL parameters + */ public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse { private String authorizationEndpointUrl; + private String 
state; public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state) { this.authorizationEndpointUrl = authorizationEndpointUrl; + this.state = state; } public OpenIdConnectPrepareAuthenticationResponse() { @@ -27,6 +31,10 @@ public String getAuthorizationEndpointUrl() { return authorizationEndpointUrl; } + public String getState() { + return state; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java index a399302914074..7ca870d9ac977 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.security.action.oidc; import org.elasticsearch.action.ActionListener; @@ -30,17 +35,18 @@ public class TransportOpenIdConnectAuthenticateAction extends HandledTransportAc @Inject public TransportOpenIdConnectAuthenticateAction(ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, AuthenticationService authenticationService, - TokenService tokenService) { + ActionFilters actionFilters, AuthenticationService authenticationService, + TokenService tokenService) { super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectAuthenticateRequest::new); this.threadPool = threadPool; this.authenticationService = authenticationService; this.tokenService = tokenService; } - @Override - protected void doExecute(Task task, OpenIdConnectAuthenticateRequest request, ActionListener listener) { - final OpenIdConnectToken token = new OpenIdConnectToken(request.getCode()); + @Override + protected void doExecute(Task task, OpenIdConnectAuthenticateRequest request, + ActionListener listener) { + final OpenIdConnectToken token = new OpenIdConnectToken(request.getRedirectUri(), request.getState(), request.getNonce()); final ThreadContext threadContext = threadPool.getThreadContext(); Authentication originatingAuthentication = Authentication.getAuthentication(threadContext); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java index 6d23e48a7bc08..1bc8c678aeb82 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -10,7 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.Strings; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -31,7 +32,8 @@ public class TransportOpenIdConnectPrepareAuthenticationAction extends HandledTr @Inject public TransportOpenIdConnectPrepareAuthenticationAction(TransportService transportService, ActionFilters actionFilters, Realms realms) { - super(OpenIdConnectPrepareAuthenticationAction.NAME, transportService, actionFilters, OpenIdConnectPrepareAuthenticationRequest::new); + super(OpenIdConnectPrepareAuthenticationAction.NAME, transportService, actionFilters, + OpenIdConnectPrepareAuthenticationRequest::new); this.realms = realms; } @@ -49,18 +51,16 @@ protected void doExecute(Task task, OpenIdConnectPrepareAuthenticationRequest re // Can't define multiple realms with the same name in configuration, but check, still. 
listener.onFailure(new ElasticsearchSecurityException("Found multiple ([{}]) OIDC realms with name [{}]", realms.size(), request.getRealmName())); - } else if (Strings.isNullOrEmpty(request.getState())) { - listener.onFailure(new ElasticsearchSecurityException("State parameter cannot be empty")); } else { prepareAuthenticationResponse(realms.get(0), request.getState(), request.getNonce(), listener); } } - private void prepareAuthenticationResponse(OpenIdConnectRealm realm, String state, String nonce, + private void prepareAuthenticationResponse(OpenIdConnectRealm realm, @Nullable String state, @Nullable String nonce, ActionListener listener) { try { - final String authorizationEndpointURl = realm.buildAuthenticationRequestUri(state, nonce); - listener.onResponse(new OpenIdConnectPrepareAuthenticationResponse(authorizationEndpointURl, state)); + final Tuple authenticationRequest = realm.buildAuthenticationRequest(state, nonce); + listener.onResponse(new OpenIdConnectPrepareAuthenticationResponse(authenticationRequest.v1(), authenticationRequest.v2())); } catch (ElasticsearchException e) { listener.onFailure(e); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index 672aad9962320..f5c728c405c2e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -9,9 +9,13 @@ import org.apache.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.hash.MessageDigests; import 
org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -23,6 +27,7 @@ import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.security.SecureRandom; import java.util.Collections; import java.util.List; @@ -39,6 +44,7 @@ public class OpenIdConnectRealm extends Realm implements Releasable { public static final String CONTEXT_TOKEN_DATA = "_oidc_tokendata"; + private static final SecureRandom RANDOM_INSTANCE = new SecureRandom(); private static final Logger logger = LogManager.getLogger(OpenIdConnectRealm.class); private final OPConfiguration opConfiguration; private final RPConfiguration rpConfiguration; @@ -97,7 +103,7 @@ private OPConfiguration buildOPConfiguration(RealmConfig config) { static String require(RealmConfig config, Setting.AffixSetting setting) { final String value = config.getSetting(setting); if (value.isEmpty()) { - throw new IllegalArgumentException("The configuration setting [" + RealmSettings.getFullSettingKey(config, setting) + throw new SettingsException("The configuration setting [" + RealmSettings.getFullSettingKey(config, setting) + "] is required"); } return value; @@ -106,28 +112,51 @@ static String require(RealmConfig config, Setting.AffixSetting setting) /** * Creates the URI for an OIDC Authentication Request from the realm configuration using URI Query String Serialization * - * @param state The oAuth2 state parameter used for CSRF protection - * @return a URI at the OP where the user's browser should be redirected for authentication + * @param state The oAuth2 state parameter used for CSRF protection. 
If the facilitator doesn't supply one, we generate one ourselves + * @param nonce String value used to associate a Client session with an ID Token, and to mitigate replay attacks. If the facilitator + * doesn't supply one, we don't set one for the authentication request + * @return a Tuple of Strings with the URI at the OP where the user's browser should be redirected for authentication and the state */ - public String buildAuthenticationRequestUri(String state, String nonce) throws ElasticsearchException { + public Tuple buildAuthenticationRequest(@Nullable String state, @Nullable String nonce) throws ElasticsearchException { try { + if (Strings.hasText(state) == false) { + state = createNonce(); + } StringBuilder builder = new StringBuilder(); builder.append(opConfiguration.getAuthorizationEndpoint()); + addParameter(builder, "response_type", rpConfiguration.getResponseType(), true); addParameter(builder, "scope", Strings.collectionToDelimitedString(rpConfiguration.getRequestedScopes(), " ")); - addParameter(builder, "response_type", rpConfiguration.getResponseType()); addParameter(builder, "client_id", rpConfiguration.getClientId()); - addParameter(builder, "redirect_uri", rpConfiguration.getRedirectUri()); addParameter(builder, "state", state); - addParameter(builder, "nonce", nonce); - return builder.toString(); + if (Strings.hasText(nonce)) { + addParameter(builder, "nonce", nonce); + } + addParameter(builder, "redirect_uri", rpConfiguration.getRedirectUri()); + return new Tuple<>(builder.toString(), state); } catch (UnsupportedEncodingException e) { throw new ElasticsearchException("Cannot build OIDC Authentication Request", e); } } - private StringBuilder addParameter(StringBuilder builder, String parameter, String value) throws UnsupportedEncodingException { - builder.append("&").append(parameter).append("="); + private void addParameter(StringBuilder builder, String parameter, String value, boolean isFirstParameter) + throws UnsupportedEncodingException { 
+ char prefix = isFirstParameter ? '?' : '&'; + builder.append(prefix).append(parameter).append("="); builder.append(URLEncoder.encode(value, StandardCharsets.UTF_8.name())); - return builder; + } + + private void addParameter(StringBuilder builder, String parameter, String value) throws UnsupportedEncodingException { + addParameter(builder, parameter, value, false); + } + + /** + * Creates a cryptographically secure alphanumeric string to be used as a nonce + * + * @return an alphanumeric string + */ + static String createNonce() { + final byte[] randomBytes = new byte[16]; + RANDOM_INSTANCE.nextBytes(randomBytes); + return MessageDigests.toHexString(randomBytes); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java index 9a6246246e94e..0932077edd4cd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java @@ -15,25 +15,45 @@ */ public class OpenIdConnectToken implements AuthenticationToken { + private String redirectUri; + private String state; @Nullable - private String code; + private String nonce; - public OpenIdConnectToken(String code) { - this.code = code; + /** + * @param redirectUri The URI where the OP redirected the browser after the authentication attempt. This is passed as is from the + * facilitator entity (i.e. 
Kibana), so it is URL Encoded + * @param state The state value that either we or the facilitator generated for this specific flow and that was stored + * at the user's session with the facilitator + * @param nonce The nonce value that the facilitator generated for this specific flow and that was stored at the user's + * session with the facilitator + */ + public OpenIdConnectToken(String redirectUri, String state, String nonce) { + this.redirectUri = redirectUri; + this.state = state; + this.nonce = nonce; } @Override public String principal() { - return ""; + return ""; } @Override public Object credentials() { - return code; + return redirectUri; } @Override public void clearCredentials() { - this.code = null; + this.redirectUri = null; + } + + public String getState() { + return state; + } + + public String getNonce() { + return nonce; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java index d78c8e83e1cda..b7ad9f935d3cd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java @@ -35,8 +35,9 @@ public class RestOpenIdConnectAuthenticateAction extends OpenIdConnectBaseRestHa OpenIdConnectAuthenticateRequest::new); static { - PARSER.declareString(OpenIdConnectAuthenticateRequest::setCode, new ParseField("code")); + PARSER.declareString(OpenIdConnectAuthenticateRequest::setRedirectUri, new ParseField("redirect_uri")); PARSER.declareString(OpenIdConnectAuthenticateRequest::setState, new ParseField("state")); + PARSER.declareString(OpenIdConnectAuthenticateRequest::setNonce, new ParseField("nonce")); } public 
RestOpenIdConnectAuthenticateAction(Settings settings, RestController controller, XPackLicenseState licenseState) { @@ -51,7 +52,8 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien return channel -> client.execute(OpenIdConnectAuthenticateAction.INSTANCE, authenticateRequest, new RestBuilderListener(channel) { @Override - public RestResponse buildResponse(OpenIdConnectAuthenticateResponse response, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(OpenIdConnectAuthenticateResponse response, XContentBuilder builder) + throws Exception { builder.startObject(); builder.startObject() .field("username", response.getPrincipal()) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java new file mode 100644 index 0000000000000..3080cbcbeae16 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.action.oidc; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationRequest; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class OpenIdConnectPrepareAuthenticationRequestTests extends ESTestCase { + + public void testSerialization() throws IOException { + final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest(); + final String nonce = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); + final String state = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); + request.setState(state); + request.setNonce(nonce); + request.setRealmName("oidc-realm1"); + final BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + + final OpenIdConnectPrepareAuthenticationRequest unserialized = new OpenIdConnectPrepareAuthenticationRequest(); + unserialized.readFrom(out.bytes().streamInput()); + assertThat(unserialized.getRealmName(), equalTo("oidc-realm1")); + assertThat(unserialized.getState(), equalTo(state)); + assertThat(unserialized.getNonce(), equalTo(nonce)); + } + + public void testValidation() { + final String nonce = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); + final String state = randomBoolean() ? 
null : randomAlphaOfLengthBetween(8, 12); + final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest(); + request.setState(state); + request.setNonce(nonce); + final ActionRequestValidationException validation = request.validate(); + assertNotNull(validation); + assertThat(validation.validationErrors().size(), equalTo(1)); + assertThat(validation.validationErrors().get(0), containsString("realm name must be provided")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index 3b00b360fe6bf..593904fd3e823 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -6,7 +6,9 @@ package org.elasticsearch.xpack.security.authc.oidc; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -16,12 +18,14 @@ import org.hamcrest.Matchers; import org.junit.Before; +import java.util.Arrays; + import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; +import static org.hamcrest.Matchers.equalTo; public class OpenIdConnectRealmTests extends ESTestCase { - private final static String REALM_NAME = "oidc1-realm"; - private static final String REALM_SETTINGS_PREFIX = "xpack.security.authc.realms.oidc." 
+ REALM_NAME; + private static final String REALM_NAME = "oidc1-realm"; private Settings globalSettings; private Environment env; private ThreadContext threadContext; @@ -54,7 +58,7 @@ public void testMissingAuthorizationEndpointThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); assertThat(exception.getMessage(), @@ -68,7 +72,7 @@ public void testMissingIssuerThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); assertThat(exception.getMessage(), @@ -82,7 +86,7 @@ public void testMissingNameTypeThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); 
assertThat(exception.getMessage(), @@ -96,7 +100,7 @@ public void testMissingRedirectUriThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); assertThat(exception.getMessage(), @@ -110,13 +114,82 @@ public void testMissingClientIdThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); assertThat(exception.getMessage(), Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID))); } + public void testBuilidingAuthenticationRequest() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), + Arrays.asList("openid", "scope1", "scope2")); + final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + final String nonce = randomAlphaOfLength(12); + final String state = randomAlphaOfLength(12); + final Tuple authenticationRequest = realm.buildAuthenticationRequest(state, nonce); + assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2" + + "&client_id=rp-my&state=" + state + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + assertThat(authenticationRequest.v2(), equalTo(state)); + } + + public void testBuilidingAuthenticationRequestWithoutState() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), + Arrays.asList("openid", "scope1", "scope2")); + final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + final String nonce = randomAlphaOfLength(12); + final Tuple authenticationRequest = realm.buildAuthenticationRequest(null, nonce); + final String generatedState = authenticationRequest.v2(); + assertThat(authenticationRequest.v1(), 
equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2" + + "&client_id=rp-my&state=" + generatedState + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + } + + public void testBuilidingAuthenticationRequestWithoutStateAndNonce() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), + Arrays.asList("openid", "scope1", "scope2")); + final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + final Tuple authenticationRequest = realm.buildAuthenticationRequest(null, null); + final String generatedState = authenticationRequest.v2(); + assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2" + + "&client_id=rp-my&state=" + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + } + + public void testBuilidingAuthenticationRequestWithDefaultScope() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); + final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + final Tuple authenticationRequest = realm.buildAuthenticationRequest(null, null); + final String generatedState = authenticationRequest.v2(); + assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid" + + "&client_id=rp-my&state=" + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + } + private RealmConfig buildConfig(Settings realmSettings) { final Settings settings = Settings.builder() .put("path.home", createTempDir()) From 69fb95c945bb64b8dc7bf60f44cc730b57f7f518 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 27 Dec 2018 22:28:09 +0200 Subject: [PATCH 06/71] Fix formatting --- .../java/org/elasticsearch/xpack/security/Security.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3ecb6e5585640..c408211fd4a7e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -724,9 +724,9 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(SamlAuthenticateAction.INSTANCE, TransportSamlAuthenticateAction.class), new ActionHandler<>(SamlLogoutAction.INSTANCE, TransportSamlLogoutAction.class), new ActionHandler<>(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class), - new ActionHandler<>(OpenIdConnectPrepareAuthenticationAction.INSTANCE, - 
TransportOpenIdConnectPrepareAuthenticationAction.class), - new ActionHandler<>(OpenIdConnectAuthenticateAction.INSTANCE, TransportOpenIdConnectAuthenticateAction.class), + new ActionHandler<>(OpenIdConnectPrepareAuthenticationAction.INSTANCE, + TransportOpenIdConnectPrepareAuthenticationAction.class), + new ActionHandler<>(OpenIdConnectAuthenticateAction.INSTANCE, TransportOpenIdConnectAuthenticateAction.class), new ActionHandler<>(GetPrivilegesAction.INSTANCE, TransportGetPrivilegesAction.class), new ActionHandler<>(PutPrivilegesAction.INSTANCE, TransportPutPrivilegesAction.class), new ActionHandler<>(DeletePrivilegesAction.INSTANCE, TransportDeletePrivilegesAction.class) From 12dd9c0be349a4683c74a5c7e13915bee3330071 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 27 Dec 2018 22:43:36 +0200 Subject: [PATCH 07/71] Fix formatting --- .../xpack/core/security/authz/privilege/ClusterPrivilege.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java index 52c79a6050c2d..76e621bf5b6a2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java @@ -71,7 +71,7 @@ public final class ClusterPrivilege extends Privilege { public static final ClusterPrivilege TRANSPORT_CLIENT = new ClusterPrivilege("transport_client", TRANSPORT_CLIENT_AUTOMATON); public static final ClusterPrivilege MANAGE_SECURITY = new ClusterPrivilege("manage_security", MANAGE_SECURITY_AUTOMATON); public static final ClusterPrivilege MANAGE_SAML = new ClusterPrivilege("manage_saml", MANAGE_SAML_AUTOMATON); - public static final ClusterPrivilege MANAGE_OIDC = new 
ClusterPrivilege("manage_oidc", MANAGE_OIDC_AUTOMATON); + public static final ClusterPrivilege MANAGE_OIDC = new ClusterPrivilege("manage_oidc", MANAGE_OIDC_AUTOMATON); public static final ClusterPrivilege MANAGE_PIPELINE = new ClusterPrivilege("manage_pipeline", "cluster:admin/ingest/pipeline/*"); public static final ClusterPrivilege MANAGE_CCR = new ClusterPrivilege("manage_ccr", MANAGE_CCR_AUTOMATON); public static final ClusterPrivilege READ_CCR = new ClusterPrivilege("read_ccr", READ_CCR_AUTOMATON); @@ -96,7 +96,7 @@ public final class ClusterPrivilege extends Privilege { .put("transport_client", TRANSPORT_CLIENT) .put("manage_security", MANAGE_SECURITY) .put("manage_saml", MANAGE_SAML) - .put("manage_oidc", MANAGE_OIDC) + .put("manage_oidc", MANAGE_OIDC) .put("manage_pipeline", MANAGE_PIPELINE) .put("manage_rollup", MANAGE_ROLLUP) .put("manage_ccr", MANAGE_CCR) From 6a4cac6b3a01a0bb1cbdcecfbdf6537feea47c2b Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 28 Dec 2018 01:04:14 +0200 Subject: [PATCH 08/71] Fix settings --- .../OpenIdConnectAuthenticateRequest.java | 4 +++ ...dConnectPrepareAuthenticationResponse.java | 16 +++++---- .../authc/InternalRealmsSettings.java | 2 ++ .../core/security/authc/RealmSettings.java | 13 +++++++ .../oidc/OpenIdConnectRealmSettings.java | 16 +++++++-- ...nsportOpenIdConnectAuthenticateAction.java | 4 ++- ...nIdConnectPrepareAuthenticationAction.java | 5 ++- .../xpack/security/authc/InternalRealms.java | 3 ++ .../authc/oidc/OpenIdConnectRealm.java | 16 +++++---- .../{saml => oidc}/OpenIdConnectToken.java | 6 +++- .../RestOpenIdConnectAuthenticateAction.java | 1 + ...nIdConnectPrepareAuthenticationAction.java | 9 +++-- .../security/authc/InternalRealmsTests.java | 3 +- .../authc/oidc/OpenIdConnectRealmTests.java | 35 ++++++++++--------- 14 files changed, 93 insertions(+), 40 deletions(-) rename x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/{saml => oidc}/OpenIdConnectToken.java (90%) diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java index 7fcab31edc816..745945b0a4ca4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java @@ -62,5 +62,9 @@ public void setNonce(String nonce) { public ActionRequestValidationException validate() { return null; } + + public String toString() { + return "{redirectUri=" + redirectUri + ", state=" + state + ", nonce=" + nonce + "}"; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index ce64ab84a7211..ce54be4a5f5c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -16,19 +16,19 @@ */ public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse { - private String authorizationEndpointUrl; + private String authenticationRequestUrl; private String state; public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state) { - this.authorizationEndpointUrl = authorizationEndpointUrl; + this.authenticationRequestUrl = authorizationEndpointUrl; this.state = state; } public OpenIdConnectPrepareAuthenticationResponse() { } - public String getAuthorizationEndpointUrl() { - return authorizationEndpointUrl; + public String 
getAuthenticationRequestUrl() { + return authenticationRequestUrl; } public String getState() { @@ -38,12 +38,16 @@ public String getState() { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - authorizationEndpointUrl = in.readString(); + authenticationRequestUrl = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(authorizationEndpointUrl); + out.writeString(authenticationRequestUrl); + } + + public String toString() { + return "{authenticationRequestUrl=" + authenticationRequestUrl + ", state=" + state + "}"; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/InternalRealmsSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/InternalRealmsSettings.java index 8b2ef18406830..dd4a843345298 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/InternalRealmsSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/InternalRealmsSettings.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; +import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; @@ -34,6 +35,7 @@ public static Set> getSettings() { set.addAll(PkiRealmSettings.getSettings()); set.addAll(SamlRealmSettings.getSettings()); set.addAll(KerberosRealmSettings.getSettings()); + set.addAll(OpenIdConnectRealmSettings.getSettings()); return Collections.unmodifiableSet(set); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java index 913fcba3d33c8..d66a5e2ce1f21 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.core.security.authc; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -55,6 +57,17 @@ public static Setting.AffixSetting simpleString(String realmType, String return Setting.affixKeySetting(realmSettingPrefix(realmType), suffix, key -> Setting.simpleString(key, properties)); } + /** + * Create a {@link SecureSetting#secureString secure string} {@link Setting} object for a realm of + * with the provided type and setting suffix. + * + * @param realmType The type of the realm, used within the setting prefix + * @param suffix The suffix of the setting (everything following the realm name in the affix setting) + */ + public static Setting.AffixSetting secureString(String realmType, String suffix) { + return Setting.affixKeySetting(realmSettingPrefix(realmType), suffix, key -> SecureSetting.secureString(key, null)); + } + /** * Create a {@link Function} that acts as a factory an {@link org.elasticsearch.common.settings.Setting.AffixSetting}. * The {@code Function} takes the realm-type as an argument. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java index 0f3f400bd075f..5d51d23c3c69a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -5,13 +5,15 @@ */ package org.elasticsearch.xpack.core.security.authc.oidc; -import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.security.authc.RealmSettings; +import org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings; import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Function; @@ -26,7 +28,8 @@ private OpenIdConnectRealmSettings() { = RealmSettings.simpleString(TYPE, "op.name", Setting.Property.NodeScope); public static final Setting.AffixSetting RP_CLIENT_ID = RealmSettings.simpleString(TYPE, "rp.client_id", Setting.Property.NodeScope); - public static final Setting RP_CLIENT_SECRET = SecureSetting.secureString("rp.client_secret", null); + public static final Setting.AffixSetting RP_CLIENT_SECRET + = RealmSettings.secureString(TYPE, "rp.client_secret"); public static final Setting.AffixSetting RP_REDIRECT_URI = RealmSettings.simpleString(TYPE, "rp.redirect_uri", Setting.Property.NodeScope); public static final Setting.AffixSetting RP_RESPONSE_TYPE @@ -42,4 +45,13 @@ private OpenIdConnectRealmSettings() { public static final Setting.AffixSetting> RP_REQUESTED_SCOPES = Setting.affixKeySetting( RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", key -> 
Setting.listSetting(key, Collections.singletonList("openid"), Function.identity(), Setting.Property.NodeScope)); + + public static Set> getSettings() { + final Set> set = Sets.newHashSet( + OP_NAME, RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_CLIENT_SECRET, + OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ISSUER); + set.addAll(DelegatedAuthorizationSettings.getSettings(TYPE)); + set.addAll(RealmSettings.getStandardSettings(TYPE)); + return set; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java index 7ca870d9ac977..7c0fcbf791709 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.action.oidc; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -22,7 +23,7 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; -import org.elasticsearch.xpack.security.authc.saml.OpenIdConnectToken; +import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectToken; import java.util.Map; @@ -67,6 +68,7 @@ protected void doExecute(Task task, OpenIdConnectAuthenticateRequest request, tuple.v2(), expiresIn)); }, listener::onFailure), tokenMetadata, true); }, e -> { + logger.debug(() -> new 
ParameterizedMessage("OpenIDConnectToken [{}] could not be authenticated", token), e); listener.onFailure(e); } )); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java index 1bc8c678aeb82..3c304664bfb32 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -59,8 +58,8 @@ protected void doExecute(Task task, OpenIdConnectPrepareAuthenticationRequest re private void prepareAuthenticationResponse(OpenIdConnectRealm realm, @Nullable String state, @Nullable String nonce, ActionListener listener) { try { - final Tuple authenticationRequest = realm.buildAuthenticationRequest(state, nonce); - listener.onResponse(new OpenIdConnectPrepareAuthenticationResponse(authenticationRequest.v1(), authenticationRequest.v2())); + final OpenIdConnectPrepareAuthenticationResponse authenticationResponse = realm.buildAuthenticationRequestUri(state, nonce); + listener.onResponse(authenticationResponse); } catch (ElasticsearchException e) { listener.onFailure(e); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java index 54bffd8a21566..6d1087e1b95ee 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; +import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -27,6 +28,7 @@ import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.security.authc.kerberos.KerberosRealm; import org.elasticsearch.xpack.security.authc.ldap.LdapRealm; +import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; import org.elasticsearch.xpack.security.authc.pki.PkiRealm; import org.elasticsearch.xpack.security.authc.saml.SamlRealm; import org.elasticsearch.xpack.security.authc.support.RoleMappingFileBootstrapCheck; @@ -111,6 +113,7 @@ public static Map getFactories(ThreadPool threadPool, Res map.put(PkiRealmSettings.TYPE, config -> new PkiRealm(config, resourceWatcherService, nativeRoleMappingStore)); map.put(SamlRealmSettings.TYPE, config -> SamlRealm.create(config, sslService, resourceWatcherService, nativeRoleMappingStore)); map.put(KerberosRealmSettings.TYPE, config -> new KerberosRealm(config, nativeRoleMappingStore, threadPool)); + map.put(OpenIdConnectRealmSettings.TYPE, config -> new OpenIdConnectRealm(config)); return Collections.unmodifiableMap(map); } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index f5c728c405c2e..d6b24dfa4d3b7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -11,12 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.Realm; @@ -110,17 +110,19 @@ static String require(RealmConfig config, Setting.AffixSetting setting) } /** - * Creates the URI for an OIDC Authentication Request from the realm configuration using URI Query String Serialization + * Creates the URI for an OIDC Authentication Request from the realm configuration using URI Query String Serialization and possibly + * generates a state parameter. It then returns the URI and state encapsulated in a {@link OpenIdConnectPrepareAuthenticationResponse} * * @param state The oAuth2 state parameter used for CSRF protection. If the facilitator doesn't supply one, we generate one ourselves * @param nonce String value used to associate a Client session with an ID Token, and to mitigate replay attacks. 
If the facilitator * doesn't supply one, we don't set one for the authentication request - * @return a Tuple of Strings with the URI at the OP where the user's browser should be redirected for authentication and the state + * @return an {@link OpenIdConnectPrepareAuthenticationResponse} */ - public Tuple buildAuthenticationRequest(@Nullable String state, @Nullable String nonce) throws ElasticsearchException { + public OpenIdConnectPrepareAuthenticationResponse buildAuthenticationRequestUri(@Nullable String state, @Nullable String nonce) + throws ElasticsearchException { try { if (Strings.hasText(state) == false) { - state = createNonce(); + state = createNonceValue(); } StringBuilder builder = new StringBuilder(); builder.append(opConfiguration.getAuthorizationEndpoint()); @@ -132,7 +134,7 @@ public Tuple buildAuthenticationRequest(@Nullable String state, addParameter(builder, "nonce", nonce); } addParameter(builder, "redirect_uri", rpConfiguration.getRedirectUri()); - return new Tuple<>(builder.toString(), state); + return new OpenIdConnectPrepareAuthenticationResponse(builder.toString(), state); } catch (UnsupportedEncodingException e) { throw new ElasticsearchException("Cannot build OIDC Authentication Request", e); } @@ -154,7 +156,7 @@ private void addParameter(StringBuilder builder, String parameter, String value) * * @return an alphanumeric string */ - static String createNonce() { + private static String createNonceValue() { final byte[] randomBytes = new byte[16]; RANDOM_INSTANCE.nextBytes(randomBytes); return MessageDigests.toHexString(randomBytes); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java similarity index 90% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java rename to 
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java index 0932077edd4cd..f03e734e5ff2f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/OpenIdConnectToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.authc.saml; +package org.elasticsearch.xpack.security.authc.oidc; import org.elasticsearch.common.Nullable; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -56,4 +56,8 @@ public String getState() { public String getNonce() { return nonce; } + + public String toString() { + return getClass().getSimpleName() + "{ redirectUri=" + redirectUri + ", state=" + state + ", nonce=" + nonce + "}"; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java index b7ad9f935d3cd..c72ea3c364d2e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java @@ -49,6 +49,7 @@ public RestOpenIdConnectAuthenticateAction(Settings settings, RestController con protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { try (XContentParser parser = request.contentParser()) { final OpenIdConnectAuthenticateRequest authenticateRequest = PARSER.parse(parser, null); + logger.trace("OIDC Authenticate: " + 
authenticateRequest); return channel -> client.execute(OpenIdConnectAuthenticateAction.INSTANCE, authenticateRequest, new RestBuilderListener(channel) { @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java index dae51a08ef204..0ff8cb8027ad6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java @@ -48,14 +48,17 @@ public RestOpenIdConnectPrepareAuthenticationAction(Settings settings, RestContr @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { try (XContentParser parser = request.contentParser()) { - final OpenIdConnectPrepareAuthenticationRequest authenticationRequest = PARSER.parse(parser, null); - return channel -> client.execute(OpenIdConnectPrepareAuthenticationAction.INSTANCE, authenticationRequest, + final OpenIdConnectPrepareAuthenticationRequest prepareAuthenticationRequest = PARSER.parse(parser, null); + logger.trace("OIDC Prepare Authentication: " + prepareAuthenticationRequest); + return channel -> client.execute(OpenIdConnectPrepareAuthenticationAction.INSTANCE, prepareAuthenticationRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(OpenIdConnectPrepareAuthenticationResponse response, XContentBuilder builder) throws Exception { + logger.trace("OIDC Prepare Authentication Response: " + response); builder.startObject(); - builder.field("authorization_endpoint_url", response.getAuthorizationEndpointUrl()); + builder.field("authentication_request_url", 
response.getAuthenticationRequestUrl()); + builder.field("state", response.getState()); builder.endObject(); return new BytesRestResponse(RestStatus.OK, builder); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java index f9007583c2ca1..e3298e5103772 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; +import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -61,7 +62,7 @@ public void testIsStandardType() { String type = randomFrom(NativeRealmSettings.TYPE, FileRealmSettings.TYPE, LdapRealmSettings.AD_TYPE, LdapRealmSettings.LDAP_TYPE, PkiRealmSettings.TYPE); assertThat(InternalRealms.isStandardRealm(type), is(true)); - type = randomFrom(SamlRealmSettings.TYPE, KerberosRealmSettings.TYPE); + type = randomFrom(SamlRealmSettings.TYPE, KerberosRealmSettings.TYPE, OpenIdConnectRealmSettings.TYPE); assertThat(InternalRealms.isStandardRealm(type), is(false)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index 593904fd3e823..310f08db6cf4a 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -6,13 +6,13 @@ package org.elasticsearch.xpack.security.authc.oidc; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.hamcrest.Matchers; @@ -134,10 +134,11 @@ public void testBuilidingAuthenticationRequest() { final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); final String nonce = randomAlphaOfLength(12); final String state = randomAlphaOfLength(12); - final Tuple authenticationRequest = realm.buildAuthenticationRequest(state, nonce); - assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2" - + "&client_id=rp-my&state=" + state + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); - assertThat(authenticationRequest.v2(), equalTo(state)); + final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(state, nonce); + assertThat(response.getAuthenticationRequestUrl(), + equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2&client_id=rp-my&state=" + state + + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + assertThat(response.getState(), equalTo(state)); } public void 
testBuilidingAuthenticationRequestWithoutState() { @@ -152,10 +153,11 @@ public void testBuilidingAuthenticationRequestWithoutState() { Arrays.asList("openid", "scope1", "scope2")); final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); final String nonce = randomAlphaOfLength(12); - final Tuple authenticationRequest = realm.buildAuthenticationRequest(null, nonce); - final String generatedState = authenticationRequest.v2(); - assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2" - + "&client_id=rp-my&state=" + generatedState + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, nonce); + final String generatedState = response.getState(); + assertThat(response.getAuthenticationRequestUrl(), + equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2&client_id=rp-my&state=" + + generatedState + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); } public void testBuilidingAuthenticationRequestWithoutStateAndNonce() { @@ -169,10 +171,11 @@ public void testBuilidingAuthenticationRequestWithoutStateAndNonce() { .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), Arrays.asList("openid", "scope1", "scope2")); final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - final Tuple authenticationRequest = realm.buildAuthenticationRequest(null, null); - final String generatedState = authenticationRequest.v2(); - assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2" - + "&client_id=rp-my&state=" + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, 
null); + final String generatedState = response.getState(); + assertThat(response.getAuthenticationRequestUrl(), + equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2&client_id=rp-my&state=" + + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); } public void testBuilidingAuthenticationRequestWithDefaultScope() { @@ -184,9 +187,9 @@ public void testBuilidingAuthenticationRequestWithDefaultScope() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - final Tuple authenticationRequest = realm.buildAuthenticationRequest(null, null); - final String generatedState = authenticationRequest.v2(); - assertThat(authenticationRequest.v1(), equalTo("https://op.example.com/login?response_type=code&scope=openid" + final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null); + final String generatedState = response.getState(); + assertThat(response.getAuthenticationRequestUrl(), equalTo("https://op.example.com/login?response_type=code&scope=openid" + "&client_id=rp-my&state=" + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); } From aeacfc567838913ee1730253980cf2a047677d9b Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 28 Dec 2018 01:33:54 +0200 Subject: [PATCH 09/71] Correct javadoc --- .../security/authc/oidc/OpenIdConnectToken.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java index f03e734e5ff2f..2fca308fdc4e3 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java @@ -10,8 +10,8 @@ /** * A {@link AuthenticationToken} to hold OpenID Connect related content. - * Depending on the flow this can content only a code ( oAuth2 authorization code - * grant flow ) or an Identity Token ( oAuth2 implicit flow ) + * Depending on the flow the token can contain only a code ( oAuth2 authorization code + * grant flow ) or even an Identity Token ( oAuth2 implicit flow ) */ public class OpenIdConnectToken implements AuthenticationToken { @@ -21,12 +21,12 @@ public class OpenIdConnectToken implements AuthenticationToken { private String nonce; /** - * @param redirectUri The URI were the OP redirected the browser after the authentication attempt. This is passed as is from the - * facilitator entity (i.e. Kibana), so it is URL Encoded + * @param redirectUri The URI were the OP redirected the browser after the authentication event at the OP. This is passed as is from the + * facilitator entity (i.e. Kibana), so it is URL Encoded. * @param state The state value that either we or the facilitator generated for this specific flow and that was stored - * at the user's session with the facilitator + * at the user's session with the facilitator. * @param nonce The nonce value that the facilitator generated for this specific flow and that was stored at the user's - * session with the facilitator + * session with the facilitator. 
*/ public OpenIdConnectToken(String redirectUri, String state, String nonce) { this.redirectUri = redirectUri; From 845dc36a90b9e7bcf9b54ddf36dee46932f36887 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 28 Dec 2018 01:53:41 +0200 Subject: [PATCH 10/71] Add serialiazion and tests for the AuthenticateRequest --- .../OpenIdConnectAuthenticateRequest.java | 22 +++++++++++- .../RestOpenIdConnectAuthenticateAction.java | 2 +- ...OpenIdConnectAuthenticateRequestTests.java | 36 +++++++++++++++++++ 3 files changed, 58 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java index 745945b0a4ca4..44d8fb44322d4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java @@ -7,6 +7,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; /** * Represents a request for authentication using OpenID Connect @@ -26,7 +30,7 @@ public class OpenIdConnectAuthenticateRequest extends ActionRequest { private String state; /** - * The nonce value that the facilitator generated for this specific flow and that was stored at the user's session with + * The nonce value that the facilitator generated for this specific flow and that was stored at the user's session with * the facilitator */ private String nonce; @@ 
-63,6 +67,22 @@ public ActionRequestValidationException validate() { return null; } + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(redirectUri); + out.writeString(state); + out.writeOptionalString(nonce); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + redirectUri = in.readString(); + state = in.readString(); + nonce = in.readOptionalString(); + } + public String toString() { return "{redirectUri=" + redirectUri + ", state=" + state + ", nonce=" + nonce + "}"; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java index c72ea3c364d2e..d7130da64530a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectAuthenticateAction.java @@ -27,7 +27,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; /** - * Rest handler that authenticates the user based on the provided oauth2 code + * Rest handler that authenticates the user based on the information provided as parameters of the redirect_uri */ public class RestOpenIdConnectAuthenticateAction extends OpenIdConnectBaseRestHandler { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java new file mode 100644 index 0000000000000..0d8142fc6fdf0 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.oidc; + + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateRequest; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class OpenIdConnectAuthenticateRequestTests extends ESTestCase { + + public void testSerialization() throws IOException { + final OpenIdConnectAuthenticateRequest request = new OpenIdConnectAuthenticateRequest(); + final String nonce = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); + final String state = randomAlphaOfLengthBetween(8, 12); + final String redirectUri = "https://rp.com/cb?code=thisisacode&state=" + state; + request.setRedirectUri(redirectUri); + request.setState(state); + request.setNonce(nonce); + final BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + + final OpenIdConnectAuthenticateRequest unserialized = new OpenIdConnectAuthenticateRequest(); + unserialized.readFrom(out.bytes().streamInput()); + assertThat(unserialized.getRedirectUri(), equalTo(redirectUri)); + assertThat(unserialized.getState(), equalTo(state)); + assertThat(unserialized.getNonce(), equalTo(nonce)); + } +} From 21559ddce04877853147dcf73efcd820ecd1891d Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 28 Dec 2018 13:04:34 +0200 Subject: [PATCH 11/71] Add JWT model and builder Being an RP, we won't need to create JWTs but this will be useful for testing --- .../authc/support/jwt/JsonWebToken.java | 81 
+++++++++++++ .../support/jwt/JsonWebTokenBuilder.java | 113 ++++++++++++++++++ .../authc/support/jwt/JsonWebTokenUtils.java | 9 ++ 3 files changed, 203 insertions(+) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java new file mode 100644 index 0000000000000..0fa1e7c9150c0 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +/** + * A class that represents a JWT according to https://tools.ietf.org/html/rfc7519 + */ +public class JsonWebToken { + private Map header; + private Map payload; + private String signature; + + public JsonWebToken(Map header, Map payload) { + this.header = header; + this.payload = payload; + } + + /** + * Encodes the JWT as defined by https://tools.ietf.org/html/rfc7515#section-7 + * + * @return + */ + public String encode() { + try { + String headerString = Base64.getEncoder().encodeToString(mapToJsonBytes(header)); + String payloadString = Base64.getEncoder().encodeToString(mapToJsonBytes(payload)); + String signatureString = Strings.hasText(signature) ? + Base64.getEncoder().encodeToString(signature.getBytes(StandardCharsets.UTF_8.name())) : + ""; + return headerString + "." + payloadString + "." + signatureString; + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + /** + * Create a string representation of the JWT. 
Used for logging and debugging, not for JWT serialization + * + * @return a string representation of the JWT + */ + public String toString() { + return "{header=" + header + ", payload=" + payload + ", signature=" + signature + "}"; + } + + private String mapToJsonString(Map map) throws IOException { + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + for (Map.Entry entry : map.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); + return BytesReference.bytes(builder).utf8ToString(); + } + } + + private byte[] mapToJsonBytes(Map map) throws IOException { + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + for (Map.Entry entry : map.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); + return BytesReference.toBytes(BytesReference.bytes(builder)); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java new file mode 100644 index 0000000000000..f9c49ecdfd0ca --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.common.Strings; + +import java.time.ZonedDateTime; +import java.util.HashMap; +import java.util.Map; + +public class JsonWebTokenBuilder { + + private String issuer; + private String subject; + private String audience; + private ZonedDateTime expirationTime; + private ZonedDateTime notBefore; + private ZonedDateTime issuedAt; + private String jwtId; + private String type; + private String algorithm; + private Map claims; + + public JsonWebTokenBuilder issuer(String issuer) { + this.issuer = issuer; + return this; + } + + public JsonWebTokenBuilder subject(String subject) { + this.subject = subject; + return this; + } + + public JsonWebTokenBuilder audience(String audience) { + this.audience = audience; + return this; + } + + public JsonWebTokenBuilder jwtId(String jwtId) { + this.jwtId = jwtId; + return this; + } + + public JsonWebTokenBuilder expirationTime(ZonedDateTime expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public JsonWebTokenBuilder notBefore(ZonedDateTime notBefore) { + this.notBefore = notBefore; + return this; + } + + public JsonWebTokenBuilder issuedAt(ZonedDateTime issuedAt) { + this.issuedAt = issuedAt; + return this; + } + + public JsonWebTokenBuilder type(String type) { + if (Strings.hasText(type) == false) { + throw new IllegalArgumentException("JWT type cannot be null or empty"); + } + this.type = type; + return this; + } + + public JsonWebTokenBuilder algorithm(String algorithm) { + if (Strings.hasText(algorithm) == false) { + throw new IllegalArgumentException("JWT signing algorithm cannot be null or empty"); + } + this.algorithm = algorithm; + return this; + } + + public JsonWebTokenBuilder claim(String name, Object value) { + if (Strings.hasText(name) == false) { + throw new IllegalArgumentException("Claim name cannot be null or empty"); + } + getOrCreateClaims().put(name, value); + return this; + } + + private 
Map getOrCreateClaims() { + if (claims == null) { + claims = new HashMap<>(); + return claims; + } else { + return claims; + } + } + + public JsonWebToken build() { + final Map header = new HashMap<>(); + header.put("typ", type); + header.put("alg", algorithm); + + final Map payload = new HashMap<>(); + payload.put("iss", issuer); + payload.put("sub", subject); + payload.put("aud", audience); + payload.put("exp", expirationTime); + payload.put("nbf", notBefore); + payload.put("iat", issuedAt); + payload.put("jti", jwtId); + if (claims != null) { + payload.putAll(claims); + } + return new JsonWebToken(header, payload); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java new file mode 100644 index 0000000000000..ce9b43ca23d8f --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +public class JsonWebTokenUtils { +} From 59f9284d276e0722ec1a0d5ed9ff928d02570c88 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 4 Jan 2019 20:03:22 +0200 Subject: [PATCH 12/71] Add JsonWebToken parser - Reworked builder - Added parser that deserializes and parses a JWT TODO: Signature validation --- .../oidc/OpenIdConnectRealmSettings.java | 5 +- .../authc/oidc/OpenIdConnectRealm.java | 5 +- .../security/authc/oidc/RPConfiguration.java | 14 +- .../security/authc/support/jwt/Claims.java | 80 ++++++ .../authc/support/jwt/JsonWebToken.java | 19 +- .../support/jwt/JsonWebTokenBuilder.java | 245 ++++++++++++++++-- .../authc/support/jwt/JsonWebTokenParser.java | 125 +++++++++ .../support/JsonWebTokenParserTests.java | 116 +++++++++ .../authc/support/JsonWebTokenTests.java | 32 +++ 9 files changed, 615 insertions(+), 26 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java index 5d51d23c3c69a..36ec5062c3f03 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -45,10 +45,13 @@ private 
OpenIdConnectRealmSettings() { public static final Setting.AffixSetting> RP_REQUESTED_SCOPES = Setting.affixKeySetting( RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", key -> Setting.listSetting(key, Collections.singletonList("openid"), Function.identity(), Setting.Property.NodeScope)); + public static final Setting.AffixSetting> RP_ALLOWED_SCOPES = Setting.affixKeySetting( + RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", + key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Setting.Property.NodeScope)); public static Set> getSettings() { final Set> set = Sets.newHashSet( - OP_NAME, RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_CLIENT_SECRET, + OP_NAME, RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_ALLOWED_SCOPES, RP_CLIENT_SECRET, OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ISSUER); set.addAll(DelegatedAuthorizationSettings.getSettings(TYPE)); set.addAll(RealmSettings.getStandardSettings(TYPE)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index d6b24dfa4d3b7..3f35ee19cdc77 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -36,6 +36,7 @@ import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_NAME; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_USERINFO_ENDPOINT; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SCOPES; import static 
org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_ID; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REDIRECT_URI; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_RESPONSE_TYPE; @@ -86,8 +87,10 @@ private RPConfiguration buildRPConfiguration(RealmConfig config) { String responseType = require(config, RP_RESPONSE_TYPE); List requestedScopes = config.hasSetting(RP_REQUESTED_SCOPES) ? config.getSetting(RP_REQUESTED_SCOPES) : Collections.emptyList(); + List allowedScopes = config.hasSetting(RP_ALLOWED_SCOPES) ? + config.getSetting(RP_ALLOWED_SCOPES) : Collections.emptyList(); - return new RPConfiguration(clientId, redirectUri, responseType, requestedScopes); + return new RPConfiguration(clientId, redirectUri, responseType, requestedScopes, allowedScopes); } private OPConfiguration buildOPConfiguration(RealmConfig config) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java index 20876ddb7bb6a..9ce3b403864ed 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; +import org.opensaml.xmlsec.signature.P; import java.util.Collections; import java.util.List; @@ -20,8 +21,10 @@ public class RPConfiguration { private final String redirectUri; private final String responseType; private final List requestedScopes; + private final List allowedScopes; - public RPConfiguration(String clientId, String redirectUri, String responseType, @Nullable List requestedScopes) { + public RPConfiguration(String clientId, String redirectUri, 
String responseType, @Nullable List requestedScopes, + @Nullable List allowedScopes) { this.clientId = Objects.requireNonNull(clientId, "RP Client ID must be provided"); this.redirectUri = Objects.requireNonNull(redirectUri, "RP Redirect URI must be provided"); if (Strings.hasText(responseType) == false) { @@ -36,6 +39,11 @@ public RPConfiguration(String clientId, String redirectUri, String responseType, } else { this.requestedScopes = requestedScopes; } + if (allowedScopes != null) { + this.allowedScopes = allowedScopes; + } else { + this.allowedScopes = Collections.emptyList(); + } } public String getClientId() { @@ -53,4 +61,8 @@ public String getResponseType() { public List getRequestedScopes() { return requestedScopes; } + + public List getAllowedScopes() { + return allowedScopes; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java new file mode 100644 index 0000000000000..4f5db00c5f5d0 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java @@ -0,0 +1,80 @@ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class Claims { + + public enum HeaderClaims { + TYPE("type"), + ALGORITHM("alg"), + CONTENT_TYPE("cty"), + KEY_ID("kid"); + + private String name; + + HeaderClaims(String name) { + this.name = name; + } + + public String getClaimName() { + return name; + } + + public static List validHeaderClaims() { + return Stream.of(HeaderClaims.values()).map(HeaderClaims::getClaimName).collect(Collectors.toList()); + } + + } + + public enum StandardClaims { + + ISSUER("iss"), + SUBJECT("sub"), + AUDIENCE("aud"), + EXPIRATION_TIME("exp"), + NOT_BEFORE("nbf"), + ISSUED_AT("iat"), + NONCE("nonce"), + 
AUTHN_CONTEXT_CLASS_REF("acr"), + AUTHN_METHODS_REF("amr"), + AUTHORIZED_PARTY("azp"), + AUTH_TIME("auth_time"), + JWTID("jti"), + NAME("name"), + GIVEN_NAME("given_name"), + MIDDLE_NAME("middle_name"), + FAMILY_NAME("family_name"), + NICKNAME("nickname"), + PREFERRED_USERNAME("preferred_username"), + PROFILE("profile"), + PICTURE("picture"), + WEBSITE("website"), + EMAIL("email"), + EMAIL_VERIFIED("email_verified"), + GENDER("gender"), + BIRTHDATE("birthdate"), + ZONEINFO("zoneinfo"), + LOCALE("locale"), + PHONE_NUMBER("phone_number"), + PHONE_NUMBER_VERIFIED("phone_number_verified"), + ADDRESS("address"), + UPDATED_AT("updated_at"); + + private String name; + + StandardClaims(String name) { + this.name = name; + } + + public String getClaimName() { + return name; + } + + public static List getKnownClaims() { + return Stream.of(StandardClaims.values()).map(StandardClaims::getClaimName).collect(Collectors.toList()); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java index 0fa1e7c9150c0..eef54840482a6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java @@ -18,7 +18,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; /** - * A class that represents a JWT according to https://tools.ietf.org/html/rfc7519 + * A class that represents an OpenID Connect ID token according to https://tools.ietf.org/html/rfc7519. 
*/ public class JsonWebToken { private Map header; @@ -30,17 +30,26 @@ public JsonWebToken(Map header, Map payload) { this.payload = payload; } + public Map getHeader() { + return header; + } + + public Map getPayload() { + return payload; + } + /** * Encodes the JWT as defined by https://tools.ietf.org/html/rfc7515#section-7 * - * @return + * @return The serialized JWT */ public String encode() { try { - String headerString = Base64.getEncoder().encodeToString(mapToJsonBytes(header)); - String payloadString = Base64.getEncoder().encodeToString(mapToJsonBytes(payload)); + // Base64 url encoding is defined in https://tools.ietf.org/html/rfc7515#appendix-C + String headerString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(header)); + String payloadString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(payload)); String signatureString = Strings.hasText(signature) ? - Base64.getEncoder().encodeToString(signature.getBytes(StandardCharsets.UTF_8.name())) : + Base64.getUrlEncoder().withoutPadding().encodeToString(signature.getBytes(StandardCharsets.UTF_8.name())) : ""; return headerString + "." + payloadString + "." 
+ signatureString; } catch (IOException e) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java index f9c49ecdfd0ca..74d59a3af29a7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java @@ -7,23 +7,143 @@ import org.elasticsearch.common.Strings; -import java.time.ZonedDateTime; +import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; +/** + * Allows easier creation of {@link JsonWebToken}. Provides setters for the required and the standard claims + * as defined in https://openid.net/specs/openid-connect-core-1_0.html + */ public class JsonWebTokenBuilder { private String issuer; private String subject; - private String audience; - private ZonedDateTime expirationTime; - private ZonedDateTime notBefore; - private ZonedDateTime issuedAt; + private List audiences; + private long expirationTime = -1; + private long notBefore = -1; + private long issuedAt = -1; + private long updatedAt = -1; + private long authTime = -1; + private String nonce; + private String authenticationContextClassReference; + private List authenticationMethodsReferences; + private String authorizedParty; private String jwtId; private String type; private String algorithm; + private String name; + private String givenName; + private String middleName; + private String familyName; + private String nickname; + private String preferredUsername; + private String profile; + private String picture; + private String website; + private String email; + private Boolean emailVerified; + private String gender; + private String birthdate; + private String zoneinfo; + 
private String locale; + private String phoneNumber; + private Boolean phoneNumberVerified; + private Map address; private Map claims; + public JsonWebTokenBuilder name(String name) { + this.name = name; + return this; + } + + public JsonWebTokenBuilder givenName(String givenName) { + this.givenName = givenName; + return this; + } + + public JsonWebTokenBuilder middleName(String middleName) { + this.middleName = middleName; + return this; + } + + public JsonWebTokenBuilder familyName(String familyName) { + this.familyName = familyName; + return this; + } + + public JsonWebTokenBuilder nickname(String nickname) { + this.nickname = nickname; + return this; + } + + public JsonWebTokenBuilder preferredUsername(String preferredUsername) { + this.preferredUsername = preferredUsername; + return this; + } + + public JsonWebTokenBuilder profile(String profile) { + this.profile = profile; + return this; + } + + public JsonWebTokenBuilder picture(String picture) { + this.picture = picture; + return this; + } + + public JsonWebTokenBuilder website(String website) { + this.website = website; + return this; + } + + public JsonWebTokenBuilder email(String email) { + this.email = email; + return this; + } + + public JsonWebTokenBuilder emailVerified(boolean emailVerified) { + this.emailVerified = emailVerified; + return this; + } + + public JsonWebTokenBuilder gender(String gender) { + this.gender = gender; + return this; + } + + public JsonWebTokenBuilder birthdate(String birthdate) { + this.birthdate = birthdate; + return this; + } + + public JsonWebTokenBuilder zoneinfo(String zoneinfo) { + this.zoneinfo = zoneinfo; + return this; + } + + public JsonWebTokenBuilder locale(String locale) { + this.locale = locale; + return this; + } + + public JsonWebTokenBuilder phoneNumber(String phoneNumber) { + this.phoneNumber = phoneNumber; + return this; + } + + public JsonWebTokenBuilder phoneNumberVerified(boolean phoneNumberVerified) { + this.phoneNumberVerified = phoneNumberVerified; + 
return this; + } + + public JsonWebTokenBuilder address(Map address) { + this.address = address; + return this; + } + public JsonWebTokenBuilder issuer(String issuer) { this.issuer = issuer; return this; @@ -35,7 +155,10 @@ public JsonWebTokenBuilder subject(String subject) { } public JsonWebTokenBuilder audience(String audience) { - this.audience = audience; + if (null == this.audiences) { + this.audiences = new ArrayList<>(); + } + this.audiences.add(audience); return this; } @@ -44,21 +167,54 @@ public JsonWebTokenBuilder jwtId(String jwtId) { return this; } - public JsonWebTokenBuilder expirationTime(ZonedDateTime expirationTime) { + public JsonWebTokenBuilder expirationTime(long expirationTime) { this.expirationTime = expirationTime; return this; } - public JsonWebTokenBuilder notBefore(ZonedDateTime notBefore) { + public JsonWebTokenBuilder notBefore(long notBefore) { this.notBefore = notBefore; return this; } - public JsonWebTokenBuilder issuedAt(ZonedDateTime issuedAt) { + public JsonWebTokenBuilder issuedAt(long issuedAt) { this.issuedAt = issuedAt; return this; } + public JsonWebTokenBuilder updatedAt(long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public JsonWebTokenBuilder authTime(long authTime) { + this.authTime = authTime; + return this; + } + + public JsonWebTokenBuilder nonce(String nonce) { + this.nonce = nonce; + return this; + } + + public JsonWebTokenBuilder authenticationContectClassReference(String authenticationContextClassReference) { + this.authenticationContextClassReference = authenticationContextClassReference; + return this; + } + + public JsonWebTokenBuilder authenticationMethodsReferences(String authenticationMethodsReferences) { + if (this.authenticationMethodsReferences == null) { + this.authenticationMethodsReferences = new ArrayList<>(); + } + this.authenticationMethodsReferences.add(authenticationMethodsReferences); + return this; + } + + public JsonWebTokenBuilder authorizedParty(String authorizedParty) { 
+ this.authorizedParty = authorizedParty; + return this; + } + public JsonWebTokenBuilder type(String type) { if (Strings.hasText(type) == false) { throw new IllegalArgumentException("JWT type cannot be null or empty"); @@ -94,20 +250,73 @@ private Map getOrCreateClaims() { public JsonWebToken build() { final Map header = new HashMap<>(); - header.put("typ", type); - header.put("alg", algorithm); + putIfNotNull(header, "typ", type); + putIfNotNull(header, "alg", algorithm); final Map payload = new HashMap<>(); - payload.put("iss", issuer); - payload.put("sub", subject); - payload.put("aud", audience); - payload.put("exp", expirationTime); - payload.put("nbf", notBefore); - payload.put("iat", issuedAt); - payload.put("jti", jwtId); + putIfNotNull(payload, "iss", issuer); + putIfNotNull(payload, "sub", subject); + putIfNotNull(payload, "aud", audiences); + if (expirationTime != -1) { + putIfNotNull(payload, "exp", expirationTime); + } + if (notBefore != -1) { + putIfNotNull(payload, "nbf", notBefore); + } + if (issuedAt != -1) { + putIfNotNull(payload, "iat", issuedAt); + } + if (authTime != -1) { + putIfNotNull(payload, "auth_time", authTime); + } + putIfNotNull(payload, "nonce", nonce); + putIfNotNull(payload, "acr", authenticationContextClassReference); + putIfNotNull(payload, "amr", authenticationMethodsReferences); + putIfNotNull(payload, "azp", authorizedParty); + putIfNotNull(payload, "jti", jwtId); + putIfNotNull(payload, "name", name); + putIfNotNull(payload, "given_name", givenName); + putIfNotNull(payload, "middle_name", middleName); + putIfNotNull(payload, "family_name", familyName); + putIfNotNull(payload, "nickname", nickname); + putIfNotNull(payload, "preferred_username", preferredUsername); + putIfNotNull(payload, "profile", profile); + putIfNotNull(payload, "picture", picture); + putIfNotNull(payload, "website", website); + putIfNotNull(payload, "email", email); + putIfNotNull(payload, "email_verified", emailVerified); + putIfNotNull(payload, 
"gender", gender); + putIfNotNull(payload, "birthdate", birthdate); + putIfNotNull(payload, "zoneinfo", zoneinfo); + putIfNotNull(payload, "locale", locale); + putIfNotNull(payload, "phone_number", phoneNumber); + putIfNotNull(payload, "phone_number_verified", phoneNumberVerified); + putIfNotNull(payload, "address", address); + if (updatedAt != -1) { + putIfNotNull(payload, "updated_at", updatedAt); + } if (claims != null) { payload.putAll(claims); } return new JsonWebToken(header, payload); } + + /** + * Adds a key - value pair to a Map, only if the value is not null. + * + * @param map The Map to add the key value entry to + * @param key The key to populate + * @param value The value to add to the respective key + */ + private void putIfNotNull(Map map, String key, Object value) { + if (null == map) { + throw new IllegalArgumentException("The map must be provided"); + } + if (key == null) { + throw new IllegalArgumentException("The key must be provided"); + } + if (value != null) { + map.put(key, value); + } + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java new file mode 100644 index 0000000000000..67dfc1cc08185 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue; + +/** + * Contains the necessary functionality for parsing a serialized OpenID Connect ID Token to a {@link JsonWebToken} + */ +public class JsonWebTokenParser { + private final RPConfiguration rpConfig; + + public JsonWebTokenParser(RPConfiguration rpConfig) { + this.rpConfig = rpConfig; + } + + public final JsonWebToken parseJwt(String jwt) throws IOException { + final JsonWebTokenBuilder builder = new JsonWebTokenBuilder(); + final String[] jwtParts = jwt.split("\\."); + if (jwtParts.length != 3) { + throw new IllegalArgumentException("The provided token is not a valid JWT"); + } + final String deserializedHeader = deserializePart(jwtParts[0]); + final String deserializedPayload = deserializePart(jwtParts[1]); + final byte[] deserializedSignature = Strings.hasText(jwtParts[2]) ? 
Base64.getUrlDecoder().decode(jwtParts[2]) : new byte[0]; + final Map headerMap = parseHeader(deserializedHeader); + final Map payloadMap = parsePayload(deserializedPayload); + return new JsonWebToken(headerMap, payloadMap); + } + + private static final String deserializePart(String encodedString) throws IOException { + return new String(Base64.getUrlDecoder().decode(encodedString), StandardCharsets.UTF_8.name()); + } + + private final Map parseHeader(String headerJson) throws IOException { + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, headerJson)) { + final Map headerMap = new HashMap<>(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + XContentParser.Token token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (Claims.HeaderClaims.validHeaderClaims().contains(currentFieldName)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); + if (Strings.hasText(parser.text())) { + headerMap.put(currentFieldName, parser.text()); + } + } else { + parser.skipChildren(); + } + } + return headerMap; + } + } + + private final Map parsePayload(String payloadJson) throws IOException { + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, payloadJson)) { + final Map payloadMap = new HashMap<>(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + XContentParser.Token token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == 
XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (currentFieldName.equals(Claims.StandardClaims.AUDIENCE.getClaimName())) { + if (token == XContentParser.Token.START_ARRAY) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + XContentParserUtils. + ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); + payloadMap.put(currentFieldName, parseFieldsValue(parser)); + } + } else { + XContentParserUtils. + ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); + payloadMap.put(currentFieldName, Collections.singletonList(parseFieldsValue(parser))); + } + + } else if (Claims.StandardClaims.getKnownClaims().contains(currentFieldName)) { + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + String message = "Failed to parse object: null value for field"; + throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, message, currentFieldName)); + } else if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { + Number number = (Number) parseFieldsValue(parser); + payloadMap.put(currentFieldName, number.longValue()); + } else { + payloadMap.put(currentFieldName, parseFieldsValue(parser)); + } + } else if (this.rpConfig.getAllowedScopes().contains(currentFieldName)) { + if (Strings.hasText(parser.text())) { + payloadMap.put(currentFieldName, parseFieldsValue(parser)); + } + } else { + parser.skipChildren(); + } + } + return payloadMap; + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java new file mode 100644 index 0000000000000..d91fe1d663530 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java 
@@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; +import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; +import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebTokenParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class JsonWebTokenParserTests extends ESTestCase { + + public void testIdTokenParsing() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViIjo" + + "iMjQ4Mjg5NzYxMDAxIiwiYXVkIjoiczZCaGRSa3F0MyIsIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiZXhwIjoxMzExMjgxOTcwLCJpYXQiOjEzMTEyODA5NzAsIm" + + "5hbWUiOiJKYW5lIERvZSIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLCJiaXJ0aGRhdGUiOiIxOTk0L" + + "TEwLTMxIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG9lL21lLmpwZyJ9.XY8hKQ6nx8K" + + "EfuB907SuImosemSt7qPlg3HAJH85JKI"; + + JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, null)); + JsonWebToken jwt = jwtParser.parseJwt(serializedJwt); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getPayload().containsKey("aud")); + List aud = (List) jwt.getPayload().get("aud"); + assertThat(aud.size(), equalTo(1)); + 
assertTrue(aud.contains("s6BhdRkqt3")); + assertTrue(jwt.getPayload().containsKey("nonce")); + assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); + assertTrue(jwt.getPayload().containsKey("exp")); + assertThat(jwt.getPayload().get("exp"), equalTo(1311281970L)); + assertTrue(jwt.getPayload().containsKey("iat")); + assertThat(jwt.getPayload().get("iat"), equalTo(1311280970L)); + assertTrue(jwt.getPayload().containsKey("name")); + assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(jwt.getPayload().containsKey("given_name")); + assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(jwt.getPayload().containsKey("family_name")); + assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(jwt.getPayload().containsKey("gender")); + assertThat(jwt.getPayload().get("gender"), equalTo("female")); + assertTrue(jwt.getPayload().containsKey("birthdate")); + assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(jwt.getPayload().containsKey("email")); + assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(jwt.getPayload().containsKey("picture")); + assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + assertTrue(jwt.getHeader().containsKey("kid")); + assertThat(jwt.getHeader().get("kid"), equalTo("1e9gdk7")); + + } + + public void testIdTokenWithPrivateClaimsParsing() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViIjo" + + "iMjQ4Mjg5NzYxMDAxIiwiYXVkIjoiczZCaGRSa3F0MyIsIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiZXhwIjoxMzExMjgxOTcwLCJpYXQiOjEzMTEyODA5NzAsIm" + + "5hbWUiOiJKYW5lIERvZSIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLCJjbGFpbTEiOiJ2YWx1ZTEiL" + + 
"CJjbGFpbTIiOiJ2YWx1ZTIiLCJjbGFpbTMiOiJ2YWx1ZTMiLCJjbGFpbTQiOiJ2YWx1ZTQiLCJiaXJ0aGRhdGUiOiIxOTk0LTEwLTMxIiwiZW1haWwiOiJqYW5l" + + "ZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG9lL21lLmpwZyJ9.5zBKLOfp-mkALbE-Uvs7wI4mEbzLdOxOlMeoDy" + + "op8MM"; + JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, + Arrays.asList("claim1", "claim2", "claim3", "claim4"))); + JsonWebToken jwt = jwtParser.parseJwt(serializedJwt); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getPayload().containsKey("aud")); + List aud = (List) jwt.getPayload().get("aud"); + assertThat(aud.size(), equalTo(1)); + assertTrue(aud.contains("s6BhdRkqt3")); + assertTrue(jwt.getPayload().containsKey("nonce")); + assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); + assertTrue(jwt.getPayload().containsKey("exp")); + assertThat(jwt.getPayload().get("exp"), equalTo(1311281970L)); + assertTrue(jwt.getPayload().containsKey("iat")); + assertThat(jwt.getPayload().get("iat"), equalTo(1311280970L)); + assertTrue(jwt.getPayload().containsKey("name")); + assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(jwt.getPayload().containsKey("given_name")); + assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(jwt.getPayload().containsKey("family_name")); + assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(jwt.getPayload().containsKey("gender")); + assertThat(jwt.getPayload().get("gender"), equalTo("female")); + assertTrue(jwt.getPayload().containsKey("birthdate")); + assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(jwt.getPayload().containsKey("email")); + assertThat(jwt.getPayload().get("email"), 
equalTo("janedoe@example.com")); + assertTrue(jwt.getPayload().containsKey("picture")); + assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(jwt.getPayload().containsKey("claim1")); + assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); + assertTrue(jwt.getPayload().containsKey("claim2")); + assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); + assertTrue(jwt.getPayload().containsKey("claim3")); + assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); + assertTrue(jwt.getPayload().containsKey("claim4")); + assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + assertTrue(jwt.getHeader().containsKey("kid")); + assertThat(jwt.getHeader().get("kid"), equalTo("1e9gdk7")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java new file mode 100644 index 0000000000000..62695f950db30 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class JsonWebTokenTests extends ESTestCase { + + public void testUnsignedJwtEncoding() { + Map header = new HashMap<>(); + Map payload = new HashMap<>(); + header.put("alg", "none"); + header.put("typ", "JWT"); + payload.put("sub", "ironman"); + payload.put("name", "Tony Stark"); + payload.put("iat", 1516239022L); + final JsonWebToken jwt = new JsonWebToken(header, payload); + // This is not an example of JWT validation. Order affects string representation + // and string representation equality is no means to validate a JWT + assertThat(jwt.encode(), equalTo("eyJ0eXAiOiJKV1QiLCJhbGciOiJub25lIn0" + + ".eyJzdWIiOiJpcm9ubWFuIiwibmFtZSI6IlRvbnkgU3RhcmsiLCJpYXQiOjE1MTYyMzkwMjJ9.")); + } +} From cd03bdf1c1f774edd00c888c40520917daf5f94a Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 8 Jan 2019 01:52:36 +0200 Subject: [PATCH 13/71] Add support for signing and signature validation --- .../authc/oidc/OpenIdConnectRealm.java | 2 + .../security/authc/oidc/RPConfiguration.java | 4 +- .../security/authc/support/jwt/Claims.java | 79 ++++++----- .../support/jwt/EcSignatureValidator.java | 62 +++++++++ .../security/authc/support/jwt/EcSigner.java | 40 ++++++ .../support/jwt/HmacSignatureValidator.java | 69 ++++++++++ .../authc/support/jwt/HmacSigner.java | 43 ++++++ .../authc/support/jwt/JsonWebToken.java | 59 +++++++-- .../authc/support/jwt/JsonWebTokenParser.java | 123 +++++++++++++++--- ...java => JwsSignatureValidatorFactory.java} | 2 +- .../support/jwt/JwtSignatureValidator.java | 11 ++ .../security/authc/support/jwt/JwtSigner.java | 11 ++ .../support/jwt/RsaSignatureValidator.java | 62 +++++++++ .../security/authc/support/jwt/RsaSigner.java | 40 ++++++ 
.../authc/support/jwt/SignatureAlgorithm.java | 55 ++++++++ .../support/JsonWebTokenParserTests.java | 80 ++++++++++-- .../authc/support/JsonWebTokenTests.java | 3 +- 17 files changed, 668 insertions(+), 77 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java rename x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/{JsonWebTokenUtils.java => JwsSignatureValidatorFactory.java} (87%) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidator.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index 3f35ee19cdc77..85f9d782aa707 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -38,6 +39,7 @@ import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_USERINFO_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SCOPES; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_ID; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_SECRET; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REDIRECT_URI; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_RESPONSE_TYPE; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java index 9ce3b403864ed..6f121f3c094e3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java @@ -7,7 +7,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.opensaml.xmlsec.signature.P; import java.util.Collections; import java.util.List; @@ -23,7 +22,8 @@ public class RPConfiguration { 
private final List requestedScopes; private final List allowedScopes; - public RPConfiguration(String clientId, String redirectUri, String responseType, @Nullable List requestedScopes, + public RPConfiguration(String clientId, String redirectUri, String responseType, + @Nullable List requestedScopes, @Nullable List allowedScopes) { this.clientId = Objects.requireNonNull(clientId, "RP Client ID must be provided"); this.redirectUri = Objects.requireNonNull(redirectUri, "RP Redirect URI must be provided"); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java index 4f5db00c5f5d0..492b805bdd3dd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java @@ -31,50 +31,63 @@ public static List validHeaderClaims() { public enum StandardClaims { - ISSUER("iss"), - SUBJECT("sub"), - AUDIENCE("aud"), - EXPIRATION_TIME("exp"), - NOT_BEFORE("nbf"), - ISSUED_AT("iat"), - NONCE("nonce"), - AUTHN_CONTEXT_CLASS_REF("acr"), - AUTHN_METHODS_REF("amr"), - AUTHORIZED_PARTY("azp"), - AUTH_TIME("auth_time"), - JWTID("jti"), - NAME("name"), - GIVEN_NAME("given_name"), - MIDDLE_NAME("middle_name"), - FAMILY_NAME("family_name"), - NICKNAME("nickname"), - PREFERRED_USERNAME("preferred_username"), - PROFILE("profile"), - PICTURE("picture"), - WEBSITE("website"), - EMAIL("email"), - EMAIL_VERIFIED("email_verified"), - GENDER("gender"), - BIRTHDATE("birthdate"), - ZONEINFO("zoneinfo"), - LOCALE("locale"), - PHONE_NUMBER("phone_number"), - PHONE_NUMBER_VERIFIED("phone_number_verified"), - ADDRESS("address"), - UPDATED_AT("updated_at"); + ISSUER("iss", "string"), + SUBJECT("sub", "string"), + AUDIENCE("aud", "array"), + EXPIRATION_TIME("exp", "long"), + NOT_BEFORE("nbf", "long"), + 
ISSUED_AT("iat", "long"), + NONCE("nonce", "string"), + AUTHN_CONTEXT_CLASS_REF("acr", "array"), + AUTHN_METHODS_REF("amr", "string"), + AUTHORIZED_PARTY("azp", "string"), + AUTH_TIME("auth_time", "long"), + JWTID("jti", "string"), + NAME("name", "string"), + GIVEN_NAME("given_name", "string"), + MIDDLE_NAME("middle_name", "string"), + FAMILY_NAME("family_name", "string"), + NICKNAME("nickname", "string"), + PREFERRED_USERNAME("preferred_username", "string"), + PROFILE("profile", "string"), + PICTURE("picture", "string"), + WEBSITE("website", "string"), + EMAIL("email", "string"), + EMAIL_VERIFIED("email_verified", "boolean"), + GENDER("gender", "string"), + BIRTHDATE("birthdate", "string"), + ZONEINFO("zoneinfo", "string"), + LOCALE("locale", "string"), + PHONE_NUMBER("phone_number", "string"), + PHONE_NUMBER_VERIFIED("phone_number_verified", "boolean"), + ADDRESS("address", "object"), + UPDATED_AT("updated_at", "long"); private String name; + private String type; - StandardClaims(String name) { + StandardClaims(String name, String type) { this.name = name; + this.type = type; + } + + public String getType() { + return type; } public String getClaimName() { return name; } - public static List getKnownClaims() { + public static List getStandardClaims() { return Stream.of(StandardClaims.values()).map(StandardClaims::getClaimName).collect(Collectors.toList()); } + + public static List getClaimsOfType(String type) { + return Stream.of(StandardClaims.values()) + .filter(claim -> claim.getType().equals(type)) + .map(StandardClaims::getClaimName) + .collect(Collectors.toList()); + } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java new file mode 100644 index 0000000000000..fd0a578d475b8 --- /dev/null +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.ElasticsearchSecurityException; + +import java.security.Key; +import java.security.Signature; +import java.security.interfaces.ECPublicKey; +import java.util.Base64; + +/** + * Class offering necessary functionality for validating the signatures of JWTs that have been signed with the + * Elliptic Curve Digital Signature Algorithm (ECDSA) using a EC Private Key + */ +public class EcSignatureValidator implements JwtSignatureValidator { + private SignatureAlgorithm algorithm; + private Key key; + + public EcSignatureValidator(SignatureAlgorithm algorithm, Key key) { + if (key instanceof ECPublicKey == false) { + throw new IllegalArgumentException("ECDSA signatures can only be verified using an ECPublicKey " + + "but a [" + key.getClass().getName() + "] is provided"); + } + if (SignatureAlgorithm.getEcAlgorithms().contains(algorithm) == false) { + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for ECDSA signature"); + } + this.key = key; + this.algorithm = algorithm; + } + + /** + * Validates the signature of a signed JWT using the EC Public Key that corresponds to the EC Private Key + * with which it was signed + * + * @param data The serialized representation of the JWT payload + * @param signature The serialized representation of the JWT signature + */ + @Override + public void validateSignature(byte[] data, byte[] signature) { + if (null == data || data.length == 0) { + throw new IllegalArgumentException("JWT data must be provided"); + } + if (null == signature || 
signature.length == 0) { + throw new IllegalArgumentException("JWT signature must be provided"); + } + + try { + final byte[] signatureBytes = Base64.getUrlDecoder().decode(signature); + final Signature ecdsa = Signature.getInstance(algorithm.getJcaAlgoName()); + ecdsa.initVerify((ECPublicKey) key); + ecdsa.update(data); + ecdsa.verify(signatureBytes); + } catch (Exception e) { + throw new ElasticsearchSecurityException("Encountered error attempting to validate the JWT ECDSA Signature", e); + } + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java new file mode 100644 index 0000000000000..e06dc6001a411 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java @@ -0,0 +1,40 @@ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.ElasticsearchSecurityException; + +import java.security.Key; +import java.security.Signature; +import java.security.interfaces.ECPrivateKey; + +public class EcSigner implements JwtSigner { + + private SignatureAlgorithm algorithm; + private Key key; + + public EcSigner(SignatureAlgorithm algorithm, Key key) { + if (key instanceof ECPrivateKey == false) { + throw new IllegalArgumentException("ECDSA signatures can only be created using a ECPrivateKey " + + "but a [" + key.getClass().getName() + "] is provided"); + } + if (SignatureAlgorithm.getEcAlgorithms().contains(algorithm) == false) { + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for ECDSA signature"); + } + this.algorithm = algorithm; + this.key = key; + } + + @Override + public byte[] sign(byte[] data) { + if (null == data || data.length == 0) { + throw new IllegalArgumentException("JWT data must be provided"); + } + try { + final Signature ecdsa = 
Signature.getInstance(algorithm.getJcaAlgoName()); + ecdsa.initSign((ECPrivateKey) key); + ecdsa.update(data); + return ecdsa.sign(); + } catch (Exception e) { + throw new ElasticsearchSecurityException("Encountered error attempting to create the JWT RSA Signature", e); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java new file mode 100644 index 0000000000000..c26f21c88474d --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchSecurityException; + +import javax.crypto.Mac; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; +import java.security.Key; +import java.util.Arrays; + +/** + * Class offering necessary functionality for validating the signatures of JWTs that have been signed with a + * Hash-based Message Authentication Code using a secret key. 
+ */ +public class HmacSignatureValidator implements JwtSignatureValidator { + protected final Logger logger = LogManager.getLogger(getClass()); + private Key key; + private SignatureAlgorithm algorithm; + + public HmacSignatureValidator(SignatureAlgorithm algorithm, Key key) { + if (key instanceof SecretKey == false) { + throw new IllegalArgumentException("HMAC signatures can only be verified using a SecretKey but a [" + key.getClass().getName() + "] is provided"); + } + if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm) == false) { + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for HMAC signature"); + } + this.key = key; + this.algorithm = algorithm; + + } + + /** + * Validates the signature of a signed JWT by generating the signature using the provided key and verifying that + * it matches the provided signature. + * + * @param data The serialized representation of the JWT payload + * @param expectedSignature The serialized representation of the JWT signature + * @return True if the newly calculated signature of the header and matches the one that was included in the JWT, false otherwise + */ + @Override + public void validateSignature(byte[] data, byte[] expectedSignature) { + if (null == data || data.length == 0) { + throw new IllegalArgumentException("JWT data must be provided"); + } + if (null == expectedSignature || expectedSignature.length == 0) { + throw new IllegalArgumentException("JWT signature must be provided"); + } + + try { + final SecretKeySpec keySpec = new SecretKeySpec(key.getEncoded(), algorithm.getJcaAlgoName()); + final Mac mac = Mac.getInstance(algorithm.getJcaAlgoName()); + mac.init(keySpec); + final byte[] calculatedSignature = mac.doFinal(data); + if (Arrays.equals(calculatedSignature, expectedSignature) == false) { + throw new ElasticsearchSecurityException("JWT HMAC Signature could not be validated. 
Calculated value was [{}] but the " + + "expected one was [{}]"); + } + } catch (Exception e) { + throw new ElasticsearchSecurityException("Encountered error attempting to validate the JWT HMAC Signature", e); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java new file mode 100644 index 0000000000000..9e4aa6331fb8e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java @@ -0,0 +1,43 @@ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.ElasticsearchSecurityException; + +import javax.crypto.Mac; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; +import java.security.Key; + +public class HmacSigner implements JwtSigner { + + private Key key; + private SignatureAlgorithm algorithm; + + public HmacSigner(SignatureAlgorithm algorithm, Key key) { + if (key instanceof SecretKey == false) { + throw new IllegalArgumentException("HMAC signatures can only be created using a SecretKey but a [" + key.getClass().getName() + + "] is provided"); + } + if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm) == false) { + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for HMAC signature"); + } + this.key = key; + this.algorithm = algorithm; + + } + + @Override + public byte[] sign(byte[] data) { + if (null == data || data.length == 0) { + throw new IllegalArgumentException("JWT data must be provided"); + } + + try { + final SecretKeySpec keySpec = new SecretKeySpec(key.getEncoded(), algorithm.getJcaAlgoName()); + final Mac mac = Mac.getInstance(algorithm.getJcaAlgoName()); + mac.init(keySpec); + return mac.doFinal(data); + } catch (Exception e) { + throw new ElasticsearchSecurityException("Encountered error attempting to create the 
JWT HMAC Signature", e); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java index eef54840482a6..63af2a6102286 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.security.authc.support.jwt; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.security.Key; import java.util.Base64; import java.util.Map; @@ -28,6 +28,7 @@ public class JsonWebToken { public JsonWebToken(Map header, Map payload) { this.header = header; this.payload = payload; + this.signature = ""; } public Map getHeader() { @@ -43,18 +44,25 @@ public Map getPayload() { * * @return The serialized JWT */ - public String encode() { - try { - // Base64 url encoding is defined in https://tools.ietf.org/html/rfc7515#appendix-C - String headerString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(header)); - String payloadString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(payload)); - String signatureString = Strings.hasText(signature) ? - Base64.getUrlEncoder().withoutPadding().encodeToString(signature.getBytes(StandardCharsets.UTF_8.name())) : - ""; - return headerString + "." + payloadString + "." 
+ signatureString; - } catch (IOException e) { - throw new UncheckedIOException(e); + public String encode() throws IOException { + // Base64 url encoding is defined in https://tools.ietf.org/html/rfc7515#appendix-C + String headerString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(header)); + String payloadString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(payload)); + return headerString + "." + payloadString + "." + signature; + } + + public void sign(Key key) throws IOException { + SignatureAlgorithm algorithm = getAlgorithm(header); + JwtSigner signer = getSigner(algorithm, key); + if (null == signer) { + //TODO what kind of Exception? + throw new IllegalStateException("Wrong algorithm"); } + String headerString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(header)); + String payloadString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(payload)); + final byte[] data = (headerString + "." 
+ payloadString).getBytes(StandardCharsets.UTF_8); + final byte[] signatureBytes = signer.sign(data); + signature = Base64.getUrlEncoder().withoutPadding().encodeToString(signatureBytes); } /** @@ -63,7 +71,7 @@ public String encode() { * @return a string representation of the JWT */ public String toString() { - return "{header=" + header + ", payload=" + payload + ", signature=" + signature + "}"; + return "{header=" + header + ", payload=" + payload + "}"; } private String mapToJsonString(Map map) throws IOException { @@ -87,4 +95,29 @@ private byte[] mapToJsonBytes(Map map) throws IOException { return BytesReference.toBytes(BytesReference.bytes(builder)); } } + + private JwtSigner getSigner(SignatureAlgorithm algorithm, Key key) { + if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm)) { + return new HmacSigner(algorithm, key); + } else if (SignatureAlgorithm.getRsaAlgorithms().contains(algorithm)) { + return new RsaSigner(algorithm, key); + } else if (SignatureAlgorithm.getEcAlgorithms().contains(algorithm)) { + return new EcSigner(algorithm, key); + } + return null; + } + + /** + * Returns the {@link SignatureAlgorithm} that corresponds to the value of the alg claim + * + * @param header The {@link Map} containing the parsed header claims + * @return the SignatureAlgorithm that corresponds to alg + */ + private SignatureAlgorithm getAlgorithm(Map header) { + if (header.containsKey("alg")) { + return SignatureAlgorithm.fromName((String) header.get("alg")); + } else { + return null; + } + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java index 67dfc1cc08185..7406ac50b1349 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java @@ -16,12 +16,10 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; +import java.security.Key; import java.util.Base64; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Locale; import java.util.Map; @@ -37,25 +35,79 @@ public JsonWebTokenParser(RPConfiguration rpConfig) { this.rpConfig = rpConfig; } - public final JsonWebToken parseJwt(String jwt) throws IOException { - final JsonWebTokenBuilder builder = new JsonWebTokenBuilder(); + /** + * Parses the serialized format of an ID Token into a {@link JsonWebToken}. In doing so it + *
+     * <ul>
+     * <li>Validates that the format and structure of the ID Token is correct</li>
+     * <li>Validates that the ID Token is signed and that one of the supported algorithms is used</li>
+     * <li>Validates the signature using the appropriate {@link JwtSignatureValidator}</li>
+     * </ul>
+ * + * @param jwt Serialized string representation of the ID Token + * @param key The {@link Key} to be used for verifying the signature + * @return a {@link JsonWebToken} + * @throws IOException if the ID Token cannot be deserialized + */ + public final JsonWebToken parseAndValidateJwt(String jwt, Key key) throws IOException { final String[] jwtParts = jwt.split("\\."); if (jwtParts.length != 3) { throw new IllegalArgumentException("The provided token is not a valid JWT"); } - final String deserializedHeader = deserializePart(jwtParts[0]); - final String deserializedPayload = deserializePart(jwtParts[1]); - final byte[] deserializedSignature = Strings.hasText(jwtParts[2]) ? Base64.getUrlDecoder().decode(jwtParts[2]) : new byte[0]; + final String serializedHeader = jwtParts[0]; + final String serializedPayload = jwtParts[1]; + final String serializedSignature = jwtParts[2]; + final String deserializedHeader = deserializePart(serializedHeader); + final String deserializedPayload = deserializePart(serializedPayload); + final Map headerMap = parseHeader(deserializedHeader); + final SignatureAlgorithm algorithm = getAlgorithm(headerMap); + if (algorithm == null || algorithm.equals(SignatureAlgorithm.NONE.name())) { + //TODO what kind of Exception? + throw new IllegalStateException("JWT not signed or unrecognised algorithm"); + } + if (Strings.hasText(serializedSignature) == false) { + //TODO what kind of Exception? + throw new IllegalStateException("Unsigned JWT"); + } + JwtSignatureValidator validator = getValidator(algorithm, key); + if (null == validator) { + //TODO what kind of Exception? + throw new IllegalStateException("Wrong algorithm"); + } + final byte[] signatureBytes = serializedSignature.getBytes(StandardCharsets.US_ASCII); + final byte[] data = (serializedHeader + "." 
+ serializedPayload).getBytes(StandardCharsets.UTF_8); + validator.validateSignature(data, signatureBytes); final Map payloadMap = parsePayload(deserializedPayload); return new JsonWebToken(headerMap, payloadMap); } - private static final String deserializePart(String encodedString) throws IOException { + /** + * Returns the {@link SignatureAlgorithm} that corresponds to the value of the alg claim + * + * @param header The {@link Map} containing the parsed header claims + * @return the SignatureAlgorithm that corresponds to alg + */ + private SignatureAlgorithm getAlgorithm(Map header) { + if (header.containsKey("alg")) { + return SignatureAlgorithm.fromName((String) header.get("alg")); + } else { + return null; + } + } + + private static String deserializePart(String encodedString) throws IOException { return new String(Base64.getUrlDecoder().decode(encodedString), StandardCharsets.UTF_8.name()); } - private final Map parseHeader(String headerJson) throws IOException { + /** + * Parses a JSON string representing the header of an ID Token into a {@link Map} where the key is the claim name and + * the value is the claim value. + * + * @param headerJson a JSON string representing the payload of a JWT + * @return a {@link Map} containing the parsed claims + * @throws IOException if the JSON string is malformed and cannot be parsed + */ + private Map parseHeader(String headerJson) throws IOException { try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, headerJson)) { final Map headerMap = new HashMap<>(); @@ -78,7 +130,18 @@ private final Map parseHeader(String headerJson) throws IOExcept } } - private final Map parsePayload(String payloadJson) throws IOException { + /** + * Parses a JSON string representing the payload of an ID Token into a {@link Map} where the key is the claim name and + * the value is the claim value. 
It parses only claims that are either in the set of Standard Claims that the + * specification defines or explicitly defined by the + * user in the realm settings. For the Standard Claims, the claim is also syntactically checked to conform to the expected types + * (string, number, boolean, object). + * + * @param payloadJson a JSON string representing the payload of a JWT + * @return a {@link Map} containing the parsed claims + * @throws IOException if the JSON string is malformed and cannot be parsed + */ + private Map parsePayload(String payloadJson) throws IOException { try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, payloadJson)) { final Map payloadMap = new HashMap<>(); @@ -88,26 +151,37 @@ private final Map parsePayload(String payloadJson) throws IOExce while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (currentFieldName.equals(Claims.StandardClaims.AUDIENCE.getClaimName())) { + } else if (Claims.StandardClaims.AUDIENCE.getClaimName().equals(currentFieldName)) { if (token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - XContentParserUtils. - ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); - payloadMap.put(currentFieldName, parseFieldsValue(parser)); - } + payloadMap.put(currentFieldName, parser.list()); } else { XContentParserUtils. ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); payloadMap.put(currentFieldName, Collections.singletonList(parseFieldsValue(parser))); } - } else if (Claims.StandardClaims.getKnownClaims().contains(currentFieldName)) { + } else if (Claims.StandardClaims.ADDRESS.getClaimName().equals(currentFieldName)) { + XContentParserUtils. 
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + payloadMap.put(currentFieldName, parser.mapStrings()); + + } else if (Claims.StandardClaims.getStandardClaims().contains(currentFieldName)) { if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { String message = "Failed to parse object: null value for field"; throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, message, currentFieldName)); - } else if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { + } else if (Claims.StandardClaims.getClaimsOfType("string").contains(currentFieldName)) { + XContentParserUtils. + ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); + payloadMap.put(currentFieldName, parser.text()); + } else if (Claims.StandardClaims.getClaimsOfType("long").contains(currentFieldName)) { + XContentParserUtils. + ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, parser.currentToken(), parser::getTokenLocation); Number number = (Number) parseFieldsValue(parser); payloadMap.put(currentFieldName, number.longValue()); + } else if (Claims.StandardClaims.getClaimsOfType("boolean").contains(currentFieldName)) { + XContentParserUtils. 
+ ensureExpectedToken(XContentParser.Token.VALUE_BOOLEAN, parser.currentToken(), parser::getTokenLocation); + payloadMap.put(currentFieldName, parser.booleanValue()); } else { payloadMap.put(currentFieldName, parseFieldsValue(parser)); } @@ -122,4 +196,15 @@ private final Map parsePayload(String payloadJson) throws IOExce return payloadMap; } } + + private JwtSignatureValidator getValidator(SignatureAlgorithm algorithm, Key key) { + if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm)) { + return new HmacSignatureValidator(algorithm, key); + } else if (SignatureAlgorithm.getRsaAlgorithms().contains(algorithm)) { + return new RsaSignatureValidator(algorithm, key); + } else if (SignatureAlgorithm.getEcAlgorithms().contains(algorithm)) { + return new EcSignatureValidator(algorithm, key); + } + return null; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java similarity index 87% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java index ce9b43ca23d8f..e4c44f1a122f0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java @@ -5,5 +5,5 @@ */ package org.elasticsearch.xpack.security.authc.support.jwt; -public class JsonWebTokenUtils { +public class JwsSignatureValidatorFactory { } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidator.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidator.java new file mode 100644 index 0000000000000..ffef1aff4d9b7 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidator.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +public interface JwtSignatureValidator { + + void validateSignature(byte[] data, byte[] signature); +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java new file mode 100644 index 0000000000000..d45598b9ad537 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +public interface JwtSigner { + + public byte[] sign(byte[] data); +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java new file mode 100644 index 0000000000000..e0232d2560400 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.ElasticsearchSecurityException; + +import java.security.Key; +import java.security.PublicKey; +import java.security.Signature; +import java.util.Base64; + +/** + * Class offering necessary functionality for validating the signatures of JWTs that have been signed with + * RSASSA-PKCS1-v1_5 (PKCS#1) using an RSA Private Key + */ +public class RsaSignatureValidator implements JwtSignatureValidator { + private SignatureAlgorithm algorithm; + private Key key; + + public RsaSignatureValidator(SignatureAlgorithm algorithm, Key key) { + if (key instanceof PublicKey == false) { + throw new IllegalArgumentException("RSA signatures can only be verified using a PublicKey " + + "but a [" + key.getClass().getName() + "] is provided"); + } + if (SignatureAlgorithm.getRsaAlgorithms().contains(algorithm) == false) { + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for RSA signature"); + } + this.key = key; + this.algorithm = algorithm; + } + + /** + * Validates the signature of a signed JWT using the Public Key that corresponds to the 
Private Key + * with which it was signed + * + * @param data The serialized representation of the JWT payload + * @param signature The serialized representation of the JWT signature + */ + @Override + public void validateSignature(byte[] data, byte[] signature) { + if (null == data || data.length == 0) { + throw new IllegalArgumentException("JWT data must be provided"); + } + if (null == signature || signature.length == 0) { + throw new IllegalArgumentException("JWT signature must be provided"); + } + + try { + final byte[] signatureBytes = Base64.getUrlDecoder().decode(signature); + final Signature rsa = Signature.getInstance(algorithm.getJcaAlgoName()); + rsa.initVerify((PublicKey) key); + rsa.update(data); + rsa.verify(signatureBytes); + } catch (Exception e) { + throw new ElasticsearchSecurityException("Encountered error attempting to validate the JWT RSA Signature", e); + } + + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java new file mode 100644 index 0000000000000..4590c153cca28 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java @@ -0,0 +1,40 @@ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.ElasticsearchSecurityException; + +import java.security.Key; +import java.security.PrivateKey; +import java.security.Signature; + +public class RsaSigner implements JwtSigner { + + private SignatureAlgorithm algorithm; + private Key key; + + public RsaSigner(SignatureAlgorithm algorithm, Key key) { + if (key instanceof PrivateKey == false) { + throw new IllegalArgumentException("RSA signatures can only be created using a PrivateKey " + + "but a [" + key.getClass().getName() + "] is provided"); + } + if (SignatureAlgorithm.getRsaAlgorithms().contains(algorithm) == false) { + throw new 
IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for RSA signature"); + } + this.algorithm = algorithm; + this.key = key; + } + + @Override + public byte[] sign(byte[] data) { + if (null == data || data.length == 0) { + throw new IllegalArgumentException("JWT data must be provided"); + } + try { + final Signature rsa = Signature.getInstance(algorithm.getJcaAlgoName()); + rsa.initSign((PrivateKey) key); + rsa.update(data); + return rsa.sign(); + } catch (Exception e) { + throw new ElasticsearchSecurityException("Encountered error attempting to create the JWT RSA Signature", e); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java new file mode 100644 index 0000000000000..ebfef8d9e41f9 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +/** + * Representation of signature algorithms for JWT as described in RFC7518 + */ +public enum SignatureAlgorithm { + + NONE("NONE", "NONE"), + HS256("HS256", "HmacSHA256"), + HS384("HS384", "HmacSHA384"), + HS512("HS512", "HmacSHA512"), + RS256("RS256", "SHA256WithRSA"), + RS384("RS384", "SHA384WithRSA"), + RS512("RS512", "SHA512WithRSA"), + ES256("ES256", "SHA256WithECDSA"), + ES384("ES384", "SHA384WithECDSA"), + ES512("ES512", "SHA512WithECDSA"); + + private String name; + private String jcaAlgoName; + + SignatureAlgorithm(String name, String jcaAlgoName) { + this.name = name; + this.jcaAlgoName = jcaAlgoName; + } + + public static SignatureAlgorithm fromName(String alg) { + return Stream.of(SignatureAlgorithm.values()).filter(n -> n.name().equals(alg)).findFirst().orElse(null); + } + + public String getJcaAlgoName() { + return jcaAlgoName; + } + + public static List getHmacAlgorithms() { + return Arrays.asList(HS256, HS384, HS512); + } + + public static List getRsaAlgorithms() { + return Arrays.asList(RS256, RS384, RS512); + } + + public static List getEcAlgorithms() { + return Arrays.asList(ES256, ES384, ES512); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java index d91fe1d663530..7637f359c8ed3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java @@ -10,9 +10,12 @@ import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebTokenParser; +import 
javax.crypto.spec.SecretKeySpec; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -26,7 +29,8 @@ public void testIdTokenParsing() throws IOException { "EfuB907SuImosemSt7qPlg3HAJH85JKI"; JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, null)); - JsonWebToken jwt = jwtParser.parseJwt(serializedJwt); + final SecretKeySpec keySpec = new SecretKeySpec("ffff".getBytes(), "HmacSHA256"); + JsonWebToken jwt = jwtParser.parseAndValidateJwt(serializedJwt, keySpec); assertTrue(jwt.getPayload().containsKey("iss")); assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); assertTrue(jwt.getPayload().containsKey("sub")); @@ -63,15 +67,16 @@ public void testIdTokenParsing() throws IOException { } public void testIdTokenWithPrivateClaimsParsing() throws IOException { - final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViIjo" + - "iMjQ4Mjg5NzYxMDAxIiwiYXVkIjoiczZCaGRSa3F0MyIsIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiZXhwIjoxMzExMjgxOTcwLCJpYXQiOjEzMTEyODA5NzAsIm" + - "5hbWUiOiJKYW5lIERvZSIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLCJjbGFpbTEiOiJ2YWx1ZTEiL" + - "CJjbGFpbTIiOiJ2YWx1ZTIiLCJjbGFpbTMiOiJ2YWx1ZTMiLCJjbGFpbTQiOiJ2YWx1ZTQiLCJiaXJ0aGRhdGUiOiIxOTk0LTEwLTMxIiwiZW1haWwiOiJqYW5l" + - "ZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG9lL21lLmpwZyJ9.5zBKLOfp-mkALbE-Uvs7wI4mEbzLdOxOlMeoDy" + - "op8MM"; + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViI" + + "joiMjQ4Mjg5NzYxMDAxIiwiYXVkIjoiczZCaGRSa3F0MyIsIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiZXhwIjoxMzExMjgxOTcwLCJpYXQiOjEzMTEyODA5Nz" + + 
"AsIm5hbWUiOiJKYW5lIERvZSIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLCJjbGFpbTEiOiJ2YWx" + + "1ZTEiLCJjbGFpbTIiOiJ2YWx1ZTIiLCJjbGFpbTMiOiJ2YWx1ZTMiLCJjbGFpbTQiOiJ2YWx1ZTQiLCJiaXJ0aGRhdGUiOiIxOTk0LTEwLTMxIiwiZW1haWwi" + + "OiJqYW5lZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG9lL21lLmpwZyIsImFkZHJlc3MiOnsiY291bnRyeSI6I" + + "kdyZWVjZSIsInJlZ2lvbiI6IkV2aWEifX0.K9nnZaiuF0z8wJUrJQSJSMKQtql3O6xMPYxyEOa7uC4"; JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, Arrays.asList("claim1", "claim2", "claim3", "claim4"))); - JsonWebToken jwt = jwtParser.parseJwt(serializedJwt); + final SecretKeySpec keySpec = new SecretKeySpec("ffff".getBytes(), "HmacSHA256"); + JsonWebToken jwt = jwtParser.parseAndValidateJwt(serializedJwt, keySpec); assertTrue(jwt.getPayload().containsKey("iss")); assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); assertTrue(jwt.getPayload().containsKey("sub")); @@ -108,6 +113,65 @@ public void testIdTokenWithPrivateClaimsParsing() throws IOException { assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); assertTrue(jwt.getPayload().containsKey("claim4")); assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); + assertTrue(jwt.getPayload().containsKey("address")); + Map expectedAddress = new HashMap<>(); + expectedAddress.put("country", "Greece"); + expectedAddress.put("region", "Evia"); + assertThat(jwt.getPayload().get("address"), equalTo(expectedAddress)); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + assertTrue(jwt.getHeader().containsKey("kid")); + assertThat(jwt.getHeader().get("kid"), equalTo("1e9gdk7")); + } + + public void testIdTokenWithMutipleAudiencesParsing() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViI" + + 
"joiMjQ4Mjg5NzYxMDAxIiwiYXVkIjpbInM2QmhkUmtxdDMiLCJvdGhlcl9hdWRpZW5jZSJdLCJub25jZSI6Im4tMFM2X1d6QTJNaiIsImV4cCI6MTMxMTI4MT" + + "k3MCwiaWF0IjoxMzExMjgwOTcwLCJuYW1lIjoiSmFuZSBEb2UiLCJnaXZlbl9uYW1lIjoiSmFuZSIsImZhbWlseV9uYW1lIjoiRG9lIiwiZ2VuZGVyIjoiZmV" + + "tYWxlIiwiY2xhaW0xIjoidmFsdWUxIiwiY2xhaW0yIjoidmFsdWUyIiwiY2xhaW0zIjoidmFsdWUzIiwiY2xhaW00IjoidmFsdWU0IiwiYmlydGhkYXRlIjoi" + + "MTk5NC0xMC0zMSIsImVtYWlsIjoiamFuZWRvZUBleGFtcGxlLmNvbSIsInBpY3R1cmUiOiJodHRwOi8vZXhhbXBsZS5jb20vamFuZWRvZS9tZS5qcGcifQ.xn" + + "HQXmN17lnkkBM-DX3kFRfr7Edk1OYoAPpCwCFOsvA"; + JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, + Arrays.asList("claim1", "claim2", "claim3", "claim4"))); + final SecretKeySpec keySpec = new SecretKeySpec("ffff".getBytes(), "HmacSHA256"); + JsonWebToken jwt = jwtParser.parseAndValidateJwt(serializedJwt, keySpec); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getPayload().containsKey("aud")); + List aud = (List) jwt.getPayload().get("aud"); + assertThat(aud.size(), equalTo(2)); + assertTrue(aud.contains("s6BhdRkqt3")); + assertTrue(aud.contains("other_audience")); + assertTrue(jwt.getPayload().containsKey("nonce")); + assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); + assertTrue(jwt.getPayload().containsKey("exp")); + assertThat(jwt.getPayload().get("exp"), equalTo(1311281970L)); + assertTrue(jwt.getPayload().containsKey("iat")); + assertThat(jwt.getPayload().get("iat"), equalTo(1311280970L)); + assertTrue(jwt.getPayload().containsKey("name")); + assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(jwt.getPayload().containsKey("given_name")); + assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); + 
assertTrue(jwt.getPayload().containsKey("family_name")); + assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(jwt.getPayload().containsKey("gender")); + assertThat(jwt.getPayload().get("gender"), equalTo("female")); + assertTrue(jwt.getPayload().containsKey("birthdate")); + assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(jwt.getPayload().containsKey("email")); + assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(jwt.getPayload().containsKey("picture")); + assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(jwt.getPayload().containsKey("claim1")); + assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); + assertTrue(jwt.getPayload().containsKey("claim2")); + assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); + assertTrue(jwt.getPayload().containsKey("claim3")); + assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); + assertTrue(jwt.getPayload().containsKey("claim4")); + assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); assertTrue(jwt.getHeader().containsKey("alg")); assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); assertTrue(jwt.getHeader().containsKey("kid")); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java index 62695f950db30..26395d63d9161 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; +import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -15,7 +16,7 
@@ public class JsonWebTokenTests extends ESTestCase { - public void testUnsignedJwtEncoding() { + public void testUnsignedJwtEncoding() throws IOException { Map header = new HashMap<>(); Map payload = new HashMap<>(); header.put("alg", "none"); From d70446f8b5d3983393097895a464ae63cbe23970 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 9 Jan 2019 16:25:32 +0200 Subject: [PATCH 14/71] Complement JWS implementation and tests - Added support to PemUtils for reading public keys from files - Added support for converting signatures from DER to JWS and vice versa - Added tests --- .../oidc/OpenIdConnectRealmSettings.java | 7 +- .../xpack/core/ssl/PemUtils.java | 105 ++++- .../xpack/core/ssl/PemUtilsTests.java | 15 + .../ssl/certs/simple/ec_public_key.pem | 4 + .../ssl/certs/simple/testnode-public.pem | 9 + .../authc/oidc/OpenIdConnectRealm.java | 13 +- .../security/authc/oidc/RPConfiguration.java | 11 + .../security/authc/support/jwt/Claims.java | 1 - .../support/jwt/EcSignatureValidator.java | 99 ++++- .../security/authc/support/jwt/EcSigner.java | 88 ++++- .../support/jwt/HmacSignatureValidator.java | 16 +- .../authc/support/jwt/HmacSigner.java | 16 +- ...WebTokenParser.java => IdTokenParser.java} | 89 +++-- .../authc/support/jwt/JsonWebToken.java | 57 ++- .../jwt/JwsSignatureValidatorFactory.java | 9 - .../security/authc/support/jwt/JwtSigner.java | 4 +- .../support/jwt/RsaSignatureValidator.java | 3 +- .../security/authc/support/jwt/RsaSigner.java | 12 +- .../authc/support/jwt/SignatureAlgorithm.java | 5 + .../support/JsonWebTokenParserTests.java | 180 --------- .../authc/support/jwt/IdTokenParserTests.java | 363 ++++++++++++++++++ .../support/{ => jwt}/JsonWebTokenTests.java | 2 +- .../jwt/JwtSignatureValidatorTests.java | 67 ++++ .../authc/support/jwt/JwtSignerTests.java | 115 ++++++ .../authc/oidc/ec_private_key_256.pem | 5 + .../authc/oidc/ec_private_key_384.pem | 6 + .../authc/oidc/ec_private_key_512.pem | 7 + 
.../security/authc/oidc/ec_public_key_256.pem | 4 + .../security/authc/oidc/ec_public_key_384.pem | 5 + .../security/authc/oidc/ec_public_key_512.pem | 6 + .../security/authc/oidc/rsa_private_key.pem | 52 +++ .../security/authc/oidc/rsa_public_key.pem | 14 + 32 files changed, 1114 insertions(+), 275 deletions(-) create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/ec_public_key.pem create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-public.pem rename x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/{JsonWebTokenParser.java => IdTokenParser.java} (72%) delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java rename x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/{ => jwt}/JsonWebTokenTests.java (95%) create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_256.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_384.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_512.pem create mode 100644 
x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_256.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_384.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_512.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java index 36ec5062c3f03..41297702daaa0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -46,13 +46,16 @@ private OpenIdConnectRealmSettings() { RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", key -> Setting.listSetting(key, Collections.singletonList("openid"), Function.identity(), Setting.Property.NodeScope)); public static final Setting.AffixSetting> RP_ALLOWED_SCOPES = Setting.affixKeySetting( - RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", + RealmSettings.realmSettingPrefix(TYPE), "rp.allowed_scopes", + key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Setting.Property.NodeScope)); + public static final Setting.AffixSetting> RP_ALLOWED_SIGNATURE_ALGORITHMS = Setting.affixKeySetting( + RealmSettings.realmSettingPrefix(TYPE), "rp.allowed_signature_algorithms", key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Setting.Property.NodeScope)); public 
static Set> getSettings() { final Set> set = Sets.newHashSet( OP_NAME, RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_ALLOWED_SCOPES, RP_CLIENT_SECRET, - OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ISSUER); + RP_ALLOWED_SIGNATURE_ALGORITHMS, OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ISSUER); set.addAll(DelegatedAuthorizationSettings.getSettings(TYPE)); set.addAll(RealmSettings.getStandardSettings(TYPE)); return set; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java index 421b30baac7b6..3b1bf7a6bb4ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java @@ -20,6 +20,7 @@ import java.security.KeyPairGenerator; import java.security.MessageDigest; import java.security.PrivateKey; +import java.security.PublicKey; import java.security.interfaces.ECKey; import java.security.spec.AlgorithmParameterSpec; import java.security.spec.DSAPrivateKeySpec; @@ -28,6 +29,7 @@ import java.security.spec.ECPrivateKeySpec; import java.security.spec.PKCS8EncodedKeySpec; import java.security.spec.RSAPrivateCrtKeySpec; +import java.security.spec.X509EncodedKeySpec; import java.util.Arrays; import java.util.Base64; @@ -59,6 +61,8 @@ public class PemUtils { private static final String OPENSSL_EC_PARAMS_HEADER = "-----BEGIN EC PARAMETERS-----"; private static final String OPENSSL_EC_PARAMS_FOOTER = "-----END EC PARAMETERS-----"; private static final String HEADER = "-----BEGIN"; + private static final String PUBLIC_HEADER = "-----BEGIN PUBLIC KEY-----"; + private static final String PUBLIC_FOOTER = "-----END PUBLIC KEY-----"; private PemUtils() { throw new IllegalStateException("Utility class should not be instantiated"); @@ -108,6 +112,26 @@ public static PrivateKey 
readPrivateKey(Path keyPath, Supplier passwordS } } + public static PublicKey readPublicKey(Path keyPath) { + try (BufferedReader bReader = Files.newBufferedReader(keyPath, StandardCharsets.UTF_8)) { + String line = bReader.readLine(); + while (null != line && line.startsWith(HEADER) == false) { + line = bReader.readLine(); + } + if (null == line) { + throw new IllegalStateException("Error parsing Public Key from: " + keyPath.toString() + ". File is empty"); + } + if (PUBLIC_HEADER.equals(line.trim())) { + return parseRsaPublicKey(bReader); + } else { + throw new IllegalStateException("Error parsing Public Key from: " + keyPath.toString() + ". File did not contain a " + + "supported key format"); + } + } catch (IOException | GeneralSecurityException e) { + throw new IllegalStateException("Error parsing Public Key from: " + keyPath.toString(), e); + } + } + /** * Removes the EC Headers that OpenSSL adds to EC private keys as the information in them * is redundant @@ -179,11 +203,38 @@ private static PrivateKey parsePKCS8(BufferedReader bReader) throws IOException, throw new IOException("Malformed PEM file, PEM footer is invalid or missing"); } byte[] keyBytes = Base64.getDecoder().decode(sb.toString()); - String keyAlgo = getKeyAlgorithmIdentifier(keyBytes); + String keyAlgo = getPrivateKeyAlgorithmIdentifier(keyBytes); KeyFactory keyFactory = KeyFactory.getInstance(keyAlgo); return keyFactory.generatePrivate(new PKCS8EncodedKeySpec(keyBytes)); } + /** + * Creates a {@link PublicKey} from the contents of {@code bReader} that contains a plaintext PEM encoded public key + * + * @param bReader the {@link BufferedReader} containing the key file contents + * @return {@link PublicKey} + * @throws IOException if the file can't be read + * @throws GeneralSecurityException if the public key can't be generated from the {@link X509EncodedKeySpec} + */ + private static PublicKey parseRsaPublicKey(BufferedReader bReader) throws IOException, GeneralSecurityException { + 
StringBuilder sb = new StringBuilder(); + String line = bReader.readLine(); + while (line != null) { + if (PUBLIC_FOOTER.equals(line.trim())) { + break; + } + sb.append(line.trim()); + line = bReader.readLine(); + } + if (null == line || PUBLIC_FOOTER.equals(line.trim()) == false) { + throw new IOException("Malformed PEM file, PEM footer is invalid or missing"); + } + byte[] keyBytes = Base64.getDecoder().decode(sb.toString()); + String keyAlgo = getPublicKeyAlgorithmIdentifier(keyBytes); + KeyFactory keyFactory = KeyFactory.getInstance(keyAlgo); + return keyFactory.generatePublic(new X509EncodedKeySpec(keyBytes)); + } + /** * Creates a {@link PrivateKey} from the contents of {@code bReader} that contains an EC private key encoded in * OpenSSL traditional format. @@ -332,7 +383,7 @@ private static PrivateKey parsePKCS8Encrypted(BufferedReader bReader, char[] key Cipher cipher = Cipher.getInstance(encryptedPrivateKeyInfo.getAlgName()); cipher.init(Cipher.DECRYPT_MODE, secretKey, encryptedPrivateKeyInfo.getAlgParameters()); PKCS8EncodedKeySpec keySpec = encryptedPrivateKeyInfo.getKeySpec(cipher); - String keyAlgo = getKeyAlgorithmIdentifier(keySpec.getEncoded()); + String keyAlgo = getPrivateKeyAlgorithmIdentifier(keySpec.getEncoded()); KeyFactory keyFactory = KeyFactory.getInstance(keyAlgo); return keyFactory.generatePrivate(keySpec); } @@ -534,12 +585,23 @@ private static DSAPrivateKeySpec parseDsaDer(byte[] keyBytes) throws IOException /** * Parses a DER encoded private key and reads its algorithm identifier Object OID. 
* + * PrivateKeyInfo ::= SEQUENCE { + * version Version, + * algorithm AlgorithmIdentifier, + * PrivateKey OCTET STRING + * } + * + * AlgorithmIdentifier ::= SEQUENCE { + * algorithm OBJECT IDENTIFIER, + * parameters ANY DEFINED BY algorithm OPTIONAL + * } + * * @param keyBytes the private key raw bytes * @return A string identifier for the key algorithm (RSA, DSA, or EC) * @throws GeneralSecurityException if the algorithm oid that is parsed from ASN.1 is unknown * @throws IOException if the DER encoded key can't be parsed */ - private static String getKeyAlgorithmIdentifier(byte[] keyBytes) throws IOException, GeneralSecurityException { + private static String getPrivateKeyAlgorithmIdentifier(byte[] keyBytes) throws IOException, GeneralSecurityException { DerParser parser = new DerParser(keyBytes); DerParser.Asn1Object sequence = parser.readAsn1Object(); parser = sequence.getParser(); @@ -558,4 +620,41 @@ private static String getKeyAlgorithmIdentifier(byte[] keyBytes) throws IOExcept throw new GeneralSecurityException("Error parsing key algorithm identifier. Algorithm with OID: "+oidString+ " is not " + "supported"); } + + /** + * Parses a DER encoded public key and reads its algorithm identifier Object OID. 
+ * + * PublicKeyInfo ::= SEQUENCE { + * algorithm AlgorithmIdentifier, + * PublicKey BIT STRING + * } + * + * AlgorithmIdentifier ::= SEQUENCE { + * algorithm OBJECT IDENTIFIER, + * parameters ANY DEFINED BY algorithm OPTIONAL + * } + * + * @param keyBytes the public key raw bytes + * @return A string identifier for the key algorithm (RSA, DSA, or EC) + * @throws GeneralSecurityException if the algorithm oid that is parsed from ASN.1 is unknown + * @throws IOException if the DER encoded key can't be parsed + */ + private static String getPublicKeyAlgorithmIdentifier(byte[] keyBytes) throws IOException, GeneralSecurityException { + DerParser parser = new DerParser(keyBytes); + DerParser.Asn1Object sequence = parser.readAsn1Object(); + parser = sequence.getParser(); + DerParser.Asn1Object algSequence = parser.readAsn1Object(); + parser = algSequence.getParser(); + String oidString = parser.readAsn1Object().getOid(); + switch (oidString) { + case "1.2.840.10040.4.1": + return "DSA"; + case "1.2.840.113549.1.1.1": + return "RSA"; + case "1.2.840.10045.2.1": + return "EC"; + } + throw new GeneralSecurityException("Error parsing key algorithm identifier. 
Algorithm with OID: " + oidString + " is not " + + "supported"); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java index 3134d42ce3621..8be2b147feb4e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java @@ -14,6 +14,9 @@ import java.security.Key; import java.security.KeyStore; import java.security.PrivateKey; +import java.security.PublicKey; +import java.security.interfaces.ECPublicKey; +import java.security.interfaces.RSAPublicKey; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -210,6 +213,18 @@ public void testReadEmptyFile() { assertThat(e.getMessage(), containsString("File is empty")); } + public void testReadRsaPublicFile() { + PublicKey publicKey = PemUtils.readPublicKey(getDataPath + ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-public.pem")); + assertTrue(publicKey instanceof RSAPublicKey); + } + + public void testReadEcPublicFile() { + PublicKey publicKey = PemUtils.readPublicKey(getDataPath + ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/ec_public_key.pem")); + assertTrue(publicKey instanceof ECPublicKey); + } + private Key getKeyFromKeystore(String algo) throws Exception { Path keystorePath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); try (InputStream in = Files.newInputStream(keystorePath)) { diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/ec_public_key.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/ec_public_key.pem new file mode 100644 index 0000000000000..a3403a7d086eb --- /dev/null +++ 
b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/ec_public_key.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE7mUZVxp/0TnDu8hSSedG9tGL4Fd1 +PhaUcdJ8f8ooFo+sYhDCp1m21JzNJihfHNxhxpOYPDlz52yvero+raTAeQ== +-----END PUBLIC KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-public.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-public.pem new file mode 100644 index 0000000000000..79c94e94f9cd2 --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-public.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3rGZ1QbsW0+MuyrSLmMf +DFKtLBkIFW8V0gRuurFg1PUKKNR1Mq2tMVwjjYETAU/UY0iKZOzjgvYPKhDTYBTt +e/WHR1ZK4CYVv7TQX/gtFQG/ge/c7u0sLch9p7fbd+/HZiLS/rBEZDIohvgUvzvn +A8+OIYnw4kuxKo/5iboAIS41klMg/lATm8V71LMY68inht71/ZkQoAHKgcR9z4yN +YvQ1WqKG8DG8KROXltll3sTrKbl5zJhn660es/1ZnR6nvwt6xnSTl/mNHMjkfv1b +s4rJ/py3qPxicdoSIn/KyojUcgHVF38fuAy2CQTdjVG5fWj9iz+mQvLm3+qsIYQd +FwIDAQAB +-----END PUBLIC KEY----- diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index 85f9d782aa707..780b6f0e91afe 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -38,6 +38,7 @@ import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_USERINFO_ENDPOINT; import static 
org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SCOPES; +import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_ID; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_SECRET; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REDIRECT_URI; @@ -91,8 +92,9 @@ private RPConfiguration buildRPConfiguration(RealmConfig config) { config.getSetting(RP_REQUESTED_SCOPES) : Collections.emptyList(); List allowedScopes = config.hasSetting(RP_ALLOWED_SCOPES) ? config.getSetting(RP_ALLOWED_SCOPES) : Collections.emptyList(); + List allowedSignatureAlgorithms = requireListSetting(config, RP_ALLOWED_SIGNATURE_ALGORITHMS); - return new RPConfiguration(clientId, redirectUri, responseType, requestedScopes, allowedScopes); + return new RPConfiguration(clientId, redirectUri, responseType, allowedSignatureAlgorithms, requestedScopes, allowedScopes); } private OPConfiguration buildOPConfiguration(RealmConfig config) { @@ -114,6 +116,15 @@ static String require(RealmConfig config, Setting.AffixSetting setting) return value; } + static List requireListSetting(RealmConfig config, Setting.AffixSetting> setting) { + final List value = config.getSetting(setting); + if (value.isEmpty()) { + throw new SettingsException("The configuration setting [" + RealmSettings.getFullSettingKey(config, setting) + + "] is required"); + } + return value; + } + /** * Creates the URI for an OIDC Authentication Request from the realm configuration using URI Query String Serialization and possibly * generates a state parameter. 
It then returns the URI and state encapsulated in a {@link OpenIdConnectPrepareAuthenticationResponse} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java index 6f121f3c094e3..9c48d88f23b6a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RPConfiguration.java @@ -19,10 +19,12 @@ public class RPConfiguration { private final String clientId; private final String redirectUri; private final String responseType; + private final List allowedSigningAlgorithms; private final List requestedScopes; private final List allowedScopes; public RPConfiguration(String clientId, String redirectUri, String responseType, + List allowedSigningAlgorithms, @Nullable List requestedScopes, @Nullable List allowedScopes) { this.clientId = Objects.requireNonNull(clientId, "RP Client ID must be provided"); @@ -34,6 +36,11 @@ public RPConfiguration(String clientId, String redirectUri, String responseType, } else { this.responseType = responseType; } + if (null == allowedSigningAlgorithms || allowedSigningAlgorithms.isEmpty()) { + throw new IllegalArgumentException("Allowed signing algorithms must be provided"); + } else { + this.allowedSigningAlgorithms = allowedSigningAlgorithms; + } if (null == requestedScopes || requestedScopes.isEmpty()) { this.requestedScopes = Collections.singletonList("openid"); } else { @@ -58,6 +65,10 @@ public String getResponseType() { return responseType; } + public List getAllowedSigningAlgorithms() { + return allowedSigningAlgorithms; + } + public List getRequestedScopes() { return requestedScopes; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java index 492b805bdd3dd..9b345f2d06f14 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java @@ -1,6 +1,5 @@ package org.elasticsearch.xpack.security.authc.support.jwt; -import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java index fd0a578d475b8..2d9a2944c026b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java @@ -9,8 +9,10 @@ import java.security.Key; import java.security.Signature; +import java.security.SignatureException; import java.security.interfaces.ECPublicKey; -import java.util.Base64; + +import static org.elasticsearch.xpack.security.authc.support.jwt.SignatureAlgorithm.*; /** * Class offering necessary functionality for validating the signatures of JWTs that have been signed with the @@ -25,13 +27,33 @@ public EcSignatureValidator(SignatureAlgorithm algorithm, Key key) { throw new IllegalArgumentException("ECDSA signatures can only be verified using an ECPublicKey " + "but a [" + key.getClass().getName() + "] is provided"); } - if (SignatureAlgorithm.getEcAlgorithms().contains(algorithm) == false) { + if (getEcAlgorithms().contains(algorithm) == false) { throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for ECDSA signature"); } this.key = key; this.algorithm = algorithm; } + 
/** + * Return the expected ECDSA signature length (number of bytes) as described in + * the specification + * + * @param algorithm the {@link SignatureAlgorithm} for which to get the expected signature length + * @return the ECDSA signature length + */ + private int getExpectedSignatureLength(SignatureAlgorithm algorithm) { + switch (algorithm) { + case ES256: + return 64; + case ES384: + return 96; + case ES512: + return 132; + default: + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for ECDSA signature"); + } + } + /** * Validates the signature of a signed JWT using the EC Public Key that corresponds to the EC Private Key * with which it was signed @@ -49,14 +71,83 @@ public void validateSignature(byte[] data, byte[] signature) { } try { - final byte[] signatureBytes = Base64.getUrlDecoder().decode(signature); final Signature ecdsa = Signature.getInstance(algorithm.getJcaAlgoName()); ecdsa.initVerify((ECPublicKey) key); ecdsa.update(data); - ecdsa.verify(signatureBytes); + ecdsa.verify(convertToDer(signature)); } catch (Exception e) { throw new ElasticsearchSecurityException("Encountered error attempting to validate the JWT ECDSA Signature", e); } } + /** + * Converts the JOSE signature to DER so that it can be verified. 
See + * the specification + * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/impl/src/main/java/io/jsonwebtoken/impl/crypto/EllipticCurveProvider.java + * + * @param jwsSignature The signature as decoded from the JWT + * @return the signature, DER encoded so that it can be used in {@link Signature#verify(byte[])} + * @throws SignatureException if the signature is malformed + */ + private byte[] convertToDer(byte[] jwsSignature) throws SignatureException { + if (jwsSignature.length != getExpectedSignatureLength(algorithm)) { + throw new SignatureException("Invalid ECDSA signature length"); + } + int rawLen = jwsSignature.length / 2; + + // Remove any extra padding bytes from the end of R if any + int i = rawLen; + while ((i > 0) && (jwsSignature[rawLen - i] == 0)) { + i--; + } + int rLength = i; + if (jwsSignature[rawLen - i] < 0) { + rLength += 1; + } + + // Remove any extra padding bytes from the end of S if any + int k = rawLen; + while ((k > 0) && (jwsSignature[2 * rawLen - k] == 0)) { + k--; + } + int sLength = k; + if (jwsSignature[2 * rawLen - k] < 0) { + sLength += 1; + } + + int len = 2 + rLength + 2 + sLength; + + if (len > 255) { + throw new SignatureException("Invalid ECDSA signature format"); + } + + int offset; + + final byte derSignature[]; + // Convert octet + if (len < 128) { + derSignature = new byte[2 + 2 + rLength + 2 + sLength]; + offset = 1; + } else { + derSignature = new byte[3 + 2 + rLength + 2 + sLength]; + derSignature[1] = (byte) 0x81; + offset = 2; + } + + derSignature[0] = 48; + derSignature[offset++] = (byte) len; + derSignature[offset++] = 2; + derSignature[offset++] = (byte) rLength; + //Copy R taking offset into account + System.arraycopy(jwsSignature, rawLen - i, derSignature, (offset + rLength) - i, i); + + offset += rLength; + + derSignature[offset++] = 2; + derSignature[offset++] = (byte) sLength; + //Copy S taking offset into account + System.arraycopy(jwsSignature, 2 * rawLen - k, 
derSignature, (offset + sLength) - k, k); + + return derSignature; + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java index e06dc6001a411..9e327385b145c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java @@ -1,15 +1,16 @@ package org.elasticsearch.xpack.security.authc.support.jwt; -import org.elasticsearch.ElasticsearchSecurityException; - +import java.security.GeneralSecurityException; import java.security.Key; import java.security.Signature; +import java.security.SignatureException; import java.security.interfaces.ECPrivateKey; public class EcSigner implements JwtSigner { private SignatureAlgorithm algorithm; private Key key; + private int signatureLength; public EcSigner(SignatureAlgorithm algorithm, Key key) { if (key instanceof ECPrivateKey == false) { @@ -21,20 +22,87 @@ public EcSigner(SignatureAlgorithm algorithm, Key key) { } this.algorithm = algorithm; this.key = key; + this.signatureLength = getSignatureLength(algorithm); + } + + private int getSignatureLength(SignatureAlgorithm algorithm) { + switch (algorithm) { + case ES256: + return 64; + case ES384: + return 96; + case ES512: + return 132; + default: + throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for ECDSA signature"); + } } + @Override - public byte[] sign(byte[] data) { + public byte[] sign(byte[] data) throws GeneralSecurityException { if (null == data || data.length == 0) { throw new IllegalArgumentException("JWT data must be provided"); } - try { - final Signature ecdsa = Signature.getInstance(algorithm.getJcaAlgoName()); - ecdsa.initSign((ECPrivateKey) key); - ecdsa.update(data); - return ecdsa.sign(); - } catch 
(Exception e) { - throw new ElasticsearchSecurityException("Encountered error attempting to create the JWT RSA Signature", e); + + final Signature ecdsa = Signature.getInstance(algorithm.getJcaAlgoName()); + ecdsa.initSign((ECPrivateKey) key); + ecdsa.update(data); + return convertToJose(ecdsa.sign()); + } + + /** + * Converts a DER Encoded signature to JOSE so that it can be attached to a JWT. See + * the specification + * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/impl/src/main/java/io/jsonwebtoken/impl/crypto/EllipticCurveProvider.java + * + * @param derSignature The DER formatted signature + * @return + * @throws SignatureException + */ + private byte[] convertToJose(byte[] derSignature) throws SignatureException { + if (derSignature.length < 8 || derSignature[0] != 48) { + throw new SignatureException("Invalid DER encoded ECDSA signature"); + } + + int offset; + if (derSignature[1] > 0) { + offset = 2; + } else if (derSignature[1] == (byte) 0x81) { + offset = 3; + } else { + throw new SignatureException("Invalid DER encoded ECDSA signature"); + } + + byte rLength = derSignature[offset + 1]; + + int i = rLength; + while ((i > 0) && (derSignature[(offset + 2 + rLength) - i] == 0)) { + i--; + } + + byte sLength = derSignature[offset + 2 + rLength + 1]; + + int j = sLength; + while ((j > 0) && (derSignature[(offset + 2 + rLength + 2 + sLength) - j] == 0)) { + j--; } + + int rawLen = Math.max(i, j); + rawLen = Math.max(rawLen, signatureLength / 2); + + if ((derSignature[offset - 1] & 0xff) != derSignature.length - offset + || (derSignature[offset - 1] & 0xff) != 2 + rLength + 2 + sLength + || derSignature[offset] != 2 + || derSignature[offset + 2 + rLength] != 2) { + throw new SignatureException("Invalid DER encoded ECDSA signature"); + } + + final byte[] jwtSignature = new byte[2 * rawLen]; + + System.arraycopy(derSignature, (offset + 2 + rLength) - i, jwtSignature, rawLen - i, i); + System.arraycopy(derSignature, (offset + 
2 + rLength + 2 + sLength) - j, jwtSignature, 2 * rawLen - j, j); + + return jwtSignature; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java index c26f21c88474d..95855f4b3a6f2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java @@ -12,8 +12,11 @@ import javax.crypto.Mac; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; import java.security.Key; +import java.security.MessageDigest; import java.util.Arrays; +import java.util.Base64; /** * Class offering necessary functionality for validating the signatures of JWTs that have been signed with a @@ -40,9 +43,10 @@ public HmacSignatureValidator(SignatureAlgorithm algorithm, Key key) { * Validates the signature of a signed JWT by generating the signature using the provided key and verifying that * it matches the provided signature. 
* - * @param data The serialized representation of the JWT payload - * @param expectedSignature The serialized representation of the JWT signature - * @return True if the newly calculated signature of the header and matches the one that was included in the JWT, false otherwise + * @param data The JWT payload + * @param expectedSignature The JWT signature + * @return True if the newly calculated signature of the header and payload matches the one that was included in the JWT, false + * otherwise */ @Override public void validateSignature(byte[] data, byte[] expectedSignature) { @@ -58,9 +62,11 @@ public void validateSignature(byte[] data, byte[] expectedSignature) { final Mac mac = Mac.getInstance(algorithm.getJcaAlgoName()); mac.init(keySpec); final byte[] calculatedSignature = mac.doFinal(data); - if (Arrays.equals(calculatedSignature, expectedSignature) == false) { + if (MessageDigest.isEqual(calculatedSignature, expectedSignature) == false) { throw new ElasticsearchSecurityException("JWT HMAC Signature could not be validated. 
Calculated value was [{}] but the " + - "expected one was [{}]"); + "expected one was [{}]", + Base64.getUrlEncoder().withoutPadding().encodeToString(calculatedSignature), + Base64.getUrlEncoder().withoutPadding().encodeToString(expectedSignature)); } } catch (Exception e) { throw new ElasticsearchSecurityException("Encountered error attempting to validate the JWT HMAC Signature", e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java index 9e4aa6331fb8e..e00e4f1278833 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java @@ -5,6 +5,7 @@ import javax.crypto.Mac; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; +import java.security.GeneralSecurityException; import java.security.Key; public class HmacSigner implements JwtSigner { @@ -26,18 +27,15 @@ public HmacSigner(SignatureAlgorithm algorithm, Key key) { } @Override - public byte[] sign(byte[] data) { + public byte[] sign(byte[] data) throws GeneralSecurityException { if (null == data || data.length == 0) { throw new IllegalArgumentException("JWT data must be provided"); } - try { - final SecretKeySpec keySpec = new SecretKeySpec(key.getEncoded(), algorithm.getJcaAlgoName()); - final Mac mac = Mac.getInstance(algorithm.getJcaAlgoName()); - mac.init(keySpec); - return mac.doFinal(data); - } catch (Exception e) { - throw new ElasticsearchSecurityException("Encountered error attempting to create the JWT HMAC Signature", e); - } + + final SecretKeySpec keySpec = new SecretKeySpec(key.getEncoded(), algorithm.getJcaAlgoName()); + final Mac mac = Mac.getInstance(algorithm.getJcaAlgoName()); + mac.init(keySpec); + return mac.doFinal(data); } } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParser.java similarity index 72% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParser.java index 7406ac50b1349..a7016362e7dd1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenParser.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParser.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; import java.io.IOException; +import java.io.UnsupportedEncodingException; import java.nio.charset.StandardCharsets; import java.security.Key; import java.util.Base64; @@ -28,53 +29,55 @@ /** * Contains the necessary functionality for parsing a serialized OpenID Connect ID Token to a {@link JsonWebToken} */ -public class JsonWebTokenParser { +public class IdTokenParser { private final RPConfiguration rpConfig; - public JsonWebTokenParser(RPConfiguration rpConfig) { + public IdTokenParser(RPConfiguration rpConfig) { this.rpConfig = rpConfig; } /** - * Parses the serialized format of an ID Token into a {@link JsonWebToken}. In doing so it + * Parses the serialized format of an ID Token into a {@link JsonWebToken}. In doing so it: *
    *
  • Validates that the format and structure of the ID Token is correct
  • *
  • Validates that the ID Token is signed and that one of the supported algorithms is used
  • *
  • Validates the signature using the appropriate
  • *
+ * This method does not validate the contents of the ID Token such as expiration time, + * issuer, audience etc. These checks should be performed on the {@link JsonWebToken} by the caller. * - * @param jwt Serialized string representation of the ID Token + * @param idToken Serialized string representation of the ID Token * @param key The {@link Key} to be used for verifying the signature * @return a {@link JsonWebToken} * @throws IOException if the ID Token cannot be deserialized */ - public final JsonWebToken parseAndValidateJwt(String jwt, Key key) throws IOException { - final String[] jwtParts = jwt.split("\\."); - if (jwtParts.length != 3) { + public final JsonWebToken parseAndValidateIdToken(String idToken, Key key) throws IOException { + final String[] idTokenParts = idToken.split("\\."); + if (idTokenParts.length != 3) { throw new IllegalArgumentException("The provided token is not a valid JWT"); } - final String serializedHeader = jwtParts[0]; - final String serializedPayload = jwtParts[1]; - final String serializedSignature = jwtParts[2]; - final String deserializedHeader = deserializePart(serializedHeader); - final String deserializedPayload = deserializePart(serializedPayload); + final String serializedHeader = idTokenParts[0]; + final String serializedPayload = idTokenParts[1]; + final String serializedSignature = idTokenParts[2]; + final String deserializedHeader = decodePartToString(serializedHeader); + final String deserializedPayload = decodePartToString(serializedPayload); final Map headerMap = parseHeader(deserializedHeader); final SignatureAlgorithm algorithm = getAlgorithm(headerMap); - if (algorithm == null || algorithm.equals(SignatureAlgorithm.NONE.name())) { - //TODO what kind of Exception? 
- throw new IllegalStateException("JWT not signed or unrecognised algorithm"); + if (algorithm == null || algorithm.equals(SignatureAlgorithm.NONE)) { + throw new IllegalStateException("ID Token is not signed or the signing algorithm is unsupported"); } if (Strings.hasText(serializedSignature) == false) { - //TODO what kind of Exception? - throw new IllegalStateException("Unsigned JWT"); + throw new IllegalStateException("ID Token is unsigned or malformed. Signature is missing"); + } + if (rpConfig.getAllowedSigningAlgorithms().contains(algorithm.name()) == false) { + throw new IllegalStateException("ID Token is signed with an unsupported algorithm [{" + algorithm.name() + "}]"); } JwtSignatureValidator validator = getValidator(algorithm, key); if (null == validator) { - //TODO what kind of Exception? - throw new IllegalStateException("Wrong algorithm"); + throw new IllegalStateException("ID Token is signed with an unsupported algorithm [{" + algorithm.name() + "}]"); } - final byte[] signatureBytes = serializedSignature.getBytes(StandardCharsets.US_ASCII); + final byte[] signatureBytes = decodePart(serializedSignature); final byte[] data = (serializedHeader + "." 
+ serializedPayload).getBytes(StandardCharsets.UTF_8); validator.validateSignature(data, signatureBytes); final Map payloadMap = parsePayload(deserializedPayload); @@ -85,7 +88,8 @@ public final JsonWebToken parseAndValidateJwt(String jwt, Key key) throws IOExce * Returns the {@link SignatureAlgorithm} that corresponds to the value of the alg claim * * @param header The {@link Map} containing the parsed header claims - * @return the SignatureAlgorithm that corresponds to alg + * @return the SignatureAlgorithm that corresponds to alg or null if the header doesn't contain an alg claim or the algorithm + * is not valid or supported */ private SignatureAlgorithm getAlgorithm(Map header) { if (header.containsKey("alg")) { @@ -95,8 +99,37 @@ private SignatureAlgorithm getAlgorithm(Map header) { } } - private static String deserializePart(String encodedString) throws IOException { - return new String(Base64.getUrlDecoder().decode(encodedString), StandardCharsets.UTF_8.name()); + /** + * URL safe Base64 decode a part of a JWT to a String + * + * @param encodedString the serialized part of the JWT as a string + * @return a JSON String with the JWT representation + * @throws UnsupportedEncodingException if UTF-8 encoding is not supported + */ + private static String decodePartToString(String encodedString) throws UnsupportedEncodingException { + return decodePartToString(encodedString, StandardCharsets.UTF_8.name()); + } + + /** + * URL safe Base64 decode a part of a JWT to a String + * + * @param encodedString the serialized part of the JWT as a string + * @param encodingName the Charset to use for the generated String + * @return a JSON String with the JWT representation + * @throws UnsupportedEncodingException if the provided encodingName is not valid + */ + private static String decodePartToString(String encodedString, String encodingName) throws UnsupportedEncodingException { + return new String(Base64.getUrlDecoder().decode(encodedString), encodingName); + } + + /** + 
* URL safe Base64 decode a part of a JWT to a byte array + * + * @param encodedString the serialized part of the JWT as a string + * @return a byte array with the decoded bytes + */ + private static byte[] decodePart(String encodedString) { + return Base64.getUrlDecoder().decode(encodedString); } /** @@ -118,7 +151,8 @@ private Map parseHeader(String headerJson) throws IOException { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (Claims.HeaderClaims.validHeaderClaims().contains(currentFieldName)) { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); + XContentParserUtils + .ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser::getTokenLocation); if (Strings.hasText(parser.text())) { headerMap.put(currentFieldName, parser.text()); } @@ -197,6 +231,13 @@ private Map parsePayload(String payloadJson) throws IOException } } + /** + * Returns the appropriate {@link JwtSignatureValidator} for the provided {@link SignatureAlgorithm} and key + * + * @param algorithm the {@link SignatureAlgorithm} with which the signature should be validated + * @param key the {@link Key} to use for validating the signature + * @return the appropriate {@link JwtSignatureValidator} or null if the algorithm is not supported or valid + */ private JwtSignatureValidator getValidator(SignatureAlgorithm algorithm, Key key) { if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm)) { return new HmacSignatureValidator(algorithm, key); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java index 63af2a6102286..aec4eb44ff7c7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebToken.java @@ -9,16 +9,17 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.security.GeneralSecurityException; import java.security.Key; +import java.security.SignatureException; import java.util.Base64; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; /** - * A class that represents an OpenID Connect ID token according to https://tools.ietf.org/html/rfc7519. + * A class that represents a JSON Web Token according to https://tools.ietf.org/html/rfc7519. */ public class JsonWebToken { private Map header; @@ -31,6 +32,12 @@ public JsonWebToken(Map header, Map payload) { this.signature = ""; } + public JsonWebToken(Map header, Map payload, String signature) { + this.header = header; + this.payload = payload; + this.signature = signature; + } + public Map getHeader() { return header; } @@ -51,12 +58,18 @@ public String encode() throws IOException { return headerString + "." + payloadString + "." + signature; } - public void sign(Key key) throws IOException { + /** + * Signs the JWT with the provided Key using the algorithm specified in the appropriate header claim + * + * @param key The {@link Key} to sign the JWT with + * @throws GeneralSecurityException if any error is encountered with signing + * @throws IOException if the signature can't be decoded to a String + */ + public void sign(Key key) throws GeneralSecurityException, IOException { SignatureAlgorithm algorithm = getAlgorithm(header); JwtSigner signer = getSigner(algorithm, key); if (null == signer) { - //TODO what kind of Exception? 
- throw new IllegalStateException("Wrong algorithm"); + throw new SignatureException("Unable to sign JWT for specified algorithm"); } String headerString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(header)); String payloadString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(payload)); @@ -74,17 +87,23 @@ public String toString() { return "{header=" + header + ", payload=" + payload + "}"; } - private String mapToJsonString(Map map) throws IOException { - try (XContentBuilder builder = jsonBuilder()) { - builder.startObject(); - for (Map.Entry entry : map.entrySet()) { - builder.field(entry.getKey(), entry.getValue()); - } - builder.endObject(); - return BytesReference.bytes(builder).utf8ToString(); - } + public byte[] encodeSignableContent() throws IOException { + String headerString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(header)); + String payloadString = Base64.getUrlEncoder().withoutPadding().encodeToString(mapToJsonBytes(payload)); + return (headerString + "." 
+ payloadString).getBytes(StandardCharsets.UTF_8); + } + + public byte[] encodeSignature() { + return Base64.getUrlDecoder().decode(signature); } + /** + * Gets the raw bytes of a claims set + * + * @param map The header or payload to get the raw bytes for + * @return a byte array that can be encoded for representation + * @throws IOException if any error is encountered + */ private byte[] mapToJsonBytes(Map map) throws IOException { try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); @@ -96,6 +115,13 @@ private byte[] mapToJsonBytes(Map map) throws IOException { } } + /** + * Returns the appropriate {@link JwtSigner} for the provided {@link SignatureAlgorithm} and key + * + * @param algorithm the {@link SignatureAlgorithm} with which the signature should be created + * @param key the {@link Key} to use for creating the signature + * @return the appropriate {@link JwtSigner} or null if the algorithm is not supported or valid + */ private JwtSigner getSigner(SignatureAlgorithm algorithm, Key key) { if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm)) { return new HmacSigner(algorithm, key); @@ -111,7 +137,8 @@ private JwtSigner getSigner(SignatureAlgorithm algorithm, Key key) { * Returns the {@link SignatureAlgorithm} that corresponds to the value of the alg claim * * @param header The {@link Map} containing the parsed header claims - * @return the SignatureAlgorithm that corresponds to alg + * @return the SignatureAlgorithm that corresponds to alg or null if the header doesn't contain an alg claim or the algorithm + * is not valid or supported */ private SignatureAlgorithm getAlgorithm(Map header) { if (header.containsKey("alg")) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java deleted file mode 100644 index 
e4c44f1a122f0..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwsSignatureValidatorFactory.java +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authc.support.jwt; - -public class JwsSignatureValidatorFactory { -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java index d45598b9ad537..5ca75fc8611ac 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java @@ -5,7 +5,9 @@ */ package org.elasticsearch.xpack.security.authc.support.jwt; +import java.security.GeneralSecurityException; + public interface JwtSigner { - public byte[] sign(byte[] data); + byte[] sign(byte[] data) throws GeneralSecurityException; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java index e0232d2560400..006b5ebc5bbc8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java @@ -49,11 +49,10 @@ public void validateSignature(byte[] data, byte[] signature) { } try { - final byte[] signatureBytes = Base64.getUrlDecoder().decode(signature); final Signature rsa = 
Signature.getInstance(algorithm.getJcaAlgoName()); rsa.initVerify((PublicKey) key); rsa.update(data); - rsa.verify(signatureBytes); + rsa.verify(signature); } catch (Exception e) { throw new ElasticsearchSecurityException("Encountered error attempting to validate the JWT RSA Signature", e); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java index 4590c153cca28..0f5d732afd349 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java @@ -1,7 +1,6 @@ package org.elasticsearch.xpack.security.authc.support.jwt; -import org.elasticsearch.ElasticsearchSecurityException; - +import java.security.GeneralSecurityException; import java.security.Key; import java.security.PrivateKey; import java.security.Signature; @@ -24,17 +23,14 @@ public RsaSigner(SignatureAlgorithm algorithm, Key key) { } @Override - public byte[] sign(byte[] data) { + public byte[] sign(byte[] data) throws GeneralSecurityException { if (null == data || data.length == 0) { throw new IllegalArgumentException("JWT data must be provided"); } - try { - final Signature rsa = Signature.getInstance(algorithm.getJcaAlgoName()); + + final Signature rsa = Signature.getInstance(algorithm.getJcaAlgoName()); rsa.initSign((PrivateKey) key); rsa.update(data); return rsa.sign(); - } catch (Exception e) { - throw new ElasticsearchSecurityException("Encountered error attempting to create the JWT RSA Signature", e); - } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java index ebfef8d9e41f9..c0d96d6d0f773 
100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/SignatureAlgorithm.java @@ -7,6 +7,7 @@ import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; import java.util.stream.Stream; /** @@ -52,4 +53,8 @@ public static List getRsaAlgorithms() { public static List getEcAlgorithms() { return Arrays.asList(ES256, ES384, ES512); } + + public static List getAllNames() { + return Stream.of(SignatureAlgorithm.values()).map(Enum::name).collect(Collectors.toList()); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java deleted file mode 100644 index 7637f359c8ed3..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenParserTests.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authc.support; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; -import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; -import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebTokenParser; - -import javax.crypto.spec.SecretKeySpec; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; - -public class JsonWebTokenParserTests extends ESTestCase { - - public void testIdTokenParsing() throws IOException { - final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViIjo" + - "iMjQ4Mjg5NzYxMDAxIiwiYXVkIjoiczZCaGRSa3F0MyIsIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiZXhwIjoxMzExMjgxOTcwLCJpYXQiOjEzMTEyODA5NzAsIm" + - "5hbWUiOiJKYW5lIERvZSIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLCJiaXJ0aGRhdGUiOiIxOTk0L" + - "TEwLTMxIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG9lL21lLmpwZyJ9.XY8hKQ6nx8K" + - "EfuB907SuImosemSt7qPlg3HAJH85JKI"; - - JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, null)); - final SecretKeySpec keySpec = new SecretKeySpec("ffff".getBytes(), "HmacSHA256"); - JsonWebToken jwt = jwtParser.parseAndValidateJwt(serializedJwt, keySpec); - assertTrue(jwt.getPayload().containsKey("iss")); - assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); - assertTrue(jwt.getPayload().containsKey("sub")); - assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); - assertTrue(jwt.getPayload().containsKey("aud")); - List aud = (List) jwt.getPayload().get("aud"); - assertThat(aud.size(), equalTo(1)); - assertTrue(aud.contains("s6BhdRkqt3")); - assertTrue(jwt.getPayload().containsKey("nonce")); 
- assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); - assertTrue(jwt.getPayload().containsKey("exp")); - assertThat(jwt.getPayload().get("exp"), equalTo(1311281970L)); - assertTrue(jwt.getPayload().containsKey("iat")); - assertThat(jwt.getPayload().get("iat"), equalTo(1311280970L)); - assertTrue(jwt.getPayload().containsKey("name")); - assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); - assertTrue(jwt.getPayload().containsKey("given_name")); - assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); - assertTrue(jwt.getPayload().containsKey("family_name")); - assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); - assertTrue(jwt.getPayload().containsKey("gender")); - assertThat(jwt.getPayload().get("gender"), equalTo("female")); - assertTrue(jwt.getPayload().containsKey("birthdate")); - assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); - assertTrue(jwt.getPayload().containsKey("email")); - assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); - assertTrue(jwt.getPayload().containsKey("picture")); - assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); - assertTrue(jwt.getHeader().containsKey("alg")); - assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); - assertTrue(jwt.getHeader().containsKey("kid")); - assertThat(jwt.getHeader().get("kid"), equalTo("1e9gdk7")); - - } - - public void testIdTokenWithPrivateClaimsParsing() throws IOException { - final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViI" + - "joiMjQ4Mjg5NzYxMDAxIiwiYXVkIjoiczZCaGRSa3F0MyIsIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiZXhwIjoxMzExMjgxOTcwLCJpYXQiOjEzMTEyODA5Nz" + - "AsIm5hbWUiOiJKYW5lIERvZSIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLCJjbGFpbTEiOiJ2YWx" + - 
"1ZTEiLCJjbGFpbTIiOiJ2YWx1ZTIiLCJjbGFpbTMiOiJ2YWx1ZTMiLCJjbGFpbTQiOiJ2YWx1ZTQiLCJiaXJ0aGRhdGUiOiIxOTk0LTEwLTMxIiwiZW1haWwi" + - "OiJqYW5lZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG9lL21lLmpwZyIsImFkZHJlc3MiOnsiY291bnRyeSI6I" + - "kdyZWVjZSIsInJlZ2lvbiI6IkV2aWEifX0.K9nnZaiuF0z8wJUrJQSJSMKQtql3O6xMPYxyEOa7uC4"; - JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, - Arrays.asList("claim1", "claim2", "claim3", "claim4"))); - final SecretKeySpec keySpec = new SecretKeySpec("ffff".getBytes(), "HmacSHA256"); - JsonWebToken jwt = jwtParser.parseAndValidateJwt(serializedJwt, keySpec); - assertTrue(jwt.getPayload().containsKey("iss")); - assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); - assertTrue(jwt.getPayload().containsKey("sub")); - assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); - assertTrue(jwt.getPayload().containsKey("aud")); - List aud = (List) jwt.getPayload().get("aud"); - assertThat(aud.size(), equalTo(1)); - assertTrue(aud.contains("s6BhdRkqt3")); - assertTrue(jwt.getPayload().containsKey("nonce")); - assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); - assertTrue(jwt.getPayload().containsKey("exp")); - assertThat(jwt.getPayload().get("exp"), equalTo(1311281970L)); - assertTrue(jwt.getPayload().containsKey("iat")); - assertThat(jwt.getPayload().get("iat"), equalTo(1311280970L)); - assertTrue(jwt.getPayload().containsKey("name")); - assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); - assertTrue(jwt.getPayload().containsKey("given_name")); - assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); - assertTrue(jwt.getPayload().containsKey("family_name")); - assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); - assertTrue(jwt.getPayload().containsKey("gender")); - assertThat(jwt.getPayload().get("gender"), equalTo("female")); - 
assertTrue(jwt.getPayload().containsKey("birthdate")); - assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); - assertTrue(jwt.getPayload().containsKey("email")); - assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); - assertTrue(jwt.getPayload().containsKey("picture")); - assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); - assertTrue(jwt.getPayload().containsKey("claim1")); - assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); - assertTrue(jwt.getPayload().containsKey("claim2")); - assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); - assertTrue(jwt.getPayload().containsKey("claim3")); - assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); - assertTrue(jwt.getPayload().containsKey("claim4")); - assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); - assertTrue(jwt.getPayload().containsKey("address")); - Map expectedAddress = new HashMap<>(); - expectedAddress.put("country", "Greece"); - expectedAddress.put("region", "Evia"); - assertThat(jwt.getPayload().get("address"), equalTo(expectedAddress)); - assertTrue(jwt.getHeader().containsKey("alg")); - assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); - assertTrue(jwt.getHeader().containsKey("kid")); - assertThat(jwt.getHeader().get("kid"), equalTo("1e9gdk7")); - } - - public void testIdTokenWithMutipleAudiencesParsing() throws IOException { - final String serializedJwt = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjFlOWdkazcifQ.eyJpc3MiOiJodHRwOi8vc2VydmVyLmV4YW1wbGUuY29tIiwic3ViI" + - "joiMjQ4Mjg5NzYxMDAxIiwiYXVkIjpbInM2QmhkUmtxdDMiLCJvdGhlcl9hdWRpZW5jZSJdLCJub25jZSI6Im4tMFM2X1d6QTJNaiIsImV4cCI6MTMxMTI4MT" + - "k3MCwiaWF0IjoxMzExMjgwOTcwLCJuYW1lIjoiSmFuZSBEb2UiLCJnaXZlbl9uYW1lIjoiSmFuZSIsImZhbWlseV9uYW1lIjoiRG9lIiwiZ2VuZGVyIjoiZmV" + - "tYWxlIiwiY2xhaW0xIjoidmFsdWUxIiwiY2xhaW0yIjoidmFsdWUyIiwiY2xhaW0zIjoidmFsdWUzIiwiY2xhaW00IjoidmFsdWU0IiwiYmlydGhkYXRlIjoi" + - 
"MTk5NC0xMC0zMSIsImVtYWlsIjoiamFuZWRvZUBleGFtcGxlLmNvbSIsInBpY3R1cmUiOiJodHRwOi8vZXhhbXBsZS5jb20vamFuZWRvZS9tZS5qcGcifQ.xn" + - "HQXmN17lnkkBM-DX3kFRfr7Edk1OYoAPpCwCFOsvA"; - JsonWebTokenParser jwtParser = new JsonWebTokenParser(new RPConfiguration("clientId", "redirectUri", "code", null, - Arrays.asList("claim1", "claim2", "claim3", "claim4"))); - final SecretKeySpec keySpec = new SecretKeySpec("ffff".getBytes(), "HmacSHA256"); - JsonWebToken jwt = jwtParser.parseAndValidateJwt(serializedJwt, keySpec); - assertTrue(jwt.getPayload().containsKey("iss")); - assertThat(jwt.getPayload().get("iss"), equalTo("http://server.example.com")); - assertTrue(jwt.getPayload().containsKey("sub")); - assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); - assertTrue(jwt.getPayload().containsKey("aud")); - List aud = (List) jwt.getPayload().get("aud"); - assertThat(aud.size(), equalTo(2)); - assertTrue(aud.contains("s6BhdRkqt3")); - assertTrue(aud.contains("other_audience")); - assertTrue(jwt.getPayload().containsKey("nonce")); - assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); - assertTrue(jwt.getPayload().containsKey("exp")); - assertThat(jwt.getPayload().get("exp"), equalTo(1311281970L)); - assertTrue(jwt.getPayload().containsKey("iat")); - assertThat(jwt.getPayload().get("iat"), equalTo(1311280970L)); - assertTrue(jwt.getPayload().containsKey("name")); - assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); - assertTrue(jwt.getPayload().containsKey("given_name")); - assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); - assertTrue(jwt.getPayload().containsKey("family_name")); - assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); - assertTrue(jwt.getPayload().containsKey("gender")); - assertThat(jwt.getPayload().get("gender"), equalTo("female")); - assertTrue(jwt.getPayload().containsKey("birthdate")); - assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); - 
assertTrue(jwt.getPayload().containsKey("email")); - assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); - assertTrue(jwt.getPayload().containsKey("picture")); - assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); - assertTrue(jwt.getPayload().containsKey("claim1")); - assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); - assertTrue(jwt.getPayload().containsKey("claim2")); - assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); - assertTrue(jwt.getPayload().containsKey("claim3")); - assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); - assertTrue(jwt.getPayload().containsKey("claim4")); - assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); - assertTrue(jwt.getHeader().containsKey("alg")); - assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); - assertTrue(jwt.getHeader().containsKey("kid")); - assertThat(jwt.getHeader().get("kid"), equalTo("1e9gdk7")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java new file mode 100644 index 0000000000000..f5ff440449168 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java @@ -0,0 +1,363 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ssl.PemUtils; +import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; + +import javax.crypto.spec.SecretKeySpec; +import java.io.IOException; +import java.nio.file.Path; +import java.security.PublicKey; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class IdTokenParserTests extends ESTestCase { + + public void testIdTokenParsing() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYy" + + "MzkwMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpc" + + "nRoZGF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbm" + + "Vkb2UvbWUuanBnIn0.bpG9QZk9uykstyn2rv2w_7NkS-rerdX78_ehxli8RTM"; + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, null); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); + JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getPayload().containsKey("aud")); + List aud = 
(List) jwt.getPayload().get("aud"); + assertThat(aud.size(), equalTo(1)); + assertTrue(aud.contains("s6BhdRkqt3")); + assertTrue(jwt.getPayload().containsKey("nonce")); + assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); + assertTrue(jwt.getPayload().containsKey("exp")); + assertThat(jwt.getPayload().get("exp"), equalTo(1516339022L)); + assertTrue(jwt.getPayload().containsKey("iat")); + assertThat(jwt.getPayload().get("iat"), equalTo(1516239022L)); + assertTrue(jwt.getPayload().containsKey("name")); + assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(jwt.getPayload().containsKey("given_name")); + assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(jwt.getPayload().containsKey("family_name")); + assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(jwt.getPayload().containsKey("gender")); + assertThat(jwt.getPayload().get("gender"), equalTo("female")); + assertTrue(jwt.getPayload().containsKey("birthdate")); + assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(jwt.getPayload().containsKey("email")); + assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(jwt.getPayload().containsKey("picture")); + assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + } + + public void testIdTokenWithPrivateClaimsParsing() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYy" + + "MzkwMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpc" + + 
"nRoZGF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbm" + + "Vkb2UvbWUuanBnIiwiY2xhaW0xIjoidmFsdWUxIiwiY2xhaW0yIjoidmFsdWUyIiwiY2xhaW0zIjoidmFsdWUzIiwiY2xhaW00IjoidmFsdWU0Iiw" + + "iYWRkcmVzcyI6eyJjb3VudHJ5IjoiR3JlZWNlIiwicmVnaW9uIjoiRXZpYSJ9fQ.hvG90pJHvjPkZaf_ll3WMeSvfHIzx82zYs5iuygopQo"; + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), + null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); + JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getPayload().containsKey("aud")); + List aud = (List) jwt.getPayload().get("aud"); + assertThat(aud.size(), equalTo(1)); + assertTrue(aud.contains("s6BhdRkqt3")); + assertTrue(jwt.getPayload().containsKey("nonce")); + assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); + assertTrue(jwt.getPayload().containsKey("exp")); + assertThat(jwt.getPayload().get("exp"), equalTo(1516339022L)); + assertTrue(jwt.getPayload().containsKey("iat")); + assertThat(jwt.getPayload().get("iat"), equalTo(1516239022L)); + assertTrue(jwt.getPayload().containsKey("name")); + assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(jwt.getPayload().containsKey("given_name")); + assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(jwt.getPayload().containsKey("family_name")); + assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(jwt.getPayload().containsKey("gender")); + 
assertThat(jwt.getPayload().get("gender"), equalTo("female")); + assertTrue(jwt.getPayload().containsKey("birthdate")); + assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(jwt.getPayload().containsKey("email")); + assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(jwt.getPayload().containsKey("picture")); + assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(jwt.getPayload().containsKey("claim1")); + assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); + assertTrue(jwt.getPayload().containsKey("claim2")); + assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); + assertTrue(jwt.getPayload().containsKey("claim3")); + assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); + assertTrue(jwt.getPayload().containsKey("claim4")); + assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); + assertTrue(jwt.getPayload().containsKey("address")); + Map expectedAddress = new HashMap<>(); + expectedAddress.put("country", "Greece"); + expectedAddress.put("region", "Evia"); + assertThat(jwt.getPayload().get("address"), equalTo(expectedAddress)); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + } + + public void testIdTokenWithMutipleAudiencesParsing() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiIyNDgyOD" + + "k3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOlsiczZCaGRSa3F0MyIsIm90aGVyX2F1ZGllbmNlIl0sIm5vbmNlIjoibi0wUzZfV3pBMk1qIiwiaW" + + "F0IjoxNTE2MjM5MDIyLCJleHAiOjE1MTYzMzkwMjIsImdpdmVuX25hbWUiOiJKYW5lIiwiZmFtaWx5X25hbWUiOiJEb2UiLCJnZW5kZXIiOiJmZW1hbGUiLC" + + "JiaXJ0aGRhdGUiOiIxOTk0LTEwLTMxIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIiwicGljdHVyZSI6Imh0dHA6Ly9leGFtcGxlLmNvbS9qYW5lZG" + + 
"9lL21lLmpwZyIsImNsYWltMSI6InZhbHVlMSIsImNsYWltMiI6InZhbHVlMiIsImNsYWltMyI6InZhbHVlMyIsImNsYWltNCI6InZhbHVlNCIsImFkZHJlc3" + + "MiOnsiY291bnRyeSI6IkdyZWVjZSIsInJlZ2lvbiI6IkV2aWEifX0.bo2s5D0i87Ij5TSdWnoCmwgM_0dagvscCOqs-luM1yI"; + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), + null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); + JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getPayload().containsKey("aud")); + List aud = (List) jwt.getPayload().get("aud"); + assertThat(aud.size(), equalTo(2)); + assertTrue(aud.contains("s6BhdRkqt3")); + assertTrue(aud.contains("other_audience")); + assertTrue(jwt.getPayload().containsKey("nonce")); + assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); + assertTrue(jwt.getPayload().containsKey("exp")); + assertThat(jwt.getPayload().get("exp"), equalTo(1516339022L)); + assertTrue(jwt.getPayload().containsKey("iat")); + assertThat(jwt.getPayload().get("iat"), equalTo(1516239022L)); + assertTrue(jwt.getPayload().containsKey("name")); + assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(jwt.getPayload().containsKey("given_name")); + assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(jwt.getPayload().containsKey("family_name")); + assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(jwt.getPayload().containsKey("gender")); + assertThat(jwt.getPayload().get("gender"), equalTo("female")); + 
assertTrue(jwt.getPayload().containsKey("birthdate")); + assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(jwt.getPayload().containsKey("email")); + assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(jwt.getPayload().containsKey("picture")); + assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(jwt.getPayload().containsKey("claim1")); + assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); + assertTrue(jwt.getPayload().containsKey("claim2")); + assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); + assertTrue(jwt.getPayload().containsKey("claim3")); + assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); + assertTrue(jwt.getPayload().containsKey("claim4")); + assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + } + + public void testHmacSignatureVerification() throws IOException { + final String serializedJwt = "eyJhbGciOiJIUzM4NCIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYy" + + "MzkwMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpc" + + "nRoZGF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbm" + + "Vkb2UvbWUuanBnIn0.BALROUxYSPxhNeETrsn51f6UT7lksaAwCVoBxwj3Yd7L1Dxyzm-Dfhyv0GvJp3Ip"; + + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), + null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA384"); + 
JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertTrue(jwt.getPayload().containsKey("iss")); + assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt.getPayload().containsKey("sub")); + assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt.getHeader().containsKey("alg")); + assertThat(jwt.getHeader().get("alg"), equalTo("HS384")); + + final String serializedJwt512 = "eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYyMzk" + + "wMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpcnRoZGF" + + "0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbmVkb2UvbWU" + + "uanBnIn0.b-wg-whI_4hzmSn_lVmAfBt2YHjeeX9800jYBsiRLpGJ_WB8sCIIASTUpHiwT8RxqXAgn_nr0JsKTQkhJT6frg"; + + final SecretKeySpec keySpec512 = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA512"); + JsonWebToken jwt512 = jwtParser.parseAndValidateIdToken(serializedJwt512, keySpec512); + assertTrue(jwt512.getPayload().containsKey("iss")); + assertThat(jwt512.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt512.getPayload().containsKey("sub")); + assertThat(jwt512.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt512.getHeader().containsKey("alg")); + assertThat(jwt512.getHeader().get("alg"), equalTo("HS512")); + } + + public void testRsaSignatureVerification() throws Exception { + Path keyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem").toURI()); + final PublicKey publicKey = PemUtils.readPublicKey(keyPath); + final String serliazedJwt256 = 
"eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYyMz" + + "kwMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpcnRoZ" + + "GF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbmVkb2Uv" + + "bWUuanBnIn0.EaczUHQedtRRjeolzutBNQop4CeDz2K-W5sYC7OSLB3dCeUE4DBcP7V-f6ekmpz_QACK-uK9X2qAYrUsHworddBGPy-19TMrA7Lz8so" + + "ZVvhDy9EeBr4QxtV63Oj8tZVn6ThnZoIvTXZDOwrEs1lcHrYpnubzdSH0pzC1kZQNC8FYwi7BnAG9T-c_mo1qgGRkzGZ-TE7mtJVeQcKepmenm9kzdF" + + "-fap22rHzW5bWr-DAtyXP14BgqeeXz0ZM3YlOOzIqmRBrpP77mxQXDe8cwxgpR2fk0SIw8hkDyYhb3Y_KnufuT1nV2xhgDh7B3e8aJQNItddI8bp3FA" + + "tS75HeVFzRdfCscm0Huoci9MBsEP57YENjEW6MMGni0ukhygWTXQmSWEkvFmDMpRXQQjdBIjvW8wZVjwxUSa3Krp4z08GVx2NoZlcFYUT8_3NrRTKnl" + + "BunJdLWJG0lcFRGHwX4PQoCofO-jZdRPqfeULb8pMP5I6D2Ra0atV_1UFm6awSdngpvS-mP7v3dZALsT13nRdwnMEUfVwTyjOAGiiN01gDalKPVGCsO" + + "5idqMp1xxt-JIwKgaegghXpqDApKzCeyY5Z672GJHBNhrgugJr-WSGrZIpsm_xhfs4ZlrsjKUqmp8M0AapRtz4x4Z61qTkSXgVctcsoa0xkQppLybwo" + + "4ASgo"; + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), + null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + JsonWebToken jwt256 = jwtParser.parseAndValidateIdToken(serliazedJwt256, publicKey); + assertTrue(jwt256.getPayload().containsKey("iss")); + assertThat(jwt256.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt256.getPayload().containsKey("sub")); + assertThat(jwt256.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt256.getHeader().containsKey("alg")); + assertThat(jwt256.getHeader().get("alg"), equalTo("RS256")); + + // RS384 + final String serliazedJwt384 = 
"eyJhbGciOiJSUzM4NCIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYyMz" + + "kwMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpcnRoZ" + + "GF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbmVkb2Uv" + + "bWUuanBnIn0.QDiQQ2LGfeCd4BwMOqWB-cjxWh-Pp4OXj3EM9HCG94OL-vxZxyj76QqYIOpLHzAarfCmGdN4FHkODkI6XmA2yOIWJeElWKXnDqAKqoX" + + "d6UoAdl9qoPagkDayrQ0y-KaioZMBvzbf6r2nTS5lbnVOcZ5r7HLxu_WWVed8r4GwyQzx9ZHlkHlvv7d5n47f6LQ5ngvHaA7rDQC9SJIYicaDhHYxoq" + + "WBMdk2J31zpUAjdxFQ1TiqRzOm9-RD2gj2254GzqBhvgB4xShLYaESpZ4neRF-yOvxfoVen1ZjEkRhlGY6baFC8fScOtOGJQoL1wllNDdm-CQ4ZrCZW" + + "huyMrGzSCZ1Q2zzUiGtaY2H8M47g8xK6q8kEqCIWO_nQa1G8464HLpJDkBF7GTAG36lEb2kuLOElK_p2xmrmAMoyEn9-PVjpN8vhFXG0lIzR3FzjvJD" + + "h3Arz8djUxIuv4dJjDiS-50hNkpVD6rhKW5a6hjywyvsFenQmT9SLf_iu9m5Es0JNU3zzlbM3H2zHaPmj4ACiXSWwgphzgnpCbGhTjqEb6uBTxUXpku" + + "zlDo2UW5Pb1j0aoJ0cMPpz9NJgrdiJq1vzvhBHnXQuKTV7TcdumihoZMLIKPWUtLuYbF1rTLxE0c7IZW_qE7hXp4IZDBtp62IpW9mIdNQbYBta5lVE-" + + "GkYL0"; + + JsonWebToken jwt384 = jwtParser.parseAndValidateIdToken(serliazedJwt384, publicKey); + assertTrue(jwt384.getPayload().containsKey("iss")); + assertThat(jwt384.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt384.getPayload().containsKey("sub")); + assertThat(jwt384.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt384.getHeader().containsKey("alg")); + assertThat(jwt384.getHeader().get("alg"), equalTo("RS384")); + assertThat(jwt256.getPayload(), equalTo(jwt384.getPayload())); + + // RS512 + final String serliazedJwt512 = "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJodHRwOi8vb3AuZXhhbXBsZS5jb20iLCJzdWIiOiI" + + "yNDgyODk3NjEwMDEiLCJuYW1lIjoiSmFuZSBEb2UiLCJhdWQiOiJzNkJoZFJrcXQzIiwibm9uY2UiOiJuLTBTNl9XekEyTWoiLCJpYXQiOjE1MTYyMz" + + 
"kwMjIsImV4cCI6MTUxNjMzOTAyMiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJmYW1pbHlfbmFtZSI6IkRvZSIsImdlbmRlciI6ImZlbWFsZSIsImJpcnRoZ" + + "GF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbmVkb2Uv" + + "bWUuanBnIn0.j1yxslZtV3Cgd3pqrgtA9ysAAAq0-WyPzSmjTUWp3N-wepTW6lV4DuCdTBVYGuYsMmTzts5AdFCEwmDxKX5fno63vt1gwxM1cS9VSxD" + + "4OhvzIoGoOoQNKPLPNC40hTlh-qOpwTl8WpTAMn_bEzykcIagFEt-MuQJ_0uTAYsW3PdumE8vJKROJrOnoG6085r8VaNfuNzWOyRlZlu_y_xRgOFYG1" + + "2dJseIMPIuf3BRVM2768fZirJVk_N6N1SAIeZOs3l7nDZ5qiB7wHiH_LURBXO4dZKo0TKpXx8XZfzbNBwk7yC5ftXeXeOPkUEODw2Iy4dO_Pm_-rDX0" + + "WqZID8f8fs69qc8_uqcBb6zGEN1iGuMe-FttvATTxdtfG912850wvLu-TBUBN_1UUw19k9T7KrKiLeIxUmxTONB9kIet0ga83ByIW0c72SSPBKPITZR" + + "mMy6ZaIRtW4gKovRfvRzhKhmSHWGrx9MHMabCFm3uQYo39Lai7QOymZqXlUBfsig1yicRwkc3JCQ9IOJXtu1SyLw0g01oU8OPpc9ziTkmCw-SiKB9Aw" + + "mqhqFxmMTd1o44ItKbTvCPep1Ss66Vku_zv4VprsdalDdy4gcKXglWeviStMa4jIoR6UalPBAT44Lb3zvNhAQlkfH2avhkUZwFKlANuO4PKLhKlJtV7" + + "SJ59E"; + + JsonWebToken jwt512 = jwtParser.parseAndValidateIdToken(serliazedJwt512, publicKey); + assertTrue(jwt512.getPayload().containsKey("iss")); + assertThat(jwt512.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt512.getPayload().containsKey("sub")); + assertThat(jwt512.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt512.getHeader().containsKey("alg")); + assertThat(jwt512.getHeader().get("alg"), equalTo("RS512")); + assertThat(jwt512.getPayload(), equalTo(jwt384.getPayload())); + assertThat(jwt512.getPayload(), equalTo(jwt256.getPayload())); + } + + public void testEcSignatureVerification() throws Exception { + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), + null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + Path keyPath256 = PathUtils.get(IdTokenParserTests.class.getResource + 
("/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_256.pem").toURI()); + Path keyPath384 = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_384.pem").toURI()); + Path keyPath512 = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_512.pem").toURI()); + final PublicKey publicKey256 = PemUtils.readPublicKey(keyPath256); + final PublicKey publicKey384 = PemUtils.readPublicKey(keyPath384); + final PublicKey publicKey512 = PemUtils.readPublicKey(keyPath512); + // ES256 + final String serliazedJwt256 = "eyJhbGciOiJFUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIyNDgyODk3NjEwMDEiLCJiaXJ0aGRhdGUiOiIxOTk0LT" + + "EwLTMxIiwiZ2VuZGVyIjoiZmVtYWxlIiwiaXNzIjoiaHR0cDovL29wLmV4YW1wbGUuY29tIiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJub25jZSI6Im4tMFM2" + + "X1d6QTJNaiIsInBpY3R1cmUiOiJodHRwOi8vZXhhbXBsZS5jb20vamFuZWRvZS9tZS5qcGciLCJhdWQiOiJzNkJoZFJrcXQzIiwibmFtZSI6IkphbmUgRG" + + "9lIiwiZXhwIjoxNTE2MzM5MDIyLCJpYXQiOjE1MTYyMzkwMjIsImZhbWlseV9uYW1lIjoiRG9lIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIn0." 
+ + "SRRZ4EUe4GG6iZeAkGVuAhhTunfd8xwfd0DYn8SIu3TpiU-jcfrIsYl2Eqv1K3STyRvKUxCFXgb8ziTtA8iqIQ"; + + + JsonWebToken jwt256 = jwtParser.parseAndValidateIdToken(serliazedJwt256, publicKey256); + assertTrue(jwt256.getPayload().containsKey("iss")); + assertThat(jwt256.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt256.getPayload().containsKey("sub")); + assertThat(jwt256.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt256.getHeader().containsKey("alg")); + assertThat(jwt256.getHeader().get("alg"), equalTo("ES256")); + // ES384 + final String serliazedJwt384 = "eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIyNDgyODk3NjEwMDEiLCJiaXJ0aGRhdGUiOiIxOTk0LT" + + "EwLTMxIiwiZ2VuZGVyIjoiZmVtYWxlIiwiaXNzIjoiaHR0cDovL29wLmV4YW1wbGUuY29tIiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJub25jZSI6Im4tMFM2" + + "X1d6QTJNaiIsInBpY3R1cmUiOiJodHRwOi8vZXhhbXBsZS5jb20vamFuZWRvZS9tZS5qcGciLCJhdWQiOiJzNkJoZFJrcXQzIiwibmFtZSI6IkphbmUgRG" + + "9lIiwiZXhwIjoxNTE2MzM5MDIyLCJpYXQiOjE1MTYyMzkwMjIsImZhbWlseV9uYW1lIjoiRG9lIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIn0." 
+ + "vyRAaD8ThjK9EvFt4_Lwqe2So_ZmEA4BeZiLoTSVCxlgYLNHFI6Ip01IB-5oN1pCPsId-SXZyFq-YMcgP_bIyLGCPUc5faU8XpHNhtloj4WBR_k1ZSH23g" + + "gk6hps2JjD"; + + JsonWebToken jwt384 = jwtParser.parseAndValidateIdToken(serliazedJwt384, publicKey384); + assertTrue(jwt384.getPayload().containsKey("iss")); + assertThat(jwt384.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt384.getPayload().containsKey("sub")); + assertThat(jwt384.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt384.getHeader().containsKey("alg")); + assertThat(jwt384.getHeader().get("alg"), equalTo("ES384")); + // ES512 + final String serliazedJwt512 = "eyJhbGciOiJFUzUxMiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIyNDgyODk3NjEwMDEiLCJiaXJ0aGRhdGUiOiIxOTk0LTEwLT" + + "MxIiwiZ2VuZGVyIjoiZmVtYWxlIiwiaXNzIjoiaHR0cDovL29wLmV4YW1wbGUuY29tIiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJub25jZSI6Im4tMFM2X1d6QTJN" + + "aiIsInBpY3R1cmUiOiJodHRwOi8vZXhhbXBsZS5jb20vamFuZWRvZS9tZS5qcGciLCJhdWQiOiJzNkJoZFJrcXQzIiwibmFtZSI6IkphbmUgRG9lIiwiZXhwIj" + + "oxNTE2MzM5MDIyLCJpYXQiOjE1MTYyMzkwMjIsImZhbWlseV9uYW1lIjoiRG9lIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIn0.AU_7xDKGYupo9nt9" + + "-RrD7RSurkVX-ntyHycjH1SLKCTT8eWLfXHnklEhqCUTPwAmG2iKolsRN6C07fctAcsYeGkSAFw9QhW0rvPNXClAB4wIZiNU1CI2l0I0vpY43L0o6Eaucx-s42" + + "avqYalxOHimkxxzI1LlhDjz8XebkXWDtbN-AB2"; + + JsonWebToken jwt512 = jwtParser.parseAndValidateIdToken(serliazedJwt512, publicKey512); + assertTrue(jwt512.getPayload().containsKey("iss")); + assertThat(jwt512.getPayload().get("iss"), equalTo("http://op.example.com")); + assertTrue(jwt512.getPayload().containsKey("sub")); + assertThat(jwt512.getPayload().get("sub"), equalTo("248289761001")); + assertTrue(jwt512.getHeader().containsKey("alg")); + assertThat(jwt512.getHeader().get("alg"), equalTo("ES512")); + } + + public void testNotAllowedSignatureAlgorithm() throws Exception { + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", Collections.singletonList("HS512"), + null, 
Arrays.asList("claim1", "claim2", "claim3", "claim4")); + IdTokenParser jwtParser = new IdTokenParser(rpConfig); + final String serliazedJwt384 = "eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIyNDgyODk3NjEwMDEiLCJiaXJ0aGRhdGUiOiIxOTk0LT" + + "EwLTMxIiwiZ2VuZGVyIjoiZmVtYWxlIiwiaXNzIjoiaHR0cDovL29wLmV4YW1wbGUuY29tIiwiZ2l2ZW5fbmFtZSI6IkphbmUiLCJub25jZSI6Im4tMFM2" + + "X1d6QTJNaiIsInBpY3R1cmUiOiJodHRwOi8vZXhhbXBsZS5jb20vamFuZWRvZS9tZS5qcGciLCJhdWQiOiJzNkJoZFJrcXQzIiwibmFtZSI6IkphbmUgRG" + + "9lIiwiZXhwIjoxNTE2MzM5MDIyLCJpYXQiOjE1MTYyMzkwMjIsImZhbWlseV9uYW1lIjoiRG9lIiwiZW1haWwiOiJqYW5lZG9lQGV4YW1wbGUuY29tIn0." + + "vyRAaD8ThjK9EvFt4_Lwqe2So_ZmEA4BeZiLoTSVCxlgYLNHFI6Ip01IB-5oN1pCPsId-SXZyFq-YMcgP_bIyLGCPUc5faU8XpHNhtloj4WBR_k1ZSH23g" + + "gk6hps2JjD"; + Path keyPath384 = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_384.pem").toURI()); + final PublicKey publicKey384 = PemUtils.readPublicKey(keyPath384); + Exception e = expectThrows(IllegalStateException.class, () -> jwtParser.parseAndValidateIdToken(serliazedJwt384, publicKey384)); + assertThat(e.getMessage(), containsString("ID Token is signed with an unsupported algorithm")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenTests.java similarity index 95% rename from x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java rename to x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenTests.java index 26395d63d9161..6a34f91b1c84e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/JsonWebTokenTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenTests.java @@ -3,7 +3,7 @@ 
* or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.authc.support; +package org.elasticsearch.xpack.security.authc.support.jwt; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java new file mode 100644 index 0000000000000..5df8054a5a7ad --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ssl.PemUtils; + +import javax.crypto.spec.SecretKeySpec; +import java.nio.file.Path; +import java.security.PrivateKey; +import java.security.PublicKey; + +import static org.hamcrest.Matchers.containsString; + +public class JwtSignatureValidatorTests extends ESTestCase { + + public void testOnlyAcceptCorrectKeyAndAlgorithm() throws Exception { + SignatureAlgorithm hmacAlgo = randomFrom(SignatureAlgorithm.getHmacAlgorithms()); + final SecretKeySpec hmacKey = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), hmacAlgo.getJcaAlgoName()); + SignatureAlgorithm rsaAlgo = randomFrom(SignatureAlgorithm.getRsaAlgorithms()); + Path publicKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem").toURI()); + final PublicKey rsaPublicKey = PemUtils.readPublicKey(publicKeyPath); + SignatureAlgorithm ecAlgo = randomFrom(SignatureAlgorithm.getEcAlgorithms()); + Tuple ecKeyPair = getEcKeyPairForAlgorithm(ecAlgo); + + IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> { + new HmacSignatureValidator(hmacAlgo, rsaPublicKey); + }); + assertThat(e1.getMessage(), containsString("using a SecretKey but a [sun.security.rsa.RSAPublicKeyImpl] is provided")); + + IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> { + new HmacSignatureValidator(rsaAlgo, hmacKey); + }); + assertThat(e2.getMessage(), containsString("Unsupported algorithm RS")); + + IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class, () -> { + new RsaSignatureValidator(hmacAlgo, rsaPublicKey); + }); + assertThat(e3.getMessage(), containsString("Unsupported algorithm HS")); + + IllegalArgumentException e4 = 
expectThrows(IllegalArgumentException.class, () -> { + new RsaSignatureValidator(rsaAlgo, hmacKey); + }); + assertThat(e4.getMessage(), containsString("using a PublicKey but a [javax.crypto.spec.SecretKeySpec] is provided")); + + IllegalArgumentException e5 = expectThrows(IllegalArgumentException.class, () -> { + new EcSignatureValidator(ecAlgo, ecKeyPair.v1()); + }); + assertThat(e5.getMessage(), containsString("using an ECPublicKey but a [sun.security.ec.ECPrivateKeyImpl] is provided")); + } + + private Tuple getEcKeyPairForAlgorithm(SignatureAlgorithm algorithm) throws Exception { + String keyLength = algorithm.name().replace("ES", ""); + Path privateKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_" + keyLength + ".pem").toURI()); + Path publicKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_" + keyLength + ".pem").toURI()); + return new Tuple<>(PemUtils.readPrivateKey(privateKeyPath, () -> null), PemUtils.readPublicKey(publicKeyPath)); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java new file mode 100644 index 0000000000000..32b149752b1b2 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.jwt; + +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ssl.PemUtils; + +import javax.crypto.spec.SecretKeySpec; +import java.nio.file.Path; +import java.security.PrivateKey; +import java.security.PublicKey; + +import static org.hamcrest.Matchers.containsString; + +public class JwtSignerTests extends ESTestCase { + + public void testOnlyAcceptCorrectKeyAndAlgorithm() throws Exception { + SignatureAlgorithm hmacAlgo = randomFrom(SignatureAlgorithm.getHmacAlgorithms()); + final SecretKeySpec hmacKey = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), hmacAlgo.getJcaAlgoName()); + SignatureAlgorithm rsaAlgo = randomFrom(SignatureAlgorithm.getRsaAlgorithms()); + Path privateKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem").toURI()); + final PrivateKey rsaPrivateKey = PemUtils.readPrivateKey(privateKeyPath, () -> null); + SignatureAlgorithm ecAlgo = randomFrom(SignatureAlgorithm.getEcAlgorithms()); + Tuple ecKeyPair = getEcKeyPairForAlgorithm(ecAlgo); + + IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> { + new HmacSigner(hmacAlgo, rsaPrivateKey); + }); + assertThat(e1.getMessage(), containsString("using a SecretKey but a [sun.security.rsa.RSAPrivateCrtKeyImpl] is provided")); + + IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> { + new HmacSigner(rsaAlgo, hmacKey); + }); + assertThat(e2.getMessage(), containsString("Unsupported algorithm RS")); + + IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class, () -> { + new RsaSigner(hmacAlgo, rsaPrivateKey); + }); + assertThat(e3.getMessage(), containsString("Unsupported algorithm HS")); + + IllegalArgumentException e4 = 
expectThrows(IllegalArgumentException.class, () -> { + new RsaSigner(rsaAlgo, hmacKey); + }); + assertThat(e4.getMessage(), containsString("using a PrivateKey but a [javax.crypto.spec.SecretKeySpec] is provided")); + + IllegalArgumentException e5 = expectThrows(IllegalArgumentException.class, () -> { + new EcSigner(ecAlgo, ecKeyPair.v2()); + }); + assertThat(e5.getMessage(), containsString("using a ECPrivateKey but a [sun.security.ec.ECPublicKeyImpl] is provided")); + } + + public void testSignAndValidateHmacSignatures() throws Exception { + SignatureAlgorithm algorithm = randomFrom(SignatureAlgorithm.getHmacAlgorithms()); + JsonWebToken token = new JsonWebTokenBuilder() + .algorithm(algorithm.name()) + .type("JWT") + .subject("subject") + .issuer("theissuer") + .build(); + final SecretKeySpec key = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), algorithm.getJcaAlgoName()); + JwtSigner signer = new HmacSigner(algorithm, key); + JwtSignatureValidator validator = new HmacSignatureValidator(algorithm, key); + validator.validateSignature(token.encodeSignableContent(), signer.sign(token.encodeSignableContent())); + } + + public void testSignAndValidateRsaSignatures() throws Exception { + Path publicKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem").toURI()); + final PublicKey publicKey = PemUtils.readPublicKey(publicKeyPath); + Path privateKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem").toURI()); + final PrivateKey privateKey = PemUtils.readPrivateKey(privateKeyPath, () -> null); + SignatureAlgorithm algorithm = randomFrom(SignatureAlgorithm.getRsaAlgorithms()); + JsonWebToken token = new JsonWebTokenBuilder() + .algorithm(algorithm.name()) + .type("JWT") + .subject("subject") + .issuer("theissuer") + .build(); + JwtSigner signer = new RsaSigner(algorithm, privateKey); + 
JwtSignatureValidator validator = new RsaSignatureValidator(algorithm, publicKey); + validator.validateSignature(token.encodeSignableContent(), signer.sign(token.encodeSignableContent())); + } + + public void testSignAndValidateEcSignatures() throws Exception { + SignatureAlgorithm algorithm = randomFrom(SignatureAlgorithm.getEcAlgorithms()); + JsonWebToken token = new JsonWebTokenBuilder() + .algorithm(algorithm.name()) + .type("JWT") + .subject("subject") + .issuer("theissuer") + .build(); + Tuple keyPair = getEcKeyPairForAlgorithm(algorithm); + JwtSigner signer = new EcSigner(algorithm, keyPair.v1()); + JwtSignatureValidator validator = new EcSignatureValidator(algorithm, keyPair.v2()); + validator.validateSignature(token.encodeSignableContent(), signer.sign(token.encodeSignableContent())); + } + + private Tuple getEcKeyPairForAlgorithm(SignatureAlgorithm algorithm) throws Exception { + String keyLength = algorithm.name().replace("ES", ""); + Path privateKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_" + keyLength + ".pem").toURI()); + Path publicKeyPath = PathUtils.get(IdTokenParserTests.class.getResource + ("/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_" + keyLength + ".pem").toURI()); + return new Tuple<>(PemUtils.readPrivateKey(privateKeyPath, () -> null), PemUtils.readPublicKey(publicKeyPath)); + } +} diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_256.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_256.pem new file mode 100644 index 0000000000000..2aba5180b5a4d --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_256.pem @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIDo2Lj/o6AS7o143K8Az80A0vRSU+AQW9btj4IXcwipooAoGCCqGSM49 
+AwEHoUQDQgAE9dv3Fjenz8YR4tdkKGQ9Bi9zcoHsXKLsI+GlNZRmf2Tbvde44JHs +o0YGbGktKcUDcULbZETfu5EwdXADK1axqA== +-----END EC PRIVATE KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_384.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_384.pem new file mode 100644 index 0000000000000..7ea50d710a6e9 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_384.pem @@ -0,0 +1,6 @@ +-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDCQt+1aGEbCvPAh7s/MUFYSlhiSTmAodcf0cYSTn3adnfyRy8gkEbDW +1wt9iFggB+agBwYFK4EEACKhZANiAARhkUjj0RDbs/Kb80E2RrK9FhioIM6xfFyB +shGwK9425DZZrrsqbOsumySktrXcuLBd8gTangXmq76cdDAAhTFpu5jWHqtStRYi +bIlehrCn1yRsCidM4w24iFmMsOJYzeY= +-----END EC PRIVATE KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_512.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_512.pem new file mode 100644 index 0000000000000..7991cd7a7d3bf --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_private_key_512.pem @@ -0,0 +1,7 @@ +-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIBNSp87+P6KShEPwO2lr56FZ8dlMKqjF+9nSUJsJA4W52ST8hBZUY5 ++/0UKOpL8UC1tgXdzTW7bUycJaIcmGKpx96gBwYFK4EEACOhgYkDgYYABACTTc8w +yI6EhvbZUi6wDZQvlhfHI/mXiSw3kXIHy9SKnMoMjr4nB9HPKLToyaNT8QbNjM59 +Kpf7TLJFS3GdFOlzXAGRKM01jme+JAgSr8xiePT2+nwW4yW6C90uzlvzoj8IMCPa +InAaQ6SD3/uK3FKJzxDdciItFmVjsTC24zUEZ28NzA== +-----END EC PRIVATE KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_256.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_256.pem new file mode 100644 index 0000000000000..3a48aaf30b1b9 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_256.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9dv3Fjenz8YR4tdkKGQ9Bi9zcoHs +XKLsI+GlNZRmf2Tbvde44JHso0YGbGktKcUDcULbZETfu5EwdXADK1axqA== +-----END PUBLIC KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_384.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_384.pem new file mode 100644 index 0000000000000..83fef33622004 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_384.pem @@ -0,0 +1,5 @@ +-----BEGIN PUBLIC KEY----- +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEYZFI49EQ27Pym/NBNkayvRYYqCDOsXxc +gbIRsCveNuQ2Wa67KmzrLpskpLa13LiwXfIE2p4F5qu+nHQwAIUxabuY1h6rUrUW +ImyJXoawp9ckbAonTOMNuIhZjLDiWM3m +-----END PUBLIC KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_512.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_512.pem new file mode 100644 index 0000000000000..1f7d9b2240d59 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/ec_public_key_512.pem @@ -0,0 +1,6 @@ +-----BEGIN PUBLIC KEY----- +MIGbMBAGByqGSM49AgEGBSuBBAAjA4GGAAQAiSsKUaydYBTfceUjg/w3mbCYGtw+ +SRyTiGwBDyqb1EcCbwvw7ZlAgQ6ffyYSpfG0p+Yla3LGf/WHtvcofI1sxDoAeyX7 +0jEb1eALWPkRvygY5JfoUFngRF6AyWRIp0fthK4+k1RhOqg+XpdTr88E+wngCu7x +Qw8PUYRmYIkCjeWsD9M= +-----END PUBLIC KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem new file mode 100644 index 0000000000000..3c1fb6580f616 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC/hhfM2GZIMdDH +yGXb9KTXMosyJiPJ8GIkGpNTXdznYKnMSZpfRR3D95tNNvfmsj9chFuoQzPcs0oS +BKSdrUqGjZOpiuDa2iKhWnMwjWO4EoG+EBZK7qDOirpugYtQUe16T7PG6jBtww+g +59bc+YU8oipZc7gx17hjQytq1x1d1GMQ5D8f3F4MtbG80SlHjEl+qZKZrM3Tg0Tg +qz+GHc8O01OPeO/6WBmtyh6bSmN7ossT/Q/Pn9t3HTYzC+SkJ4DFXAcg6azSsRWZ +Pz70JhGzzhhx6K4zLSmqaQBbSFJ9X462+Q8MGq808PCWx+Tr1IB1iKHKXL/Mi5mF +iTmPiWoKD0jkD7eeMJfZcCVbvCZbgq9Biul2zV/RErMp7bzhqu2qaptDlV2OxlEX +AAzNekAMQ1Luin6TkzecN49H4PYdVW0IfiIm/j+pCpPxZ1XqTddnCGJH3PiAeUzY +a34Rl3HpAwZ+FviV6Ha5iCgG9gP3S1GZVHuZxu53jCIcszmADMLE2YYo7z+w0GTk +0VQJzDKi1ihsF98DRWKMtr3uVkA/1r6uuAVYiD4Hd2OjVrKtTYsB0q1Lpn3VJDan +DgEoDRSra2dqn43a7FWb/Bh3XzMR3Pr2f0v9ONE38k5Vs166vaGczgHG85VYCEHD +U0rF+vC/AatbTEoT3WkHs0dxASe/jwIDAQABAoICAQCP/0fBsiokuZ+Ztegvmh6d +1dimGdrqp5uCPaDrGzVNmXXQrL4uRw78Ghxfz6ZPg1D0RGeNeQlzNDHKnkcObg4M +YEHXRR2OLle/LPxSUtvYt8FonzvDluq8n71P/II8H30Hawll0woaXIBRoD4B4nD0 +h5CDtOeWQfTWXNg1+gqQU3EwcAut7sefkrRRnIkGzEktvF0Gwc/26UlOCjM2qQ7W +vLiktG7psHeOHNsqMqHdgSZinu/WUQB27DWl55k+epzsxiZt7oML3iy5Db5NXpoM +sDXc3u6qd5hOEag8+YFb1g69L0DK/v0gNs8M7roFPb5foBzUwytJpOQGbIgkjVJ1 +dIRWmKbAmhXSdBenCGpq0WoJLxDXcKERrGfYcKeawQpYUIeg6yxMeklD2AywC6zZ +Th33vhGTtwEymEPlWW1roXwfQyfGOi379SzqXRhwACHsAQ3ZEinYXLzMvBntybqm +0VRVzaqDKMkNXHR6U6b9lWUxicYOYhc3V8m926zYC5O2CPPcB6LZzqUqRyCB9Cnz +HYCDlvsO8LuYOa0nUpobR610qWg8lHcyJkZEnrCkEYFsa6Bs622gOOv3rGNXIwhP +U3Zc5Us1RF/sbRE8qDVEp/tFI7/ceBQMnQGfkoCaC/L0SZAoP2LvUu660+ooTe4D +fMhMZkC5yOfwzYYus2qFsQKCAQEA50rpPYPBGeqbZtQ05JWpGeTHxalflby+L33K +6ebnNEYVeXGGK0DrdPpQurnFjbCzO+8lLcO0SQZ3zeXpMHCPzbjilYdyEepHbU1J +Z2dJvk5r0qC1f3dv7Lgug9q0au7utjm1s272KWbz+JrfIirnGKBkg6CaijgGHzpi +eF+4cTluaz5Kq7D68jZLLkEEQaDWv0aNK7HN9ShZBXfeI+G/RLkvMZpnTWEnZArq +AEJJhQuWNQLcI2Bz1H07snrDjmMh21IlBh6wp2uZJyUu25h45WsO9PeJM+BG10Rz 
+94Pap1C46DpiSCRqKiB5mjOK3uauaZ8EwMrJcjCXQ/0pcRMk6wKCAQEA0/ui03aZ +A3uKO65XdeolQrQ0dShIVnbOnKAyAm7PeRyOJFFPrBu/BEq9vlLFS78BcGyukGBL +zHfQCqfAO3GWI7FcBITrlv3+Stc0w85uOW0iDnAV+dx9wRRsGGDXly6cgxYVonPk +dFb4IBQzpCrv3A831aXTevy9gzgaS+tU0+iCeWjv8P9UYiF2Fps64nIWiE+fulat +u/0D9eic6UheynCq+aiEyX/frvfy+IpfmEwPBHABPrVz0JH2TW3/S9VCYvJd973E +6IseD6kYBdKe4iLzRtiqYnomVSSlShhDBA3Th31hSN389LgyJzVHuT3AhzH/tJur +AQGg+9JWJK827QKCAQBw5wFZ1NHTC9GV7WpwVpn4GNRnqVkOZ9bSqe3+JYeg3cEn +ygG7ZXJKZNI5g+tqUeH8FK2qXEjKCM08hclBqwCXZHO+mCJXmmwJb9uwDRxTOEar +x2UxdqT9AZ87H+xH8qA19Y+7qiOhbC8VoMvhCt5KCClok2I8rEJYxhBBQEBxjJ5Y +3Uxn6PlmeJhUWMpyGsgZEKBivuWRkYN7Bts52r2aF2dxKDTa8dzfmfBF9hWbS1+k +OuQopLGF/SfB5OlGvR9OxxdpmFuVLhHRqPXpndEN9CeV7qtb1lPeSvzsysFWQTIx +siY9nwDv1XBmlFhqnGJxDn2sepDBG/UK7C5pkvXHAoIBAE1dMumWCPX6alKpqAXC +tGKJvfNcBjp4FaOX22iD5HhA8adTfY5Eegwo8uK49oRUa0QKhhLu8frnZyfcXQ3U +wIS6P+juUcTDYcESlZE4j4WpzJoUUhDkZYjH2UN0ZHoqarh0oOSvAxtqehfLB6XM +fcmETE5OvMXP46I2IXK/qi/EVYnVwGvl43gE2kIZ2P23wX1yActRwwoupvliMZH5 +9Y5QXDXNSNj5MDt9BlGqvoSojXqDHhEyy9e8zXUoxdl5NUdDSQQXX/u2efwhLEFD +wvSo6LxcaqKSDddZDnHiNOMP4Lk8Cvs6+QyfBL7GdSyn2tH8b2QMw/YUEBqXgxTU +VWkCggEBAID5Ye9a5MgACsFCMFtEGlH74szeaePx+ByMJq4H6LGXJn/acfVFbO+0 +CtEQ9PP97BJcHmiFKUzEokbZ4KW3+U0XU2E7rDxEN0Att01lQqx7PfXPaGAt5dif +UITevbl4Wht89pzfcXtYEz36JeLNMfVpHiTXTl/ZDdykUbqFK2iwt8ipDuzIvQ4b +oqLOLOYItUpU1hmcW3u0FzCR5Xy+oDOPYHK4truJjMzqt4v0Vryx3WmgeGUDY2Ao +WMd5fU8hLTrs8/0zoW8/Q0ETC6hBwhRJO+SrM2mJUbdRni9XC3y07Dcj+g8QvDlM +qOmUy+vnhh6hqTWkgGydKhjZJxV+nDk= +-----END PRIVATE KEY----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem new file mode 100644 index 0000000000000..17e8eb3cb2037 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem @@ -0,0 +1,14 @@ +-----BEGIN PUBLIC KEY----- 
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAv4YXzNhmSDHQx8hl2/Sk +1zKLMiYjyfBiJBqTU13c52CpzEmaX0Udw/ebTTb35rI/XIRbqEMz3LNKEgSkna1K +ho2TqYrg2toioVpzMI1juBKBvhAWSu6gzoq6boGLUFHtek+zxuowbcMPoOfW3PmF +PKIqWXO4Mde4Y0MratcdXdRjEOQ/H9xeDLWxvNEpR4xJfqmSmazN04NE4Ks/hh3P +DtNTj3jv+lgZrcoem0pje6LLE/0Pz5/bdx02MwvkpCeAxVwHIOms0rEVmT8+9CYR +s84YceiuMy0pqmkAW0hSfV+OtvkPDBqvNPDwlsfk69SAdYihyly/zIuZhYk5j4lq +Cg9I5A+3njCX2XAlW7wmW4KvQYrpds1f0RKzKe284artqmqbQ5VdjsZRFwAMzXpA +DENS7op+k5M3nDePR+D2HVVtCH4iJv4/qQqT8WdV6k3XZwhiR9z4gHlM2Gt+EZdx +6QMGfhb4leh2uYgoBvYD90tRmVR7mcbud4wiHLM5gAzCxNmGKO8/sNBk5NFUCcwy +otYobBffA0VijLa97lZAP9a+rrgFWIg+B3djo1ayrU2LAdKtS6Z91SQ2pw4BKA0U +q2tnap+N2uxVm/wYd18zEdz69n9L/TjRN/JOVbNeur2hnM4BxvOVWAhBw1NKxfrw +vwGrW0xKE91pB7NHcQEnv48CAwEAAQ== +-----END PUBLIC KEY----- From 8aa1a42d11d5fb79de289b532d39607e0531aa1c Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 9 Jan 2019 17:22:19 +0200 Subject: [PATCH 15/71] Doc changes and add an IdToken class --- .../xpack/security/authc/oidc/IdToken.java | 107 +++++++++ .../{support/jwt => oidc}/IdTokenParser.java | 22 +- .../security/authc/support/jwt/Claims.java | 8 + .../security/authc/support/jwt/EcSigner.java | 16 +- .../support/jwt/HmacSignatureValidator.java | 2 - .../authc/support/jwt/HmacSigner.java | 12 +- .../support/jwt/JsonWebTokenBuilder.java | 1 - .../security/authc/support/jwt/JwtSigner.java | 7 + .../support/jwt/RsaSignatureValidator.java | 1 - .../security/authc/support/jwt/RsaSigner.java | 17 +- .../jwt => oidc}/IdTokenParserTests.java | 206 ++++++++---------- .../jwt/JwtSignatureValidatorTests.java | 1 + .../authc/support/jwt/JwtSignerTests.java | 1 + 13 files changed, 271 insertions(+), 130 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java rename x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/{support/jwt => oidc}/IdTokenParser.java (94%) rename 
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/{support/jwt => oidc}/IdTokenParserTests.java (73%) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java new file mode 100644 index 0000000000000..064bcba2f3f14 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.oidc; + +import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class IdToken extends JsonWebToken { + public IdToken(Map header, Map payload) { + super(header, payload); + } + + public IdToken(Map header, Map payload, String signature) { + super(header, payload, signature); + } + + /** + * Returns the iss claim value of the ID Token as a String + */ + public String getIssuer() { + return (String) getPayload().get("iss"); + } + + /** + * Returns the sub claim value of the ID Token as a String + */ + public String getSubject() { + return (String) getPayload().get("sub"); + } + + /** + * Returns the nonce claim value of the ID Token as a String or null if the ID Token did not contain that optional claim + */ + public String getNonce() { + return (String) getPayload().get("nonce"); + } + + /** + * Returns the aud claim value of the ID Token as a List of Strings + */ + public List getAudiences() { + return (List) getPayload().get("aud"); + } + + /** + * Returns the exp claim value of the ID Token as long + */ + public long getExpiration() { + return (long) 
getPayload().get("exp"); + } + + /** + * Returns the iat claim value of the ID Token as long + */ + public long getIssuedAt() { + return (long) getPayload().get("iat"); + } + + /** + * Returns the auth_time claim value or -1 if the ID Token did not contain that optional claim + */ + public long getAuthTime() { + if (getPayload().containsKey("auth_time")) { + return (long) getPayload().get("auth_time"); + } else { + return -1; + } + } + + /** + * Returns the acr claim value or null if the ID Token did not contain that optional claim + */ + public String getAuthenticationContectClassReference() { + if (getPayload().containsKey("acr")) { + return (String) getPayload().get("acr"); + } else { + return null; + } + } + + /** + * Returns the amr claim values as a List of Stings or an empty list if the ID Token did not contain that optional claim + */ + public List getAuthenticationMethodsReferences() { + if (getPayload().containsKey("amr")) { + return (List) getPayload().get("amr"); + } else + return Collections.emptyList(); + } + + /** + * Returns the azp claim value or null if the ID Token did not contain that optional claim + */ + public String getAuthorizedParty() { + if (getPayload().containsKey("azp")) { + return (String) getPayload().get("azp"); + } else { + return null; + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParser.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java similarity index 94% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParser.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java index a7016362e7dd1..7f486cc64bc6c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParser.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.authc.support.jwt; +package org.elasticsearch.xpack.security.authc.oidc; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -12,7 +12,12 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; +import org.elasticsearch.xpack.security.authc.support.jwt.Claims; +import org.elasticsearch.xpack.security.authc.support.jwt.EcSignatureValidator; +import org.elasticsearch.xpack.security.authc.support.jwt.HmacSignatureValidator; +import org.elasticsearch.xpack.security.authc.support.jwt.JwtSignatureValidator; +import org.elasticsearch.xpack.security.authc.support.jwt.RsaSignatureValidator; +import org.elasticsearch.xpack.security.authc.support.jwt.SignatureAlgorithm; import java.io.IOException; import java.io.UnsupportedEncodingException; @@ -27,7 +32,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue; /** - * Contains the necessary functionality for parsing a serialized OpenID Connect ID Token to a {@link JsonWebToken} + * Contains the necessary functionality for parsing a serialized OpenID Connect ID Token to an {@link IdToken} */ public class IdTokenParser { private final RPConfiguration rpConfig; @@ -37,21 +42,22 @@ public IdTokenParser(RPConfiguration rpConfig) { } /** - * Parses the serialized format of an ID Token into a {@link JsonWebToken}. In doing so it: + * Parses the serialized format of an ID Token into a {@link IdToken}. In doing so it: *
    *
  • Validates that the format and structure of the ID Token is correct
  • *
  • Validates that the ID Token is signed and that one of the supported algorithms is used
  • *
  • Validates the signature using the appropriate
  • *
* This method does not validate the contents of the ID Token such as expiration time, - * issuer, audience etc. These checks should be performed on the {@link JsonWebToken} by the caller. + * issuer, audience etc. These checks should be performed on the {@link IdToken} + * by the caller. * * @param idToken Serialized string representation of the ID Token * @param key The {@link Key} to be used for verifying the signature - * @return a {@link JsonWebToken} + * @return a {@link IdToken} * @throws IOException if the ID Token cannot be deserialized */ - public final JsonWebToken parseAndValidateIdToken(String idToken, Key key) throws IOException { + public final IdToken parseAndValidateIdToken(String idToken, Key key) throws IOException { final String[] idTokenParts = idToken.split("\\."); if (idTokenParts.length != 3) { throw new IllegalArgumentException("The provided token is not a valid JWT"); @@ -81,7 +87,7 @@ public final JsonWebToken parseAndValidateIdToken(String idToken, Key key) throw final byte[] data = (serializedHeader + "." + serializedPayload).getBytes(StandardCharsets.UTF_8); validator.validateSignature(data, signatureBytes); final Map payloadMap = parsePayload(deserializedPayload); - return new JsonWebToken(headerMap, payloadMap); + return new IdToken(headerMap, payloadMap); } /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java index 9b345f2d06f14..18acc6ef03e3c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/Claims.java @@ -1,9 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.xpack.security.authc.support.jwt; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; +/** + * Class that contains commonly used Claims in OpenID Connect + */ public class Claims { public enum HeaderClaims { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java index 9e327385b145c..faf520bf43ece 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java @@ -6,6 +6,10 @@ import java.security.SignatureException; import java.security.interfaces.ECPrivateKey; +/** + * Class offering necessary functionality for signing JWTs using the + * Elliptic Curve Digital Signature Algorithm (ECDSA) with a EC Private Key. 
+ */ public class EcSigner implements JwtSigner { private SignatureAlgorithm algorithm; @@ -38,7 +42,13 @@ private int getSignatureLength(SignatureAlgorithm algorithm) { } } - + /** + * Signs the data byte array with a EC Private Key using the Elliptic Curve Digital Signature Algorithm (ECDSA) + * + * @param data the data to be signed + * @return the signature bytes + * @throws GeneralSecurityException if any error was encountered while signing + */ @Override public byte[] sign(byte[] data) throws GeneralSecurityException { if (null == data || data.length == 0) { @@ -57,8 +67,8 @@ public byte[] sign(byte[] data) throws GeneralSecurityException { * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/impl/src/main/java/io/jsonwebtoken/impl/crypto/EllipticCurveProvider.java * * @param derSignature The DER formatted signature - * @return - * @throws SignatureException + * @return the signature formatted in a way that can be encoded and added in a JWT + * @throws SignatureException if the signature DER encoding is not valid */ private byte[] convertToJose(byte[] derSignature) throws SignatureException { if (derSignature.length < 8 || derSignature[0] != 48) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java index 95855f4b3a6f2..8a4f9679cd60e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java @@ -12,10 +12,8 @@ import javax.crypto.Mac; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; import java.security.Key; import java.security.MessageDigest; -import java.util.Arrays; import 
java.util.Base64; /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java index e00e4f1278833..d12bea7f6681f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java @@ -1,13 +1,14 @@ package org.elasticsearch.xpack.security.authc.support.jwt; -import org.elasticsearch.ElasticsearchSecurityException; - import javax.crypto.Mac; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; import java.security.GeneralSecurityException; import java.security.Key; +/** + * Class offering necessary functionality for generating an HMAC for the JWT using a secret key. + */ public class HmacSigner implements JwtSigner { private Key key; @@ -26,6 +27,13 @@ public HmacSigner(SignatureAlgorithm algorithm, Key key) { } + /** + * Generates the HMAC of the JWT + * + * @param data the data to be signed + * @return the HMAC as a byte array + * @throws GeneralSecurityException if any error was encountered generating the HMAC + */ @Override public byte[] sign(byte[] data) throws GeneralSecurityException { if (null == data || data.length == 0) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java index 74d59a3af29a7..457303b4ca6bd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JsonWebTokenBuilder.java @@ -9,7 +9,6 @@ import java.util.ArrayList; import java.util.HashMap; -import 
java.util.HashSet; import java.util.List; import java.util.Map; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java index 5ca75fc8611ac..e9b6535a2a011 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSigner.java @@ -9,5 +9,12 @@ public interface JwtSigner { + /** + * Signs the data byte array using the specified algorithm and private or secret key + * + * @param data the data to be signed + * @return the signature bytes + * @throws GeneralSecurityException if any error was encountered while signing + */ byte[] sign(byte[] data) throws GeneralSecurityException; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java index 006b5ebc5bbc8..aab74f66564e1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSignatureValidator.java @@ -10,7 +10,6 @@ import java.security.Key; import java.security.PublicKey; import java.security.Signature; -import java.util.Base64; /** * Class offering necessary functionality for validating the signatures of JWTs that have been signed with diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java index 0f5d732afd349..6f206a88ec4cb 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java @@ -5,6 +5,10 @@ import java.security.PrivateKey; import java.security.Signature; +/** + * Class offering necessary functionality for signing JWTs with a Private Key using + * RSASSA-PKCS1-v1_5 (PKCS#1) + */ public class RsaSigner implements JwtSigner { private SignatureAlgorithm algorithm; @@ -22,6 +26,13 @@ public RsaSigner(SignatureAlgorithm algorithm, Key key) { this.key = key; } + /** + * Signs the data byte array with an Private Key using RSASSA-PKCS1-v1_5 (PKCS#1) + * + * @param data the data to be signed + * @return the signature bytes + * @throws GeneralSecurityException if any error was encountered while signing + */ @Override public byte[] sign(byte[] data) throws GeneralSecurityException { if (null == data || data.length == 0) { @@ -29,8 +40,8 @@ public byte[] sign(byte[] data) throws GeneralSecurityException { } final Signature rsa = Signature.getInstance(algorithm.getJcaAlgoName()); - rsa.initSign((PrivateKey) key); - rsa.update(data); - return rsa.sign(); + rsa.initSign((PrivateKey) key); + rsa.update(data); + return rsa.sign(); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java similarity index 73% rename from x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java rename to x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java index f5ff440449168..8c1223500553f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/IdTokenParserTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java @@ -3,12 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.authc.support.jwt; +package org.elasticsearch.xpack.security.authc.oidc; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ssl.PemUtils; -import org.elasticsearch.xpack.security.authc.oidc.RPConfiguration; +import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; +import org.elasticsearch.xpack.security.authc.support.jwt.SignatureAlgorithm; import javax.crypto.spec.SecretKeySpec; import java.io.IOException; @@ -32,39 +33,34 @@ public void testIdTokenParsing() throws IOException { "nRoZGF0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbm" + "Vkb2UvbWUuanBnIn0.bpG9QZk9uykstyn2rv2w_7NkS-rerdX78_ehxli8RTM"; RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, null); - IdTokenParser jwtParser = new IdTokenParser(rpConfig); + IdTokenParser idTokenParser = new IdTokenParser(rpConfig); final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); - JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); - assertTrue(jwt.getPayload().containsKey("iss")); - assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); - assertTrue(jwt.getPayload().containsKey("sub")); - assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); - assertTrue(jwt.getPayload().containsKey("aud")); - List aud = (List) jwt.getPayload().get("aud"); + IdToken idToken = idTokenParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertThat(idToken.getIssuer(), 
equalTo("http://op.example.com")); + assertThat(idToken.getSubject(), equalTo("248289761001")); + List aud = idToken.getAudiences(); assertThat(aud.size(), equalTo(1)); assertTrue(aud.contains("s6BhdRkqt3")); - assertTrue(jwt.getPayload().containsKey("nonce")); - assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); - assertTrue(jwt.getPayload().containsKey("exp")); - assertThat(jwt.getPayload().get("exp"), equalTo(1516339022L)); - assertTrue(jwt.getPayload().containsKey("iat")); - assertThat(jwt.getPayload().get("iat"), equalTo(1516239022L)); - assertTrue(jwt.getPayload().containsKey("name")); - assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); - assertTrue(jwt.getPayload().containsKey("given_name")); - assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); - assertTrue(jwt.getPayload().containsKey("family_name")); - assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); - assertTrue(jwt.getPayload().containsKey("gender")); - assertThat(jwt.getPayload().get("gender"), equalTo("female")); - assertTrue(jwt.getPayload().containsKey("birthdate")); - assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); - assertTrue(jwt.getPayload().containsKey("email")); - assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); - assertTrue(jwt.getPayload().containsKey("picture")); - assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); - assertTrue(jwt.getHeader().containsKey("alg")); - assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + assertTrue(idToken.getPayload().containsKey("nonce")); + assertThat(idToken.getNonce(), equalTo("n-0S6_WzA2Mj")); + assertThat(idToken.getExpiration(), equalTo(1516339022L)); + assertThat(idToken.getIssuedAt(), equalTo(1516239022L)); + assertTrue(idToken.getPayload().containsKey("name")); + assertThat(idToken.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(idToken.getPayload().containsKey("given_name")); + 
assertThat(idToken.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(idToken.getPayload().containsKey("family_name")); + assertThat(idToken.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(idToken.getPayload().containsKey("gender")); + assertThat(idToken.getPayload().get("gender"), equalTo("female")); + assertTrue(idToken.getPayload().containsKey("birthdate")); + assertThat(idToken.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(idToken.getPayload().containsKey("email")); + assertThat(idToken.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(idToken.getPayload().containsKey("picture")); + assertThat(idToken.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(idToken.getHeader().containsKey("alg")); + assertThat(idToken.getHeader().get("alg"), equalTo("HS256")); } public void testIdTokenWithPrivateClaimsParsing() throws IOException { @@ -76,52 +72,47 @@ public void testIdTokenWithPrivateClaimsParsing() throws IOException { "iYWRkcmVzcyI6eyJjb3VudHJ5IjoiR3JlZWNlIiwicmVnaW9uIjoiRXZpYSJ9fQ.hvG90pJHvjPkZaf_ll3WMeSvfHIzx82zYs5iuygopQo"; RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); - IdTokenParser jwtParser = new IdTokenParser(rpConfig); + IdTokenParser idTokenParser = new IdTokenParser(rpConfig); final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); - JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); - assertTrue(jwt.getPayload().containsKey("iss")); - assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); - assertTrue(jwt.getPayload().containsKey("sub")); - assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); - assertTrue(jwt.getPayload().containsKey("aud")); - List aud = (List) jwt.getPayload().get("aud"); + IdToken 
idToken = idTokenParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertThat(idToken.getIssuer(), equalTo("http://op.example.com")); + assertThat(idToken.getSubject(), equalTo("248289761001")); + List aud = idToken.getAudiences(); assertThat(aud.size(), equalTo(1)); assertTrue(aud.contains("s6BhdRkqt3")); - assertTrue(jwt.getPayload().containsKey("nonce")); - assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); - assertTrue(jwt.getPayload().containsKey("exp")); - assertThat(jwt.getPayload().get("exp"), equalTo(1516339022L)); - assertTrue(jwt.getPayload().containsKey("iat")); - assertThat(jwt.getPayload().get("iat"), equalTo(1516239022L)); - assertTrue(jwt.getPayload().containsKey("name")); - assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); - assertTrue(jwt.getPayload().containsKey("given_name")); - assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); - assertTrue(jwt.getPayload().containsKey("family_name")); - assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); - assertTrue(jwt.getPayload().containsKey("gender")); - assertThat(jwt.getPayload().get("gender"), equalTo("female")); - assertTrue(jwt.getPayload().containsKey("birthdate")); - assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); - assertTrue(jwt.getPayload().containsKey("email")); - assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); - assertTrue(jwt.getPayload().containsKey("picture")); - assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); - assertTrue(jwt.getPayload().containsKey("claim1")); - assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); - assertTrue(jwt.getPayload().containsKey("claim2")); - assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); - assertTrue(jwt.getPayload().containsKey("claim3")); - assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); - assertTrue(jwt.getPayload().containsKey("claim4")); - 
assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); - assertTrue(jwt.getPayload().containsKey("address")); + assertTrue(idToken.getPayload().containsKey("nonce")); + assertThat(idToken.getNonce(), equalTo("n-0S6_WzA2Mj")); + assertThat(idToken.getExpiration(), equalTo(1516339022L)); + assertThat(idToken.getIssuedAt(), equalTo(1516239022L)); + assertTrue(idToken.getPayload().containsKey("name")); + assertThat(idToken.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(idToken.getPayload().containsKey("given_name")); + assertThat(idToken.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(idToken.getPayload().containsKey("family_name")); + assertThat(idToken.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(idToken.getPayload().containsKey("gender")); + assertThat(idToken.getPayload().get("gender"), equalTo("female")); + assertTrue(idToken.getPayload().containsKey("birthdate")); + assertThat(idToken.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(idToken.getPayload().containsKey("email")); + assertThat(idToken.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(idToken.getPayload().containsKey("picture")); + assertThat(idToken.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(idToken.getPayload().containsKey("claim1")); + assertThat(idToken.getPayload().get("claim1"), equalTo("value1")); + assertTrue(idToken.getPayload().containsKey("claim2")); + assertThat(idToken.getPayload().get("claim2"), equalTo("value2")); + assertTrue(idToken.getPayload().containsKey("claim3")); + assertThat(idToken.getPayload().get("claim3"), equalTo("value3")); + assertTrue(idToken.getPayload().containsKey("claim4")); + assertThat(idToken.getPayload().get("claim4"), equalTo("value4")); + assertTrue(idToken.getPayload().containsKey("address")); Map expectedAddress = new HashMap<>(); expectedAddress.put("country", "Greece"); expectedAddress.put("region", "Evia"); - 
assertThat(jwt.getPayload().get("address"), equalTo(expectedAddress)); - assertTrue(jwt.getHeader().containsKey("alg")); - assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + assertThat(idToken.getPayload().get("address"), equalTo(expectedAddress)); + assertTrue(idToken.getHeader().containsKey("alg")); + assertThat(idToken.getHeader().get("alg"), equalTo("HS256")); } public void testIdTokenWithMutipleAudiencesParsing() throws IOException { @@ -133,48 +124,43 @@ public void testIdTokenWithMutipleAudiencesParsing() throws IOException { "MiOnsiY291bnRyeSI6IkdyZWVjZSIsInJlZ2lvbiI6IkV2aWEifX0.bo2s5D0i87Ij5TSdWnoCmwgM_0dagvscCOqs-luM1yI"; RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); - IdTokenParser jwtParser = new IdTokenParser(rpConfig); + IdTokenParser idTokenParser = new IdTokenParser(rpConfig); final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); - JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); - assertTrue(jwt.getPayload().containsKey("iss")); - assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); - assertTrue(jwt.getPayload().containsKey("sub")); - assertThat(jwt.getPayload().get("sub"), equalTo("248289761001")); - assertTrue(jwt.getPayload().containsKey("aud")); - List aud = (List) jwt.getPayload().get("aud"); + IdToken idToken = idTokenParser.parseAndValidateIdToken(serializedJwt, keySpec); + assertThat(idToken.getIssuer(), equalTo("http://op.example.com")); + assertThat(idToken.getSubject(), equalTo("248289761001")); + List aud = idToken.getAudiences(); assertThat(aud.size(), equalTo(2)); assertTrue(aud.contains("s6BhdRkqt3")); assertTrue(aud.contains("other_audience")); - assertTrue(jwt.getPayload().containsKey("nonce")); - assertThat(jwt.getPayload().get("nonce"), equalTo("n-0S6_WzA2Mj")); - 
assertTrue(jwt.getPayload().containsKey("exp")); - assertThat(jwt.getPayload().get("exp"), equalTo(1516339022L)); - assertTrue(jwt.getPayload().containsKey("iat")); - assertThat(jwt.getPayload().get("iat"), equalTo(1516239022L)); - assertTrue(jwt.getPayload().containsKey("name")); - assertThat(jwt.getPayload().get("name"), equalTo("Jane Doe")); - assertTrue(jwt.getPayload().containsKey("given_name")); - assertThat(jwt.getPayload().get("given_name"), equalTo("Jane")); - assertTrue(jwt.getPayload().containsKey("family_name")); - assertThat(jwt.getPayload().get("family_name"), equalTo("Doe")); - assertTrue(jwt.getPayload().containsKey("gender")); - assertThat(jwt.getPayload().get("gender"), equalTo("female")); - assertTrue(jwt.getPayload().containsKey("birthdate")); - assertThat(jwt.getPayload().get("birthdate"), equalTo("1994-10-31")); - assertTrue(jwt.getPayload().containsKey("email")); - assertThat(jwt.getPayload().get("email"), equalTo("janedoe@example.com")); - assertTrue(jwt.getPayload().containsKey("picture")); - assertThat(jwt.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); - assertTrue(jwt.getPayload().containsKey("claim1")); - assertThat(jwt.getPayload().get("claim1"), equalTo("value1")); - assertTrue(jwt.getPayload().containsKey("claim2")); - assertThat(jwt.getPayload().get("claim2"), equalTo("value2")); - assertTrue(jwt.getPayload().containsKey("claim3")); - assertThat(jwt.getPayload().get("claim3"), equalTo("value3")); - assertTrue(jwt.getPayload().containsKey("claim4")); - assertThat(jwt.getPayload().get("claim4"), equalTo("value4")); - assertTrue(jwt.getHeader().containsKey("alg")); - assertThat(jwt.getHeader().get("alg"), equalTo("HS256")); + assertTrue(idToken.getPayload().containsKey("nonce")); + assertThat(idToken.getNonce(), equalTo("n-0S6_WzA2Mj")); + assertThat(idToken.getExpiration(), equalTo(1516339022L)); + assertThat(idToken.getIssuedAt(), equalTo(1516239022L)); + 
assertTrue(idToken.getPayload().containsKey("name")); + assertThat(idToken.getPayload().get("name"), equalTo("Jane Doe")); + assertTrue(idToken.getPayload().containsKey("given_name")); + assertThat(idToken.getPayload().get("given_name"), equalTo("Jane")); + assertTrue(idToken.getPayload().containsKey("family_name")); + assertThat(idToken.getPayload().get("family_name"), equalTo("Doe")); + assertTrue(idToken.getPayload().containsKey("gender")); + assertThat(idToken.getPayload().get("gender"), equalTo("female")); + assertTrue(idToken.getPayload().containsKey("birthdate")); + assertThat(idToken.getPayload().get("birthdate"), equalTo("1994-10-31")); + assertTrue(idToken.getPayload().containsKey("email")); + assertThat(idToken.getPayload().get("email"), equalTo("janedoe@example.com")); + assertTrue(idToken.getPayload().containsKey("picture")); + assertThat(idToken.getPayload().get("picture"), equalTo("http://example.com/janedoe/me.jpg")); + assertTrue(idToken.getPayload().containsKey("claim1")); + assertThat(idToken.getPayload().get("claim1"), equalTo("value1")); + assertTrue(idToken.getPayload().containsKey("claim2")); + assertThat(idToken.getPayload().get("claim2"), equalTo("value2")); + assertTrue(idToken.getPayload().containsKey("claim3")); + assertThat(idToken.getPayload().get("claim3"), equalTo("value3")); + assertTrue(idToken.getPayload().containsKey("claim4")); + assertThat(idToken.getPayload().get("claim4"), equalTo("value4")); + assertTrue(idToken.getHeader().containsKey("alg")); + assertThat(idToken.getHeader().get("alg"), equalTo("HS256")); } public void testHmacSignatureVerification() throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java index 5df8054a5a7ad..4863c905e81c0 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ssl.PemUtils; +import org.elasticsearch.xpack.security.authc.oidc.IdTokenParserTests; import javax.crypto.spec.SecretKeySpec; import java.nio.file.Path; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java index 32b149752b1b2..46c4271164e21 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ssl.PemUtils; +import org.elasticsearch.xpack.security.authc.oidc.IdTokenParserTests; import javax.crypto.spec.SecretKeySpec; import java.nio.file.Path; From 0bf371a94845bf8cc2556f87eaeb9cfcf0cacee2 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 9 Jan 2019 18:15:41 +0200 Subject: [PATCH 16/71] fix all precommit issues --- .../xpack/security/authc/oidc/IdToken.java | 4 +++ .../security/authc/oidc/IdTokenParser.java | 2 +- .../authc/oidc/OpenIdConnectRealm.java | 2 -- .../support/jwt/EcSignatureValidator.java | 5 +-- .../security/authc/support/jwt/EcSigner.java | 8 ++++- .../support/jwt/HmacSignatureValidator.java | 5 ++- .../authc/support/jwt/HmacSigner.java | 5 +++ .../security/authc/support/jwt/RsaSigner.java | 5 +++ .../authc/oidc/IdTokenParserTests.java | 35 ++++++++++++++++--- 
.../jwt/JwtSignatureValidatorTests.java | 3 +- .../authc/support/jwt/JwtSignerTests.java | 5 +-- 11 files changed, 62 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java index 064bcba2f3f14..d840afdae09b6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdToken.java @@ -20,6 +20,10 @@ public IdToken(Map header, Map payload, String s super(header, payload, signature); } + public IdToken(JsonWebToken token) { + super(token.getHeader(), token.getPayload()); + } + /** * Returns the iss claim value of the ID Token as a String */ diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java index 7f486cc64bc6c..6fc748b9d9464 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParser.java @@ -58,7 +58,7 @@ public IdTokenParser(RPConfiguration rpConfig) { * @throws IOException if the ID Token cannot be deserialized */ public final IdToken parseAndValidateIdToken(String idToken, Key key) throws IOException { - final String[] idTokenParts = idToken.split("\\."); + final String[] idTokenParts = idToken.split("\\.", -1); if (idTokenParts.length != 3) { throw new IllegalArgumentException("The provided token is not a valid JWT"); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java 
index 780b6f0e91afe..3092e934feb10 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -40,7 +39,6 @@ import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SCOPES; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_ID; -import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_SECRET; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REDIRECT_URI; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_RESPONSE_TYPE; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java index 2d9a2944c026b..6a04772af5d21 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSignatureValidator.java @@ -12,7 +12,7 @@ import 
java.security.SignatureException; import java.security.interfaces.ECPublicKey; -import static org.elasticsearch.xpack.security.authc.support.jwt.SignatureAlgorithm.*; +import static org.elasticsearch.xpack.security.authc.support.jwt.SignatureAlgorithm.getEcAlgorithms; /** * Class offering necessary functionality for validating the signatures of JWTs that have been signed with the @@ -83,7 +83,8 @@ public void validateSignature(byte[] data, byte[] signature) { /** * Converts the JOSE signature to DER so that it can be verified. See * the specification - * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/impl/src/main/java/io/jsonwebtoken/impl/crypto/EllipticCurveProvider.java + * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/ + * impl/src/main/java/io/jsonwebtoken/impl/crypto/EllipticCurveProvider.java * * @param jwsSignature The signature as decoded from the JWT * @return the signature, DER encoded so that it can be used in {@link Signature#verify(byte[])} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java index faf520bf43ece..822417919e8da 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/EcSigner.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.security.authc.support.jwt; import java.security.GeneralSecurityException; @@ -64,7 +69,8 @@ public byte[] sign(byte[] data) throws GeneralSecurityException { /** * Converts a DER Encoded signature to JOSE so that it can be attached to a JWT. See * the specification - * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/impl/src/main/java/io/jsonwebtoken/impl/crypto/EllipticCurveProvider.java + * Based on https://github.com/jwtk/jjwt/blob/1520ae8a21052b376282f8a38d310a91b15285e5/impl/src/main/java/io/jsonwebtoken/impl/ + * crypto/EllipticCurveProvider.java * * @param derSignature The DER formatted signature * @return the signature formatted in a way that can be encoded and added in a JWT diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java index 8a4f9679cd60e..0b1d137188982 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSignatureValidator.java @@ -27,7 +27,8 @@ public class HmacSignatureValidator implements JwtSignatureValidator { public HmacSignatureValidator(SignatureAlgorithm algorithm, Key key) { if (key instanceof SecretKey == false) { - throw new IllegalArgumentException("HMAC signatures can only be verified using a SecretKey but a [" + key.getClass().getName() + "] is provided"); + throw new IllegalArgumentException("HMAC signatures can only be verified using a SecretKey " + + "but a [" + key.getClass().getName() + "] is provided"); } if (SignatureAlgorithm.getHmacAlgorithms().contains(algorithm) == false) { throw new IllegalArgumentException("Unsupported algorithm " + algorithm.name() + " for HMAC signature"); @@ -43,8 
+44,6 @@ public HmacSignatureValidator(SignatureAlgorithm algorithm, Key key) { * * @param data The JWT payload * @param expectedSignature The JWT signature - * @return True if the newly calculated signature of the header and payload matches the one that was included in the JWT, false - * otherwise */ @Override public void validateSignature(byte[] data, byte[] expectedSignature) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java index d12bea7f6681f..20ac3a2d1600f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/HmacSigner.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ package org.elasticsearch.xpack.security.authc.support.jwt; import javax.crypto.Mac; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java index 6f206a88ec4cb..19bf11f490b38 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/jwt/RsaSigner.java @@ -1,3 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ package org.elasticsearch.xpack.security.authc.support.jwt; import java.security.GeneralSecurityException; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java index 8c1223500553f..3ecd1e464bed6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/IdTokenParserTests.java @@ -9,10 +9,12 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebToken; +import org.elasticsearch.xpack.security.authc.support.jwt.JsonWebTokenBuilder; import org.elasticsearch.xpack.security.authc.support.jwt.SignatureAlgorithm; import javax.crypto.spec.SecretKeySpec; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.security.PublicKey; import java.util.Arrays; @@ -34,7 +36,8 @@ public void testIdTokenParsing() throws IOException { "Vkb2UvbWUuanBnIn0.bpG9QZk9uykstyn2rv2w_7NkS-rerdX78_ehxli8RTM"; RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, null); IdTokenParser idTokenParser = new IdTokenParser(rpConfig); - final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); + final SecretKeySpec keySpec = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), "HmacSHA256"); IdToken idToken = idTokenParser.parseAndValidateIdToken(serializedJwt, keySpec); assertThat(idToken.getIssuer(), equalTo("http://op.example.com")); assertThat(idToken.getSubject(), equalTo("248289761001")); @@ -73,7 +76,8 @@ public void testIdTokenWithPrivateClaimsParsing() throws 
IOException { RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); IdTokenParser idTokenParser = new IdTokenParser(rpConfig); - final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); + final SecretKeySpec keySpec = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), "HmacSHA256"); IdToken idToken = idTokenParser.parseAndValidateIdToken(serializedJwt, keySpec); assertThat(idToken.getIssuer(), equalTo("http://op.example.com")); assertThat(idToken.getSubject(), equalTo("248289761001")); @@ -125,7 +129,8 @@ public void testIdTokenWithMutipleAudiencesParsing() throws IOException { RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); IdTokenParser idTokenParser = new IdTokenParser(rpConfig); - final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA256"); + final SecretKeySpec keySpec = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), "HmacSHA256"); IdToken idToken = idTokenParser.parseAndValidateIdToken(serializedJwt, keySpec); assertThat(idToken.getIssuer(), equalTo("http://op.example.com")); assertThat(idToken.getSubject(), equalTo("248289761001")); @@ -173,7 +178,8 @@ public void testHmacSignatureVerification() throws IOException { RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, Arrays.asList("claim1", "claim2", "claim3", "claim4")); IdTokenParser jwtParser = new IdTokenParser(rpConfig); - final SecretKeySpec keySpec = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA384"); + final SecretKeySpec keySpec = + new 
SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), "HmacSHA384"); JsonWebToken jwt = jwtParser.parseAndValidateIdToken(serializedJwt, keySpec); assertTrue(jwt.getPayload().containsKey("iss")); assertThat(jwt.getPayload().get("iss"), equalTo("http://op.example.com")); @@ -188,7 +194,8 @@ public void testHmacSignatureVerification() throws IOException { "0ZSI6IjE5OTQtMTAtMzEiLCJlbWFpbCI6ImphbmVkb2VAZXhhbXBsZS5jb20iLCJwaWN0dXJlIjoiaHR0cDovL2V4YW1wbGUuY29tL2phbmVkb2UvbWU" + "uanBnIn0.b-wg-whI_4hzmSn_lVmAfBt2YHjeeX9800jYBsiRLpGJ_WB8sCIIASTUpHiwT8RxqXAgn_nr0JsKTQkhJT6frg"; - final SecretKeySpec keySpec512 = new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), "HmacSHA512"); + final SecretKeySpec keySpec512 = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), "HmacSHA512"); JsonWebToken jwt512 = jwtParser.parseAndValidateIdToken(serializedJwt512, keySpec512); assertTrue(jwt512.getPayload().containsKey("iss")); assertThat(jwt512.getPayload().get("iss"), equalTo("http://op.example.com")); @@ -346,4 +353,22 @@ public void testNotAllowedSignatureAlgorithm() throws Exception { Exception e = expectThrows(IllegalStateException.class, () -> jwtParser.parseAndValidateIdToken(serliazedJwt384, publicKey384)); assertThat(e.getMessage(), containsString("ID Token is signed with an unsupported algorithm")); } + + public void testNoneAlgorithmNotAllowed() throws Exception { + IdToken idToken = new IdToken(new JsonWebTokenBuilder() + .algorithm("NONE") + .type("JWT") + .issuer("issuer") + .audience("audience") + .expirationTime(1516339022L) + .issuedAt(1516239022L) + .build()); + RPConfiguration rpConfig = new RPConfiguration("clientId", "redirectUri", "code", SignatureAlgorithm.getAllNames(), null, null); + IdTokenParser idTokenParser = new IdTokenParser(rpConfig); + final SecretKeySpec keySpec = + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), 
"HmacSHA256"); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> idTokenParser.parseAndValidateIdToken(idToken.encode(), keySpec)); + assertThat(e.getMessage(), containsString("ID Token is not signed or the signing algorithm is unsupported")); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java index 4863c905e81c0..0c01b89c9fbf8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignatureValidatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.security.authc.oidc.IdTokenParserTests; import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.security.PrivateKey; import java.security.PublicKey; @@ -23,7 +24,7 @@ public class JwtSignatureValidatorTests extends ESTestCase { public void testOnlyAcceptCorrectKeyAndAlgorithm() throws Exception { SignatureAlgorithm hmacAlgo = randomFrom(SignatureAlgorithm.getHmacAlgorithms()); final SecretKeySpec hmacKey = - new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), hmacAlgo.getJcaAlgoName()); + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), hmacAlgo.getJcaAlgoName()); SignatureAlgorithm rsaAlgo = randomFrom(SignatureAlgorithm.getRsaAlgorithms()); Path publicKeyPath = PathUtils.get(IdTokenParserTests.class.getResource ("/org/elasticsearch/xpack/security/authc/oidc/rsa_public_key.pem").toURI()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java index 46c4271164e21..989f9225c505c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/jwt/JwtSignerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.security.authc.oidc.IdTokenParserTests; import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.security.PrivateKey; import java.security.PublicKey; @@ -23,7 +24,7 @@ public class JwtSignerTests extends ESTestCase { public void testOnlyAcceptCorrectKeyAndAlgorithm() throws Exception { SignatureAlgorithm hmacAlgo = randomFrom(SignatureAlgorithm.getHmacAlgorithms()); final SecretKeySpec hmacKey = - new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), hmacAlgo.getJcaAlgoName()); + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), hmacAlgo.getJcaAlgoName()); SignatureAlgorithm rsaAlgo = randomFrom(SignatureAlgorithm.getRsaAlgorithms()); Path privateKeyPath = PathUtils.get(IdTokenParserTests.class.getResource ("/org/elasticsearch/xpack/security/authc/oidc/rsa_private_key.pem").toURI()); @@ -66,7 +67,7 @@ public void testSignAndValidateHmacSignatures() throws Exception { .issuer("theissuer") .build(); final SecretKeySpec key = - new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(), algorithm.getJcaAlgoName()); + new SecretKeySpec("144753a689a6508d7c7cd02752d7138e".getBytes(StandardCharsets.UTF_8.name()), algorithm.getJcaAlgoName()); JwtSigner signer = new HmacSigner(algorithm, key); JwtSignatureValidator validator = new HmacSignatureValidator(algorithm, key); validator.validateSignature(token.encodeSignableContent(), signer.sign(token.encodeSignableContent())); From 0400d8a4d8477eeaac8b05074761126196618b34 Mon Sep 17 
00:00:00 2001 From: Ioannis Kakavas Date: Thu, 10 Jan 2019 08:17:11 +0200 Subject: [PATCH 17/71] Update realm tests with the required RP_ALLOWED_SIGNATURE_ALGORITHM settings --- .../authc/oidc/OpenIdConnectRealmTests.java | 27 ++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index 310f08db6cf4a..edb8444d92083 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -44,6 +44,7 @@ public void testIncorrectResponseTypeThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "hybrid"); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -57,6 +58,7 @@ public void testMissingAuthorizationEndpointThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -71,6 +73,7 @@ public void testMissingIssuerThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -85,6 +88,7 @@ public void testMissingNameTypeThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -99,6 +103,7 @@ public void testMissingRedirectUriThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, 
OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -107,16 +112,32 @@ public void testMissingRedirectUriThrowsError() { Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI))); } - public void testMissingClientIdThrowsError() { + public void testMissingAllowedAlgorithms() { final Settings.Builder settingsBuilder = Settings.builder() .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); + assertThat(exception.getMessage(), + Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS))); + } + + public void testMissingClientIdThrowsError() { + final Settings.Builder settingsBuilder = Settings.builder() + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") + .put(getFullSettingKey(REALM_NAME, 
OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512"); + SettingsException exception = expectThrows(SettingsException.class, () -> { + new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); + }); assertThat(exception.getMessage(), Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID))); } @@ -128,6 +149,7 @@ public void testBuilidingAuthenticationRequest() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), Arrays.asList("openid", "scope1", "scope2")); @@ -148,6 +170,7 @@ public void testBuilidingAuthenticationRequestWithoutState() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), Arrays.asList("openid", "scope1", "scope2")); @@ -167,6 +190,7 @@ public void 
testBuilidingAuthenticationRequestWithoutStateAndNonce() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), Arrays.asList("openid", "scope1", "scope2")); @@ -185,6 +209,7 @@ public void testBuilidingAuthenticationRequestWithDefaultScope() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") + .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null); From b6e5ccaf8a2304892405712cf832d23efdce0940 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Thu, 17 Jan 2019 09:10:09 +0100 Subject: [PATCH 18/71] Remove the AbstracLifecycleComponent constructor with Settings (#37523) Adding the migration guide and removing the deprecated in 6.x constructor relates #35560 relates #34488 --- docs/reference/migration/migrate_7_0/java.asciidoc | 8 +++++++- .../common/component/AbstractLifecycleComponent.java | 6 ------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git 
a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index e48a4cf1b45c3..f34b1c6ca9906 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -38,4 +38,10 @@ because `Settings` is no longer needed. ==== Deprecated method `Client#termVector` removed The client method `termVector`, deprecated in 2.0, has been removed. The method -`termVectors` (plural) should be used instead. \ No newline at end of file +`termVectors` (plural) should be used instead. + +[float] +==== Deprecated constructor `AbstractLifecycleComponent(Settings settings)` removed + +The constructor `AbstractLifecycleComponent(Settings settings)`, deprecated in 6.7 +has been removed. The parameterless constructor should be used instead. diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index 2caaa43fbcd05..772d2d89cf5be 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -21,7 +21,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.util.List; @@ -36,11 +35,6 @@ public abstract class AbstractLifecycleComponent implements LifecycleComponent { protected AbstractLifecycleComponent() {} - @Deprecated - protected AbstractLifecycleComponent(Settings settings) { - // TODO drop settings from ctor - } - @Override public Lifecycle.State lifecycleState() { return this.lifecycle.state(); From 1686c32ba9825d7998336b942759afaa64fce390 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Thu, 17 Jan 2019 10:17:58 +0200 Subject: [PATCH 19/71] SQL: Rename SQL type DATE to DATETIME (#37395) * 
SQL: Rename SQL data type DATE to DATETIME SQL data type DATE has only the date part (e.g.: 2019-01-14) without any time information. Previously the SQL type DATE was referring to the ES DATE which contains also the time part along with TZ information. To conform with SQL data types the data type `DATE` is renamed to `DATETIME`, since it includes also the time, as a new runtime SQL `DATE` data type will be introduced down the road, which only contains the date part and meets the SQL standard. Closes: #36440 * Address comments --- docs/reference/sql/endpoints/rest.asciidoc | 6 +- .../sql/functions/date-time.asciidoc | 118 +++++++++--------- .../reference/sql/functions/grouping.asciidoc | 4 +- .../sql/functions/type-conversion.asciidoc | 2 +- .../elasticsearch/xpack/sql/jdbc/EsType.java | 4 +- .../xpack/sql/jdbc/JdbcPreparedStatement.java | 4 +- .../xpack/sql/jdbc/JdbcResultSet.java | 2 +- .../xpack/sql/jdbc/TypeConverter.java | 10 +- .../xpack/sql/jdbc/TypeUtils.java | 20 +-- .../sql/jdbc/JdbcPreparedStatementTests.java | 20 +-- .../xpack/sql/jdbc/TypeConverterTests.java | 4 +- .../xpack/sql/qa/SqlProtocolTestCase.java | 11 +- .../xpack/sql/qa/jdbc/ResultSetTestCase.java | 30 ++--- .../sql/qa/src/main/resources/agg.csv-spec | 8 +- .../sql/qa/src/main/resources/agg.sql-spec | 24 ++-- .../sql/qa/src/main/resources/alias.csv-spec | 16 +-- .../qa/src/main/resources/command.csv-spec | 32 ++--- .../main/resources/datetime-interval.csv-spec | 6 +- .../qa/src/main/resources/datetime.csv-spec | 4 +- .../qa/src/main/resources/datetime.sql-spec | 6 +- .../sql/qa/src/main/resources/docs.csv-spec | 54 ++++---- .../sql/qa/src/main/resources/nested.csv-spec | 8 +- .../sql/qa/src/main/resources/null.csv-spec | 2 +- .../setup_mock_metadata_get_columns.sql | 2 +- .../sql/analysis/index/IndexResolver.java | 4 +- .../search/extractor/FieldHitExtractor.java | 2 +- .../xpack/sql/expression/Expressions.java | 24 ++-- .../function/grouping/Histogram.java | 4 +- 
.../scalar/datetime/CurrentDateTime.java | 2 +- .../expression/gen/script/ScriptWeaver.java | 6 +- .../xpack/sql/parser/ExpressionBuilder.java | 10 +- .../xpack/sql/planner/QueryFolder.java | 6 +- .../xpack/sql/planner/QueryTranslator.java | 2 +- .../querydsl/agg/GroupByDateHistogram.java | 2 +- .../xpack/sql/querydsl/agg/GroupByKey.java | 4 +- .../querydsl/container/QueryContainer.java | 4 +- .../querydsl/container/SearchHitFieldRef.java | 2 +- .../xpack/sql/type/DataType.java | 16 ++- .../xpack/sql/type/DataTypeConversion.java | 26 ++-- .../xpack/sql/type/DataTypes.java | 14 +-- .../xpack/sql/type/DateEsField.java | 2 +- .../elasticsearch/xpack/sql/type/Types.java | 4 +- .../analyzer/VerifierErrorMessagesTests.java | 7 +- .../analysis/index/IndexResolverTests.java | 2 +- .../extractor/FieldHitExtractorTests.java | 2 +- .../scalar/datetime/DayOfYearTests.java | 2 +- .../arithmetic/BinaryArithmeticTests.java | 12 +- .../xpack/sql/optimizer/OptimizerTests.java | 2 +- .../sql/parser/EscapedFunctionsTests.java | 6 +- .../logical/command/sys/SysParserTests.java | 4 +- .../logical/command/sys/SysTypesTests.java | 4 +- .../xpack/sql/planner/QueryFolderTests.java | 2 +- .../sql/planner/QueryTranslatorTests.java | 4 +- .../sql/type/DataTypeConversionTests.java | 12 +- .../xpack/sql/type/DataTypesTests.java | 22 ++-- .../xpack/sql/type/TypesTests.java | 12 +- 56 files changed, 315 insertions(+), 309 deletions(-) diff --git a/docs/reference/sql/endpoints/rest.asciidoc b/docs/reference/sql/endpoints/rest.asciidoc index 901d34113aee6..cdce98ef0e500 100644 --- a/docs/reference/sql/endpoints/rest.asciidoc +++ b/docs/reference/sql/endpoints/rest.asciidoc @@ -68,7 +68,7 @@ Which returns: {"name": "author", "type": "text"}, {"name": "name", "type": "text"}, {"name": "page_count", "type": "short"}, - {"name": "release_date", "type": "date"} + {"name": "release_date", "type": "datetime"} ], "rows": [ ["Peter F. 
Hamilton", "Pandora's Star", 768, "2004-03-02T00:00:00.000Z"], @@ -186,7 +186,7 @@ Douglas Adams |The Hitchhiker's Guide to the Galaxy|180 |1979-10-12T In addition to the `query` and `cursor` fields, the request can contain `fetch_size` and `time_zone`. `fetch_size` is a hint for how many results to return in each page. SQL might chose to return more -or fewer results though. `time_zone` is the time zone to use for date -functions and date parsing. `time_zone` defaults to `utc` and can take +or fewer results though. `time_zone` is the time zone to use for datetime +functions and datetime parsing. `time_zone` defaults to `utc` and can take any values documented http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[here]. diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index 416e927903961..15fdba39026ef 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -22,9 +22,9 @@ The table below shows the mapping between {es} and {es-sql}: |========================== s|{es} s|{es-sql} -2+h| Index/Table date math +2+h| Index/Table datetime math 2+| -2+h| Query date math +2+h| Query date/time math | 1y | INTERVAL 1 YEAR | 2M | INTERVAL 2 MONTH | 3w | INTERVAL 21 DAY @@ -57,7 +57,7 @@ s|Description ==== Operators -Basic arithmetic operators (`+`, `-`, etc) support date-time parameters as indicated below: +Basic arithmetic operators (`+`, `-`, etc) support date/time parameters as indicated below: ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -66,7 +66,7 @@ include-tagged::{sql-specs}/docs.csv-spec[dtIntervalPlusInterval] ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/docs.csv-spec[dtDatePlusInterval] +include-tagged::{sql-specs}/docs.csv-spec[dtDateTimePlusInterval] 
-------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] @@ -81,7 +81,7 @@ include-tagged::{sql-specs}/docs.csv-spec[dtIntervalMinusInterval] ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/docs.csv-spec[dtDateMinusInterval] +include-tagged::{sql-specs}/docs.csv-spec[dtDateTimeMinusInterval] -------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] @@ -146,18 +146,18 @@ include-tagged::{sql-specs}/docs.csv-spec[filterNow] .Synopsis: [source, sql] -------------------------------------------------- -DAY_OF_MONTH(date_exp<1>) +DAY_OF_MONTH(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the month from a date. +Extract the day of the month from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -170,18 +170,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfMonth] .Synopsis: [source, sql] -------------------------------------------------- -DAY_OF_WEEK(date_exp<1>) +DAY_OF_WEEK(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the week from a date. Sunday is `1`, Monday is `2`, etc. +Extract the day of the week from a date/datetime. Sunday is `1`, Monday is `2`, etc. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -194,18 +194,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfWeek] .Synopsis: [source, sql] -------------------------------------------------- -DAY_OF_YEAR(date_exp<1>) +DAY_OF_YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the year from a date. +Extract the day of the year from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -218,18 +218,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfYear] .Synopsis: [source, sql] -------------------------------------------------- -DAY_NAME(date_exp<1>) +DAY_NAME(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: string .Description: -Extract the day of the week from a datetime in text format (`Monday`, `Tuesday`...). +Extract the day of the week from a date/datetime in text format (`Monday`, `Tuesday`...). ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -242,18 +242,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayName] .Synopsis: [source, sql] -------------------------------------------------- -HOUR_OF_DAY(date_exp<1>) +HOUR_OF_DAY(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the hour of the day from a date. +Extract the hour of the day from a date/datetime. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -266,18 +266,18 @@ include-tagged::{sql-specs}/docs.csv-spec[hourOfDay] .Synopsis: [source, sql] -------------------------------------------------- -ISO_DAY_OF_WEEK(date_exp<1>) +ISO_DAY_OF_WEEK(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the week from a date, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. +Extract the day of the week from a date/datetime, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. Monday is `1`, Tuesday is `2`, etc. ["source","sql",subs="attributes,callouts,macros"] @@ -291,18 +291,18 @@ include-tagged::{sql-specs}/docs.csv-spec[isoDayOfWeek] .Synopsis: [source, sql] -------------------------------------------------- -ISO_WEEK_OF_YEAR(date_exp<1>) +ISO_WEEK_OF_YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the week of the year from a date, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week +Extract the week of the year from a date/datetime, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week of a year is the first week with a majority (4 or more) of its days in January. ["source","sql",subs="attributes,callouts,macros"] @@ -316,18 +316,18 @@ include-tagged::{sql-specs}/docs.csv-spec[isoWeekOfYear] .Synopsis: [source, sql] -------------------------------------------------- -MINUTE_OF_DAY(date_exp<1>) +MINUTE_OF_DAY(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the minute of the day from a date. 
+Extract the minute of the day from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -340,18 +340,18 @@ include-tagged::{sql-specs}/docs.csv-spec[minuteOfDay] .Synopsis: [source, sql] -------------------------------------------------- -MINUTE_OF_HOUR(date_exp<1>) +MINUTE_OF_HOUR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the minute of the hour from a date. +Extract the minute of the hour from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -364,18 +364,18 @@ include-tagged::{sql-specs}/docs.csv-spec[minuteOfHour] .Synopsis: [source, sql] -------------------------------------------------- -MONTH(date_exp<1>) +MONTH(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the month of the year from a date. +Extract the month of the year from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -388,18 +388,18 @@ include-tagged::{sql-specs}/docs.csv-spec[monthOfYear] .Synopsis: [source, sql] -------------------------------------------------- -MONTH_NAME(date_exp<1>) +MONTH_NAME(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: string .Description: -Extract the month from a datetime in text format (`January`, `February`...). +Extract the month from a date/datetime in text format (`January`, `February`...). 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -417,20 +417,20 @@ NOW() *Input*: _none_ -*Output*: date/time +*Output*: datetime .Description: -This function offers the same functionality as <> function: returns the date/time -when the current query reached the server. This method always returns the same value within a query. +This function offers the same functionality as <> function: returns +the datetime when the current query reached the server. This method always returns the same value within a query. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[nowFunction] -------------------------------------------------- -Typically, this function (as well as its twin <> function is used for -relative date/time filtering: +Typically, this function (as well as its twin <> function is used +for relative date/time filtering: ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -443,18 +443,18 @@ include-tagged::{sql-specs}/docs.csv-spec[filterNow] .Synopsis: [source, sql] -------------------------------------------------- -SECOND_OF_MINUTE(date_exp<1>) +SECOND_OF_MINUTE(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the second of the minute from a date. +Extract the second of the minute from a date/datetime. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -467,18 +467,18 @@ include-tagged::{sql-specs}/docs.csv-spec[secondOfMinute] .Synopsis: [source, sql] -------------------------------------------------- -QUARTER(date_exp<1>) +QUARTER(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the year quarter the date falls in. +Extract the year quarter the date/datetime falls in. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -491,18 +491,18 @@ include-tagged::{sql-specs}/docs.csv-spec[quarter] .Synopsis: [source, sql] -------------------------------------------------- -WEEK_OF_YEAR(date_exp<1>) +WEEK_OF_YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the week of the year from a date. +Extract the week of the year from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -515,18 +515,18 @@ include-tagged::{sql-specs}/docs.csv-spec[weekOfYear] .Synopsis: [source, sql] -------------------------------------------------- -YEAR(date_exp<1>) +YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the year from a date. +Extract the year from a date/datetime. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -539,19 +539,19 @@ include-tagged::{sql-specs}/docs.csv-spec[year] .Synopsis: [source, sql] -------------------------------------------------- -EXTRACT(datetime_function<1> FROM date_exp<2>) +EXTRACT(datetime_function<1> FROM datetime_exp<2>) -------------------------------------------------- *Input*: -<1> datetime function name -<2> date expression +<1> date/time function name +<2> date/datetime expression *Output*: integer .Description: -Extract fields from a datetime by specifying the name of a <>. +Extract fields from a date/datetime by specifying the name of a <>. The following ["source","sql",subs="attributes,callouts,macros"] diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc index 9784f4e9b7480..0eee0426ce65a 100644 --- a/docs/reference/sql/functions/grouping.asciidoc +++ b/docs/reference/sql/functions/grouping.asciidoc @@ -50,7 +50,7 @@ or date/time fields: ["source","sql",subs="attributes,callouts,macros"] ---- -include-tagged::{sql-specs}/docs.csv-spec[histogramDate] +include-tagged::{sql-specs}/docs.csv-spec[histogramDateTime] ---- Expressions inside the histogram are also supported as long as the @@ -74,5 +74,5 @@ Instead one can rewrite the query to move the expression on the histogram _insid ["source","sql",subs="attributes,callouts,macros"] ---- -include-tagged::{sql-specs}/docs.csv-spec[histogramDateExpression] +include-tagged::{sql-specs}/docs.csv-spec[histogramDateTimeExpression] ---- diff --git a/docs/reference/sql/functions/type-conversion.asciidoc b/docs/reference/sql/functions/type-conversion.asciidoc index dec7ccb77ae4b..b99e9cc5e9c0d 100644 --- a/docs/reference/sql/functions/type-conversion.asciidoc +++ b/docs/reference/sql/functions/type-conversion.asciidoc @@ -37,7 +37,7 @@ include-tagged::{sql-specs}/docs.csv-spec[conversionIntToStringCast] 
["source","sql",subs="attributes,callouts,macros"] ---- -include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateCast] +include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateTimeCast] ---- diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java index 33a6b791dfa44..097bc476bcb09 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java @@ -28,7 +28,7 @@ public enum EsType implements SQLType { OBJECT( Types.STRUCT), NESTED( Types.STRUCT), BINARY( Types.VARBINARY), - DATE( Types.TIMESTAMP), + DATETIME( Types.TIMESTAMP), IP( Types.VARCHAR), INTERVAL_YEAR( ExtraTypes.INTERVAL_YEAR), INTERVAL_MONTH( ExtraTypes.INTERVAL_MONTH), @@ -64,4 +64,4 @@ public String getVendor() { public Integer getVendorTypeNumber() { return type; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java index 74fb9b43ddafe..041c457d91b3d 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java @@ -367,7 +367,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ || x instanceof Time || x instanceof java.util.Date) { - if (dataType == EsType.DATE) { + if (dataType == EsType.DATETIME) { // converting to {@code java.util.Date} because this is the type supported by {@code XContentBuilder} for serialization java.util.Date dateToSet; if (x instanceof Timestamp) { @@ -532,4 +532,4 @@ public boolean execute(String sql, String[] columnNames) throws SQLException { public long 
executeLargeUpdate() throws SQLException { throw new SQLFeatureNotSupportedException("Batching not supported"); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java index d089a99b0eeac..8c01b3112effa 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java @@ -245,7 +245,7 @@ private Long dateTime(int columnIndex) throws SQLException { // TODO: the B6 appendix of the jdbc spec does mention CHAR, VARCHAR, LONGVARCHAR, DATE, TIMESTAMP as supported // jdbc types that should be handled by getDate and getTime methods. From all of those we support VARCHAR and // TIMESTAMP. Should we consider the VARCHAR conversion as a later enhancement? - if (EsType.DATE == type) { + if (EsType.DATETIME == type) { // the cursor can return an Integer if the date-since-epoch is small enough, XContentParser (Jackson) will // return the "smallest" data type for numbers when parsing // TODO: this should probably be handled server side diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index 80f00ea3bbe29..9274e9061d453 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -213,7 +213,7 @@ static Object convert(Object v, EsType columnType, String typeString) throws SQL return doubleValue(v); // Double might be represented as string for infinity and NaN values case FLOAT: return floatValue(v); // Float might be represented as string for infinity and NaN values - case DATE: + case DATETIME: return 
JdbcDateUtils.asDateTimeField(v, JdbcDateUtils::asTimestamp, Timestamp::new); case INTERVAL_YEAR: case INTERVAL_MONTH: @@ -467,21 +467,21 @@ private static Double asDouble(Object val, EsType columnType, String typeString) } private static Date asDate(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATE) { + if (columnType == EsType.DATETIME) { return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asDate, Date::new); } return failConversion(val, columnType, typeString, Date.class); } private static Time asTime(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATE) { + if (columnType == EsType.DATETIME) { return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTime, Time::new); } return failConversion(val, columnType, typeString, Time.class); } private static Timestamp asTimestamp(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATE) { + if (columnType == EsType.DATETIME) { return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTimestamp, Timestamp::new); } return failConversion(val, columnType, typeString, Timestamp.class); @@ -538,4 +538,4 @@ private static long safeToLong(double x) throws SQLException { } return Math.round(x); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java index 2a5f27f11bd64..ab8465dab90c3 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java @@ -37,7 +37,7 @@ private TypeUtils() {} private static final Set SIGNED_TYPE = EnumSet.of(EsType.BYTE, EsType.SHORT, EsType.INTEGER, EsType.LONG, - EsType.FLOAT, EsType.HALF_FLOAT, EsType.SCALED_FLOAT, EsType.DOUBLE, EsType.DATE); + 
EsType.FLOAT, EsType.HALF_FLOAT, EsType.SCALED_FLOAT, EsType.DOUBLE, EsType.DATETIME); static { @@ -52,16 +52,16 @@ private TypeUtils() {} aMap.put(String.class, EsType.KEYWORD); aMap.put(byte[].class, EsType.BINARY); aMap.put(String.class, EsType.KEYWORD); - aMap.put(Timestamp.class, EsType.DATE); + aMap.put(Timestamp.class, EsType.DATETIME); // apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP} // according to B-4 table from the jdbc4.2 spec - aMap.put(Calendar.class, EsType.DATE); - aMap.put(GregorianCalendar.class, EsType.DATE); - aMap.put(java.util.Date.class, EsType.DATE); - aMap.put(java.sql.Date.class, EsType.DATE); - aMap.put(java.sql.Time.class, EsType.DATE); - aMap.put(LocalDateTime.class, EsType.DATE); + aMap.put(Calendar.class, EsType.DATETIME); + aMap.put(GregorianCalendar.class, EsType.DATETIME); + aMap.put(java.util.Date.class, EsType.DATETIME); + aMap.put(java.sql.Date.class, EsType.DATETIME); + aMap.put(java.sql.Time.class, EsType.DATETIME); + aMap.put(LocalDateTime.class, EsType.DATETIME); CLASS_TO_TYPE = Collections.unmodifiableMap(aMap); Map> types = new LinkedHashMap<>(); @@ -77,7 +77,7 @@ private TypeUtils() {} types.put(EsType.KEYWORD, String.class); types.put(EsType.TEXT, String.class); types.put(EsType.BINARY, byte[].class); - types.put(EsType.DATE, Timestamp.class); + types.put(EsType.DATETIME, Timestamp.class); types.put(EsType.IP, String.class); types.put(EsType.INTERVAL_YEAR, Period.class); types.put(EsType.INTERVAL_MONTH, Period.class); @@ -172,4 +172,4 @@ static EsType of(Class clazz) throws SQLException { } return dataType; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java index 50143f729370f..9134378a370c8 100644 --- 
a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java @@ -29,7 +29,7 @@ import static org.elasticsearch.xpack.sql.jdbc.EsType.BINARY; import static org.elasticsearch.xpack.sql.jdbc.EsType.BOOLEAN; import static org.elasticsearch.xpack.sql.jdbc.EsType.BYTE; -import static org.elasticsearch.xpack.sql.jdbc.EsType.DATE; +import static org.elasticsearch.xpack.sql.jdbc.EsType.DATETIME; import static org.elasticsearch.xpack.sql.jdbc.EsType.DOUBLE; import static org.elasticsearch.xpack.sql.jdbc.EsType.FLOAT; import static org.elasticsearch.xpack.sql.jdbc.EsType.HALF_FLOAT; @@ -371,13 +371,13 @@ public void testSettingTimestampValues() throws SQLException { Timestamp someTimestamp = new Timestamp(randomLong()); jps.setTimestamp(1, someTimestamp); assertEquals(someTimestamp.getTime(), ((Date)value(jps)).getTime()); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); Calendar nonDefaultCal = randomCalendar(); // February 29th, 2016. 
01:17:55 GMT = 1456708675000 millis since epoch jps.setTimestamp(1, new Timestamp(1456708675000L), nonDefaultCal); assertEquals(1456708675000L, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); long beforeEpochTime = randomLongBetween(Long.MIN_VALUE, 0); jps.setTimestamp(1, new Timestamp(beforeEpochTime), nonDefaultCal); @@ -404,7 +404,7 @@ public void testSettingTimeValues() throws SQLException { Calendar nonDefaultCal = randomCalendar(); jps.setTime(1, time, nonDefaultCal); assertEquals(4675000, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, time, Types.VARCHAR); @@ -426,13 +426,13 @@ public void testSettingSqlDateValues() throws SQLException { java.sql.Date someSqlDate = new java.sql.Date(randomLong()); jps.setDate(1, someSqlDate); assertEquals(someSqlDate.getTime(), ((Date)value(jps)).getTime()); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); someSqlDate = new java.sql.Date(randomLong()); Calendar nonDefaultCal = randomCalendar(); jps.setDate(1, someSqlDate, nonDefaultCal); assertEquals(someSqlDate.getTime(), convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, someSqlDate, Types.VARCHAR); @@ -456,7 +456,7 @@ public void testSettingCalendarValues() throws SQLException { jps.setObject(1, someCalendar); assertEquals(someCalendar.getTime(), value(jps)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, someCalendar, Types.VARCHAR); @@ -466,7 +466,7 @@ public void testSettingCalendarValues() throws SQLException { Calendar nonDefaultCal = 
randomCalendar(); jps.setObject(1, nonDefaultCal); assertEquals(nonDefaultCal.getTime(), value(jps)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); } public void testThrownExceptionsWhenSettingCalendarValues() throws SQLException { @@ -483,7 +483,7 @@ public void testSettingDateValues() throws SQLException { jps.setObject(1, someDate); assertEquals(someDate, value(jps)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, someDate, Types.VARCHAR); @@ -505,7 +505,7 @@ public void testSettingLocalDateTimeValues() throws SQLException { jps.setObject(1, ldt); assertEquals(Date.class, value(jps).getClass()); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, ldt, Types.VARCHAR); diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java index 39b9393dacbe8..2e33f4e130741 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java @@ -41,8 +41,8 @@ public void testDoubleAsNative() throws Exception { public void testTimestampAsNative() throws Exception { DateTime now = DateTime.now(); - assertThat(convertAsNative(now, EsType.DATE), instanceOf(Timestamp.class)); - assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, EsType.DATE)).getTime()); + assertThat(convertAsNative(now, EsType.DATETIME), instanceOf(Timestamp.class)); + assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, EsType.DATETIME)).getTime()); } private Object convertAsNative(Object value, EsType type) throws Exception { diff --git 
a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java index 51de82f97413b..868c9584a0057 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java @@ -64,11 +64,12 @@ public void testTextualType() throws IOException { } public void testDateTimes() throws IOException { - assertQuery("SELECT CAST('2019-01-14T12:29:25.000Z' AS DATE)", "CAST('2019-01-14T12:29:25.000Z' AS DATE)", "date", - "2019-01-14T12:29:25.000Z", 24); - assertQuery("SELECT CAST(-26853765751000 AS DATE)", "CAST(-26853765751000 AS DATE)", "date", "1119-01-15T12:37:29.000Z", 24); - assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATE)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATE)", "date", - "1119-01-15T12:37:29.000Z", 24); + assertQuery("SELECT CAST('2019-01-14T12:29:25.000Z' AS DATETIME)", "CAST('2019-01-14T12:29:25.000Z' AS DATETIME)", + "datetime", "2019-01-14T12:29:25.000Z", 24); + assertQuery("SELECT CAST(-26853765751000 AS DATETIME)", "CAST(-26853765751000 AS DATETIME)", + "datetime", "1119-01-15T12:37:29.000Z", 24); + assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", + "datetime", "1119-01-15T12:37:29.000Z", 24); } public void testIPs() throws IOException { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java index 3d5308d148d57..2550026c153a5 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java @@ -201,10 +201,10 @@ public void 
testGettingInvalidByte() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getByte("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Byte]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Byte.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Byte]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)), sqle.getMessage()); }); } @@ -324,10 +324,10 @@ public void testGettingInvalidShort() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getShort("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Short]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Short.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Short]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)), sqle.getMessage()); }); } @@ -439,10 +439,10 @@ public void testGettingInvalidInteger() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getInt("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Integer]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> 
results.getObject("test_date", Integer.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Integer]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)), sqle.getMessage()); }); } @@ -541,10 +541,10 @@ public void testGettingInvalidLong() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getLong("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Long]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Long.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Long]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)), sqle.getMessage()); }); } @@ -624,10 +624,10 @@ public void testGettingInvalidDouble() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getDouble("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Double]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Double.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Double]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)), sqle.getMessage()); }); } @@ -707,10 +707,10 @@ public void testGettingInvalidFloat() throws Exception { sqle.getMessage()); sqle = 
expectThrows(SQLException.class, () -> results.getFloat("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Float]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Float.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Float]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)), sqle.getMessage()); }); } @@ -768,7 +768,7 @@ public void testGettingBooleanValues() throws Exception { assertEquals("Expected: but was: for field " + fld, true, results.getObject(fld, Boolean.class)); } SQLException sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate1)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate1)), sqle.getMessage()); results.next(); @@ -778,11 +778,11 @@ public void testGettingBooleanValues() throws Exception { assertEquals("Expected: but was: for field " + fld, false, results.getObject(fld, Boolean.class)); } sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate2)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Boolean.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate2)), + assertEquals(format(Locale.ROOT, "Unable to 
convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)), sqle.getMessage()); results.next(); diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index 668316372c4bb..7051353d78dda 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -223,7 +223,7 @@ SELECT HISTOGRAM(salary, 5000) AS h FROM test_emp GROUP BY h; 70000 ; -histogramDate +histogramDateTime schema::h:ts|c:l SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h; @@ -247,7 +247,7 @@ null |10 ; -histogramDateWithCountAndOrder +histogramDateTimeWithCountAndOrder schema::h:ts|c:l SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC; @@ -270,7 +270,7 @@ SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp null |10 ; -histogramDateWithMonthOnTop +histogramDateTimeWithMonthOnTop schema::h:i|c:l SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC; @@ -286,7 +286,7 @@ SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP B null |10 ; -histogramDateWithYearOnTop +histogramDateTimeWithYearOnTop schema::h:i|c:l SELECT HISTOGRAM(YEAR(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC; h | c diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec index 21dd7bf530e3d..7406ea488308d 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec @@ -18,13 +18,13 @@ SELECT gender g FROM "test_emp" WHERE emp_no < 10020 GROUP BY g ORDER BY gender; groupByOnTextOnAliasOrderDesc SELECT gender g FROM "test_emp" WHERE emp_no < 10020 GROUP BY g ORDER BY g DESC; -groupByOnDate +groupByOnDateTime SELECT birth_date b FROM "test_emp" 
GROUP BY birth_date ORDER BY birth_date DESC; -groupByOnDateWithWhereClause +groupByOnDateTimeWithWhereClause SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date ORDER BY birth_date DESC; -groupByOnDateWithWhereAndLimit +groupByOnDateTimeWithWhereAndLimit SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date ORDER BY birth_date DESC LIMIT 1; -groupByOnDateOnAlias +groupByOnDateTimeOnAlias SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY b ORDER BY birth_date DESC; groupByOnNumber @@ -62,13 +62,13 @@ SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l groupByMultiOnTextOnAliasOrderDesc SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l ORDER BY g, l ASC; -groupByMultiOnDate +groupByMultiOnDateTime SELECT birth_date b, languages l FROM "test_emp" GROUP BY birth_date, languages ORDER BY birth_date DESC, languages; -groupByMultiOnDateWithWhereClause +groupByMultiOnDateTimeWithWhereClause SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages; -groupByMultiOnDateWithWhereAndLimit +groupByMultiOnDateTimeWithWhereAndLimit SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages LIMIT 1; -groupByMultiOnDateOnAlias +groupByMultiOnDateTimeOnAlias SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY b, l ORDER BY birth_date DESC, languages; groupByMultiAddScalar @@ -248,7 +248,7 @@ aggMinWithCastAndFilter SELECT gender g, CAST(MIN(emp_no) AS SMALLINT) m, COUNT(1) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender ORDER BY gender; aggMinWithAlias SELECT gender g, MIN(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender; -aggMinOnDate +aggMinOnDateTime SELECT gender, MIN(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender; // Conditional MIN @@ -304,7 +304,7 
@@ aggMaxAndCountWithFilterAndLimit SELECT gender g, MAX(emp_no) m, COUNT(1) c FROM "test_emp" WHERE emp_no > 10000 GROUP BY gender ORDER BY gender LIMIT 1; aggMaxWithAlias SELECT gender g, MAX(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender; -aggMaxOnDate +aggMaxOnDateTime SELECT gender, MAX(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender; aggAvgAndMaxWithLikeFilter SELECT CAST(AVG(salary) AS LONG) AS avg, CAST(SUM(salary) AS LONG) AS s FROM "test_emp" WHERE first_name LIKE 'G%'; @@ -482,9 +482,9 @@ selectCountWhereIsNull SELECT COUNT(*) count FROM test_emp WHERE first_name IS NULL; selectLanguagesCountWithNullsAndGroupByLanguage SELECT languages l, COUNT(*) c FROM test_emp GROUP BY languages ORDER BY languages; -selectHireDateGroupByHireDate +selectHireDateTimeGroupByHireDateTime SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC; -selectHireDateGroupByHireDate +selectHireDateTimeGroupByHireDateTime SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC; selectSalaryGroupBySalary SELECT salary, COUNT(*) c FROM test_emp GROUP BY salary ORDER BY salary DESC; diff --git a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec index 7ad8ef342669d..4134db187c9a6 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec @@ -28,13 +28,13 @@ DESCRIBE test_alias; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer extra |STRUCT |object extra.info |STRUCT |object @@ -44,7 +44,7 @@ extra_no |INTEGER 
|integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword @@ -56,13 +56,13 @@ DESCRIBE "test_*"; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer extra |STRUCT |object extra.info |STRUCT |object @@ -72,7 +72,7 @@ extra_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec index 0128873997bd3..e23ee39c54610 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec @@ -228,13 +228,13 @@ DESCRIBE LIKE 'test_emp'; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer extra |STRUCT |object extra.info |STRUCT |object @@ -244,7 +244,7 @@ extra_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date 
|TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword @@ -256,13 +256,13 @@ DESCRIBE LIKE 'test_emp%'; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer extra |STRUCT |object extra.info |STRUCT |object @@ -272,7 +272,7 @@ extra_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword @@ -284,18 +284,18 @@ DESCRIBE "test_emp"; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword @@ -310,18 +310,18 @@ DESCRIBE "test_*,-test_alias*"; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime 
+dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec index 9434ead51da9b..8d9a65d1b85b6 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec @@ -141,7 +141,7 @@ INTERVAL 1 DAY + INTERVAL 53 MINUTES ; datePlusIntervalInline -SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result; +SELECT CAST('1969-05-13T12:34:56' AS DATETIME) + INTERVAL 49 YEARS AS result; result -------------------- @@ -183,7 +183,7 @@ SELECT -2 * INTERVAL '1 23:45' DAY TO MINUTES AS result; ; dateMinusInterval -SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result; +SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result; result -------------------- @@ -288,4 +288,4 @@ SELECT birth_date, MAX(hire_date) - INTERVAL 1 YEAR AS f FROM test_emp GROUP BY 1952-05-15T00:00:00Z|1953 1952-06-13T00:00:00Z|1953 1952-07-08T00:00:00Z|1953 -; \ No newline at end of file +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index 5e51ae69bf396..39681e7118fc1 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -54,7 +54,7 @@ d:i | l:s ; // -// Date +// DateTime // dateTimeIsoDayOfWeek SELECT ISO_DAY_OF_WEEK(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY ISO_DAY_OF_WEEK(birth_date); @@ -380,4 +380,4 @@ Berni Bezalel Bojan -; \ No newline at end of file +; diff --git 
a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec index 4b12d2de58fc7..3748a116b7450 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec @@ -10,7 +10,7 @@ // This has implications on the results, which could change given specific locales where the rules for determining the start of a year are different. // -// Date +// DateTime // dateTimeDay @@ -25,10 +25,10 @@ SELECT MONTH(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORD dateTimeYear SELECT YEAR(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -monthNameFromStringDate +monthNameFromStringDateTime SELECT MONTHNAME(CAST('2018-09-03' AS TIMESTAMP)) month FROM "test_emp" limit 1; -dayNameFromStringDate +dayNameFromStringDateTime SELECT DAYNAME(CAST('2018-09-03' AS TIMESTAMP)) day FROM "test_emp" limit 1; quarterSelect diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec index 2ffbdc302af47..5c4f016d16459 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec @@ -14,18 +14,18 @@ DESCRIBE emp; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword @@ -53,18 +53,18 @@ SHOW COLUMNS IN emp; column | type 
| mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword -dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword @@ -746,9 +746,9 @@ SELECT HISTOGRAM(salary % 100, 10) AS h, COUNT(*) AS c FROM emp GROUP BY h; // end::histogramNumericExpression ; -histogramDate +histogramDateTime schema::h:ts|c:l -// tag::histogramDate +// tag::histogramDateTime SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) AS c FROM emp GROUP BY h; @@ -770,7 +770,7 @@ null |10 1963-02-07T00:00:00Z|7 1964-02-02T00:00:00Z|5 -// end::histogramDate +// end::histogramDateTime ; expressionOnHistogramNotAllowed-Ignore @@ -778,9 +778,9 @@ expressionOnHistogramNotAllowed-Ignore SELECT MONTH(HISTOGRAM(birth_date), 2)) AS h, COUNT(*) as c FROM emp GROUP BY h ORDER BY h DESC; // end::expressionOnHistogramNotAllowed -histogramDateExpression +histogramDateTimeExpression schema::h:i|c:l -// tag::histogramDateExpression +// tag::histogramDateTimeExpression SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM emp GROUP BY h ORDER BY h DESC; h | c @@ -794,12 +794,12 @@ SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM emp GROUP BY h O 0 |6 null |10 -// end::histogramDateExpression +// end::histogramDateTimeExpression ; /////////////////////////////// // -// Date/Time +// DateTime/Time // /////////////////////////////// @@ -816,14 +816,14 @@ SELECT INTERVAL 1 DAY + INTERVAL 53 MINUTES AS result; ; -dtDatePlusInterval -// tag::dtDatePlusInterval -SELECT 
CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result; +dtDateTimePlusInterval +// tag::dtDateTimePlusInterval +SELECT CAST('1969-05-13T12:34:56' AS DATETIME) + INTERVAL 49 YEARS AS result; result -------------------- 2018-05-13T12:34:56Z -// end::dtDatePlusInterval +// end::dtDateTimePlusInterval ; dtMinusInterval @@ -848,14 +848,14 @@ SELECT INTERVAL '1' DAY - INTERVAL '2' HOURS AS result; ; -dtDateMinusInterval -// tag::dtDateMinusInterval -SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result; +dtDateTimeMinusInterval +// tag::dtDateTimeMinusInterval +SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result; result -------------------- 2015-09-13T12:34:56Z -// end::dtDateMinusInterval +// end::dtDateTimeMinusInterval ; dtIntervalMul @@ -1360,14 +1360,14 @@ SELECT CAST(123 AS VARCHAR) AS string; // end::conversionIntToStringCast ; -conversionStringToDateCast -// tag::conversionStringToDateCast +conversionStringToDateTimeCast +// tag::conversionStringToDateTimeCast SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year; year --------------- 2018 -// end::conversionStringToDateCast +// end::conversionStringToDateTimeCast ; /////////////////////////////// @@ -1918,7 +1918,7 @@ elastic /////////////////////////////// // -// Date-Time functions +// DateTime-Time functions // /////////////////////////////// diff --git a/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec index d20769e237f97..39f9b2965c6dc 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec @@ -8,18 +8,18 @@ DESCRIBE test_emp; column | type | mapping --------------------+---------------+--------------- -birth_date |TIMESTAMP |date +birth_date |TIMESTAMP |datetime dep |STRUCT |nested dep.dep_id |VARCHAR |keyword dep.dep_name |VARCHAR |text dep.dep_name.keyword|VARCHAR |keyword 
-dep.from_date |TIMESTAMP |date -dep.to_date |TIMESTAMP |date +dep.from_date |TIMESTAMP |datetime +dep.to_date |TIMESTAMP |datetime emp_no |INTEGER |integer first_name |VARCHAR |text first_name.keyword |VARCHAR |keyword gender |VARCHAR |keyword -hire_date |TIMESTAMP |date +hire_date |TIMESTAMP |datetime languages |TINYINT |byte last_name |VARCHAR |text last_name.keyword |VARCHAR |keyword diff --git a/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec index 474fceaed4612..19541cf5d9f32 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec @@ -3,7 +3,7 @@ // dateTimeOverNull -SELECT YEAR(CAST(NULL AS DATE)) d; +SELECT YEAR(CAST(NULL AS DATETIME)) d; d:i null diff --git a/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql b/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql index 69c572f4ddd4e..f61d48af4ff37 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql +++ b/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql @@ -33,7 +33,7 @@ SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 0, 2147483647, null, null, null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL UNION ALL -SELECT null, 'test2', 'date', 93, 'DATE', 24, 8, null, null, +SELECT null, 'test2', 'date', 93, 'DATETIME', 24, 8, null, null, 1, -- columnNullable null, null, 9, 3, null, 1, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 618dd66d88d11..43d356720f8ed 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -384,7 +384,7 @@ private static EsField createField(String fieldName, String typeName, Map exps, Predicate exps) { - return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new)); + return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new)); } public static boolean foldable(List exps) { @@ -171,25 +171,25 @@ public static TypeResolution typeMustBeString(Expression e, String operationName } public static TypeResolution typeMustBeDate(Expression e, String operationName, ParamOrdinal paramOrd) { - return typeMustBe(e, dt -> dt == DataType.DATE, operationName, paramOrd, "date"); + return typeMustBe(e, dt -> dt == DataType.DATETIME, operationName, paramOrd, "date"); } public static TypeResolution typeMustBeNumericOrDate(Expression e, String operationName, ParamOrdinal paramOrd) { - return typeMustBe(e, dt -> dt.isNumeric() || dt == DataType.DATE, operationName, paramOrd, "numeric", "date"); + return typeMustBe(e, dt -> dt.isNumeric() || dt == DataType.DATETIME, operationName, paramOrd, "numeric", "date"); } public static TypeResolution typeMustBe(Expression e, - Predicate predicate, - String operationName, - ParamOrdinal paramOrd, - String... acceptedTypes) { + Predicate predicate, + String operationName, + ParamOrdinal paramOrd, + String... acceptedTypes) { return predicate.test(e.dataType()) || DataTypes.isNull(e.dataType())? TypeResolution.TYPE_RESOLVED : new TypeResolution(format(Locale.ROOT, "[%s]%s argument must be [%s], found value [%s] type [%s]", - operationName, - paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT), - Strings.arrayToDelimitedString(acceptedTypes, " or "), - Expressions.name(e), - e.dataType().esType)); + operationName, + paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? 
"" : " " + paramOrd.name().toLowerCase(Locale.ROOT), + Strings.arrayToDelimitedString(acceptedTypes, " or "), + Expressions.name(e), + e.dataType().esType)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java index 1cace59a2cc00..46614755b7e8f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -42,7 +42,7 @@ protected TypeResolution resolveType() { TypeResolution resolution = Expressions.typeMustBeNumericOrDate(field(), "HISTOGRAM", ParamOrdinal.FIRST); if (resolution == TypeResolution.TYPE_RESOLVED) { // interval must be Literal interval - if (field().dataType() == DataType.DATE) { + if (field().dataType() == DataType.DATETIME) { resolution = Expressions.typeMustBe(interval, DataTypes::isInterval, "(Date) HISTOGRAM", ParamOrdinal.SECOND, "interval"); } else { resolution = Expressions.typeMustBeNumeric(interval, "(Numeric) HISTOGRAM", ParamOrdinal.SECOND); @@ -81,4 +81,4 @@ public boolean equals(Object obj) { } return false; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java index 50a7f8868141a..82556795b5961 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java @@ -21,7 +21,7 @@ public class CurrentDateTime extends ConfigurationFunction { private final ZonedDateTime dateTime; public 
CurrentDateTime(Source source, Expression precision, Configuration configuration) { - super(source, configuration, DataType.DATE); + super(source, configuration, DataType.DATETIME); this.precision = precision; int p = precision != null ? ((Number) precision.fold()).intValue() : 0; this.dateTime = nanoPrecision(configuration().now(), p); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java index 074518f6b7d7c..cd13570a1ad10 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java @@ -79,7 +79,7 @@ default ScriptTemplate scriptWithScalar(ScalarFunctionAttribute scalar) { default ScriptTemplate scriptWithAggregate(AggregateFunctionAttribute aggregate) { String template = "{}"; - if (aggregate.dataType() == DataType.DATE) { + if (aggregate.dataType() == DataType.DATETIME) { template = "{sql}.asDateTime({})"; } return new ScriptTemplate(processScript(template), @@ -89,7 +89,7 @@ default ScriptTemplate scriptWithAggregate(AggregateFunctionAttribute aggregate) default ScriptTemplate scriptWithGrouping(GroupingFunctionAttribute grouping) { String template = "{}"; - if (grouping.dataType() == DataType.DATE) { + if (grouping.dataType() == DataType.DATETIME) { template = "{sql}.asDateTime({})"; } return new ScriptTemplate(processScript(template), @@ -110,4 +110,4 @@ default String processScript(String script) { default String formatTemplate(String template) { return Scripts.formatTemplate(template); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 
132c390337628..68baa84a802f6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -411,9 +411,9 @@ public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) { case "float": case "double": return DataType.DOUBLE; - case "date": + case "datetime": case "timestamp": - return DataType.DATE; + return DataType.DATETIME; case "char": case "varchar": case "string": @@ -793,7 +793,7 @@ public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) { } catch(IllegalArgumentException ex) { throw new ParsingException(source, "Invalid date received; {}", ex.getMessage()); } - return new Literal(source, DateUtils.of(dt), DataType.DATE); + return new Literal(source, DateUtils.of(dt), DataType.DATETIME); } @Override @@ -829,7 +829,7 @@ public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) } catch (IllegalArgumentException ex) { throw new ParsingException(source, "Invalid timestamp received; {}", ex.getMessage()); } - return new Literal(source, DateUtils.of(dt), DataType.DATE); + return new Literal(source, DateUtils.of(dt), DataType.DATETIME); } @Override @@ -930,4 +930,4 @@ private static Source minusAwareSource(SqlBaseParser.NumberContext ctx) { } return null; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 46380a9de2afd..5189a0ca4981e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -284,7 +284,7 @@ protected PhysicalPlan rule(AggregateExec a) { if (matchingGroup != null) { if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) { 
Processor action = null; - ZoneId zi = DataType.DATE == exp.dataType() ? DateUtils.UTC : null; + ZoneId zi = DataType.DATETIME == exp.dataType() ? DateUtils.UTC : null; /* * special handling of dates since aggs return the typed Date object which needs * extraction instead of handling this in the scroller, the folder handles this @@ -335,7 +335,7 @@ protected PhysicalPlan rule(AggregateExec a) { // check if the field is a date - if so mark it as such to interpret the long as a date // UTC is used since that's what the server uses and there's no conversion applied // (like for date histograms) - ZoneId zi = DataType.DATE == child.dataType() ? DateUtils.UTC : null; + ZoneId zi = DataType.DATETIME == child.dataType() ? DateUtils.UTC : null; queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi)); } // handle histogram @@ -359,7 +359,7 @@ else if (child instanceof GroupingFunction) { matchingGroup = groupingContext.groupFor(ne); Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne)); - ZoneId zi = DataType.DATE == ne.dataType() ? DateUtils.UTC : null; + ZoneId zi = DataType.DATETIME == ne.dataType() ? 
DateUtils.UTC : null; queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index e0472f27131f0..489e1506edf1a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -275,7 +275,7 @@ else if (exp instanceof GroupingFunction) { Expression field = h.field(); // date histogram - if (h.dataType() == DataType.DATE) { + if (h.dataType() == DataType.DATETIME) { long intervalAsMillis = Intervals.inMillis(h.interval()); // TODO: set timezone if (field instanceof FieldAttribute) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java index 71c0e4f3e847b..ada855ec1511d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java @@ -15,7 +15,7 @@ import java.util.Objects; /** - * GROUP BY key based on histograms on date fields. + * GROUP BY key based on histograms on date/datetime fields. 
*/ public class GroupByDateHistogram extends GroupByKey { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java index 7d74c1c3330e2..8626ea18e30c5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java @@ -39,7 +39,7 @@ public final CompositeValuesSourceBuilder asValueSource() { builder.valueType(ValueType.DOUBLE); } else if (script.outputType().isString()) { builder.valueType(ValueType.STRING); - } else if (script.outputType() == DataType.DATE) { + } else if (script.outputType() == DataType.DATETIME) { builder.valueType(ValueType.DATE); } else if (script.outputType() == DataType.BOOLEAN) { builder.valueType(ValueType.BOOLEAN); @@ -78,4 +78,4 @@ public boolean equals(Object obj) { && Objects.equals(script, ((GroupByKey) obj).script) && Objects.equals(direction, ((GroupByKey) obj).direction); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 43b1045ff7092..9a784b7b112ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -183,7 +183,7 @@ private Tuple nestedHitFieldRef(FieldAttribute List nestedRefs = new ArrayList<>(); String name = aliasName(attr); - String format = attr.field().getDataType() == DataType.DATE ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT; + String format = attr.field().getDataType() == DataType.DATETIME ? 
"epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT; Query q = rewriteToContainNestedField(query, attr.source(), attr.nestedParent().name(), name, format, attr.field().isAggregatable()); @@ -362,4 +362,4 @@ public String toString() { throw new RuntimeException("error rendering", e); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java index 7f799108d28ec..5e51b36fc3c32 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java @@ -49,7 +49,7 @@ public void collectFields(SqlSourceBuilder sourceBuilder) { return; } if (docValue) { - String format = dataType == DataType.DATE ? "epoch_millis" : null; + String format = dataType == DataType.DATETIME ? 
"epoch_millis" : null; sourceBuilder.addDocField(name, format); } else { sourceBuilder.addSourceField(name); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index b8d55f22942eb..f233632d0f656 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -44,7 +44,7 @@ public enum DataType { // since ODBC and JDBC interpret precision for Date as display size, // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone) // see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288 - DATE( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true), + DATETIME( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true), // // specialized types // @@ -102,9 +102,9 @@ public enum DataType { odbcToEs.put("SQL_LONGVARBINARY", BINARY); // Date - odbcToEs.put("SQL_DATE", DATE); - odbcToEs.put("SQL_TIME", DATE); - odbcToEs.put("SQL_TIMESTAMP", DATE); + odbcToEs.put("SQL_DATE", DATETIME); + odbcToEs.put("SQL_TIME", DATETIME); + odbcToEs.put("SQL_TIMESTAMP", DATETIME); // Intervals odbcToEs.put("SQL_INTERVAL_HOUR_TO_MINUTE", INTERVAL_HOUR_TO_MINUTE); @@ -225,10 +225,14 @@ public static DataType fromOdbcType(String odbcType) { * For any dataType DataType.fromTypeName(dataType.esType) == dataType */ public static DataType fromTypeName(String esType) { + String uppercase = esType.toUpperCase(Locale.ROOT); + if (uppercase.equals("DATE")) { + return DataType.DATETIME; + } try { - return DataType.valueOf(esType.toUpperCase(Locale.ROOT)); + return DataType.valueOf(uppercase); } catch (IllegalArgumentException ex) { return DataType.UNSUPPORTED; } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index 3cfb5d5ddf804..f3cf3d2bac1ac 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -17,7 +17,7 @@ import java.util.function.LongFunction; import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.LONG; import static org.elasticsearch.xpack.sql.type.DataType.NULL; @@ -83,7 +83,7 @@ public static DataType commonType(DataType left, DataType right) { } if (DataTypes.isInterval(right)) { - if (left == DATE) { + if (left == DATETIME) { return left; } } @@ -145,8 +145,8 @@ private static Conversion conversion(DataType from, DataType to) { return conversionToFloat(from); case DOUBLE: return conversionToDouble(from); - case DATE: - return conversionToDate(from); + case DATETIME: + return conversionToDateTime(from); case BOOLEAN: return conversionToBoolean(from); default: @@ -156,7 +156,7 @@ private static Conversion conversion(DataType from, DataType to) { } private static Conversion conversionToString(DataType from) { - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_STRING; } return Conversion.OTHER_TO_STRING; @@ -182,7 +182,7 @@ private static Conversion conversionToLong(DataType from) { if (from.isString()) { return Conversion.STRING_TO_LONG; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_LONG; } return null; @@ -201,7 +201,7 @@ private static Conversion conversionToInt(DataType from) { if (from.isString()) { return Conversion.STRING_TO_INT; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_INT; } return null; @@ -220,7 +220,7 @@ private static Conversion 
conversionToShort(DataType from) { if (from.isString()) { return Conversion.STRING_TO_SHORT; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_SHORT; } return null; @@ -239,7 +239,7 @@ private static Conversion conversionToByte(DataType from) { if (from.isString()) { return Conversion.STRING_TO_BYTE; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_BYTE; } return null; @@ -258,7 +258,7 @@ private static Conversion conversionToFloat(DataType from) { if (from.isString()) { return Conversion.STRING_TO_FLOAT; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_FLOAT; } return null; @@ -277,13 +277,13 @@ private static Conversion conversionToDouble(DataType from) { if (from.isString()) { return Conversion.STRING_TO_DOUBLE; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_DOUBLE; } return null; } - private static Conversion conversionToDate(DataType from) { + private static Conversion conversionToDateTime(DataType from) { if (from.isRational()) { return Conversion.RATIONAL_TO_DATE; } @@ -306,7 +306,7 @@ private static Conversion conversionToBoolean(DataType from) { if (from.isString()) { return Conversion.STRING_TO_BOOLEAN; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_BOOLEAN; } return null; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index b865f541634b1..5a3fa235e9a73 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -12,7 +12,7 @@ import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.sql.type.DataType.BYTE; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static 
org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; @@ -68,7 +68,7 @@ public static DataType fromJava(Object value) { return SHORT; } if (value instanceof ZonedDateTime) { - return DATE; + return DATETIME; } if (value instanceof String || value instanceof Character) { return KEYWORD; @@ -166,7 +166,7 @@ private static String intervalUnit(char unitChar) { // https://docs.microsoft.com/en-us/sql/relational-databases/native-client-odbc-date-time/metadata-catalog // https://github.com/elastic/elasticsearch/issues/30386 public static Integer metaSqlDataType(DataType t) { - if (t == DATE) { + if (t == DATETIME) { // ODBC SQL_DATETME return Integer.valueOf(9); } @@ -177,7 +177,7 @@ public static Integer metaSqlDataType(DataType t) { // https://github.com/elastic/elasticsearch/issues/30386 // https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017 public static Integer metaSqlDateTimeSub(DataType t) { - if (t == DATE) { + if (t == DATETIME) { // ODBC SQL_CODE_TIMESTAMP return Integer.valueOf(3); } @@ -188,7 +188,7 @@ public static Integer metaSqlDateTimeSub(DataType t) { // https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits?view=sql-server-2017 public static Short metaSqlMinimumScale(DataType t) { // TODO: return info for HALF/SCALED_FLOATS (should be based on field not type) - if (t == DATE) { + if (t == DATETIME) { return Short.valueOf((short) 3); } if (t.isInteger()) { @@ -203,7 +203,7 @@ public static Short metaSqlMinimumScale(DataType t) { public static Short metaSqlMaximumScale(DataType t) { // TODO: return info for HALF/SCALED_FLOATS (should be based on field not type) - if (t == DATE) { + if (t == DATETIME) { return Short.valueOf((short) 3); } if (t.isInteger()) { @@ -223,4 +223,4 @@ public static 
Integer metaSqlRadix(DataType t) { // null means radix is not applicable for the given type. return t.isInteger() ? Integer.valueOf(10) : (t.isRational() ? Integer.valueOf(2) : null); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java index 04926db5407f5..71924adab5581 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java @@ -21,7 +21,7 @@ public class DateEsField extends EsField { private final List formats; public DateEsField(String name, Map properties, boolean hasDocValues, String... formats) { - super(name, DataType.DATE, properties, hasDocValues); + super(name, DataType.DATETIME, properties, hasDocValues); this.formats = CollectionUtils.isEmpty(formats) ? DEFAULT_FORMAT : Arrays.asList(formats); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java index f367f39530dae..0af0a5f322cc6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java @@ -86,7 +86,7 @@ private static void walkMapping(String name, Object value, Map boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null)); field = new KeywordEsField(name, properties, docValues, length, normalized); break; - case DATE: + case DATETIME: Object fmt = content.get("format"); if (fmt != null) { field = new DateEsField(name, properties, docValues, Strings.delimitedListToStringArray(fmt.toString(), "||")); @@ -118,4 +118,4 @@ private static boolean boolSetting(Object value, boolean defaultValue) { private static int intSetting(Object value, int defaultValue) { return value == 
null ? defaultValue : Integer.parseInt(value.toString()); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index e394296829b09..e45da9d08fee9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -373,7 +373,7 @@ public void testInNestedWithDifferentDataTypesFromLeftValue_WhereClause() { } public void testNotSupportedAggregateOnDate() { - assertEquals("1:8: [AVG(date)] argument must be [numeric], found value [date] type [date]", + assertEquals("1:8: [AVG(date)] argument must be [numeric], found value [date] type [datetime]", error("SELECT AVG(date) FROM test")); } @@ -510,14 +510,14 @@ public void testAggsInWhere() { public void testHistogramInFilter() { assertEquals("1:63: Cannot filter on grouping function [HISTOGRAM(date, INTERVAL 1 MONTH)], use its argument instead", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test WHERE " - + "HISTOGRAM(date, INTERVAL 1 MONTH) > CAST('2000-01-01' AS DATE) GROUP BY h")); + + "HISTOGRAM(date, INTERVAL 1 MONTH) > CAST('2000-01-01' AS DATETIME) GROUP BY h")); } // related https://github.com/elastic/elasticsearch/issues/36853 public void testHistogramInHaving() { assertEquals("1:75: Cannot filter on grouping function [h], use its argument instead", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test GROUP BY h HAVING " - + "h > CAST('2000-01-01' AS DATE)")); + + "h > CAST('2000-01-01' AS DATETIME)")); } public void testGroupByScalarOnTopOfGrouping() { @@ -548,3 +548,4 @@ public void testErrorMessageForPercentileRankWithSecondArgBasedOnAField() { e.getMessage()); } } + diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java index b53d00cfbb71d..bb328b2d8ffdc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -190,7 +190,7 @@ private static boolean isSearchable(DataType type) { } private static boolean isAggregatable(DataType type) { - return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATE; + return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATETIME; } private static class UpdateableFieldCapabilities extends FieldCapabilities { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 4f562e82b5c21..7677878ddac4f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -144,7 +144,7 @@ public void testGetDate() { SearchHit hit = new SearchHit(1); DocumentField field = new DocumentField("my_date_field", documentFieldValues); hit.fields(singletonMap("my_date_field", field)); - FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATE, true); + FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATETIME, true); assertEquals(DateUtils.of(millis), extractor.extract(hit)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java index 2a7af2916373e..7168716b529ea 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java @@ -28,6 +28,6 @@ private Object extract(Object value, ZoneId zoneId) { } private DayOfYear build(Object value, ZoneId zoneId) { - return new DayOfYear(Source.EMPTY, new Literal(Source.EMPTY, value, DataType.DATE), zoneId); + return new DayOfYear(Source.EMPTY, new Literal(Source.EMPTY, value, DataType.DATETIME), zoneId); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java index e329ad248108c..696f999b0b051 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java @@ -77,7 +77,7 @@ public void testAddDayTimeIntervals() { assertEquals(interval(Duration.ofDays(1).plusHours(2), INTERVAL_DAY_TO_HOUR), L(x)); } - public void testAddYearMonthIntervalToDate() { + public void testAddYearMonthIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); @@ -86,7 +86,7 @@ public void testAddYearMonthIntervalToDate() { assertEquals(L(now.plus(t)), L(x)); } - public void testAddDayTimeIntervalToDate() { + public void testAddDayTimeIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = 
Duration.ofHours(2); @@ -95,7 +95,7 @@ public void testAddDayTimeIntervalToDate() { assertEquals(L(now.plus(t)), L(x)); } - public void testAddDayTimeIntervalToDateReverse() { + public void testAddDayTimeIntervalToDateTimeReverse() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); @@ -124,7 +124,7 @@ public void testSubDayTimeIntervals() { assertEquals(interval(Duration.ofDays(1).plusHours(8), INTERVAL_DAY_TO_HOUR), L(x)); } - public void testSubYearMonthIntervalToDate() { + public void testSubYearMonthIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); @@ -133,7 +133,7 @@ public void testSubYearMonthIntervalToDate() { assertEquals(L(now.minus(t)), L(x)); } - public void testSubYearMonthIntervalToDateIllegal() { + public void testSubYearMonthIntervalToDateTimeIllegal() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); @@ -148,7 +148,7 @@ public void testSubNumberFromIntervalIllegal() { assertEquals("Cannot compute [-] between [IntervalDayTime] [Integer]", expect.getMessage()); } - public void testSubDayTimeIntervalToDate() { + public void testSubDayTimeIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 700097c46163a..6873e4a107fb6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -330,7 +330,7 @@ public void testConstantFoldingLikes() { } public void testConstantFoldingDatetime() { - 
Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATE); + Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATETIME); assertEquals(2018, foldFunction(new Year(EMPTY, cast, UTC))); assertEquals(1, foldFunction(new MonthOfYear(EMPTY, cast, UTC))); assertEquals(19, foldFunction(new DayOfMonth(EMPTY, cast, UTC))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java index 5969f8e5ed2cd..f3bf9fc03e777 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java @@ -170,7 +170,7 @@ public void testFunctionWithFunctionWithArgAndParams() { public void testDateLiteral() { Literal l = dateLiteral("2012-01-01"); - assertThat(l.dataType(), is(DataType.DATE)); + assertThat(l.dataType(), is(DataType.DATETIME)); } public void testDateLiteralValidation() { @@ -192,7 +192,7 @@ public void testTimeLiteralValidation() { public void testTimestampLiteral() { Literal l = timestampLiteral("2012-01-01 10:01:02.3456"); - assertThat(l.dataType(), is(DataType.DATE)); + assertThat(l.dataType(), is(DataType.DATETIME)); } public void testTimestampLiteralValidation() { @@ -236,4 +236,4 @@ public void testLikeEscape() { LikePattern pattern = likeEscape("|%tring", "|"); assertThat(pattern.escape(), is('|')); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java index 0462956bf851f..6ed46b74d4512 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java @@ -61,7 +61,7 @@ public void testSysTypes() throws Exception { Command cmd = sql("SYS TYPES").v1(); List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", - "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATETIME", "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", @@ -160,4 +160,4 @@ private void runSysColumns(String commandVariation) throws Exception { }, ex -> fail(ex.getMessage()))); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index 7adeddc9ebec3..92f734e539780 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -44,7 +44,7 @@ public void testSysTypes() throws Exception { Command cmd = sql("SYS TYPES").v1(); List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", - "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATETIME", "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", @@ -108,4 +108,4 @@ 
public void testSysTypesMultipleMatches() throws Exception { assertEquals("IP", r.column(0)); }, ex -> fail(ex.getMessage()))); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index 214d935251994..44f50b53b5aa3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -292,7 +292,7 @@ public void testGroupKeyTypes_IP() { assertThat(ee.output().get(1).toString(), startsWith("a{s->")); } - public void testGroupKeyTypes_Date() { + public void testGroupKeyTypes_DateTime() { PhysicalPlan p = plan("SELECT count(*), date + INTERVAL '1-2' YEAR TO MONTH AS a FROM test GROUP BY a"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec ee = (EsQueryExec) p; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 23beae2fd586e..8ee94194845a5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -170,7 +170,7 @@ public void testDateRangeLiteral() { } public void testDateRangeCast() { - LogicalPlan p = plan("SELECT some.string FROM test WHERE date > CAST('1969-05-13T12:34:56Z' AS DATE)"); + LogicalPlan p = plan("SELECT some.string FROM test WHERE date > CAST('1969-05-13T12:34:56Z' AS DATETIME)"); assertTrue(p instanceof Project); p = ((Project) p).child(); assertTrue(p instanceof Filter); @@ -480,7 +480,7 @@ public void testGroupByHistogram() { assertEquals("+2-0", h.interval().fold().toString()); Expression field = h.field(); 
assertEquals(FieldAttribute.class, field.getClass()); - assertEquals(DataType.DATE, field.dataType()); + assertEquals(DataType.DATETIME, field.dataType()); } public void testCountAndCountDistinctFolding() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index a44ce44d0f904..ac744c3365a54 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -18,7 +18,7 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.sql.type.DataType.BYTE; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; @@ -41,7 +41,7 @@ public void testConversionToString() { assertNull(conversion.convert(null)); assertEquals("10.0", conversion.convert(10.0)); - conversion = conversionFor(DATE, KEYWORD); + conversion = conversionFor(DATETIME, KEYWORD); assertNull(conversion.convert(null)); assertEquals("1970-01-01T00:00:00.000Z", conversion.convert(dateTime(0))); } @@ -80,8 +80,8 @@ public void testConversionToLong() { assertEquals("cannot cast [0xff] to [Long]", e.getMessage()); } - public void testConversionToDate() { - DataType to = DATE; + public void testConversionToDateTime() { + DataType to = DATETIME; { Conversion conversion = conversionFor(DOUBLE, to); assertNull(conversion.convert(null)); @@ -112,8 +112,8 @@ public void testConversionToDate() { // double check back and 
forth conversion ZonedDateTime dt = TestUtils.now(); - Conversion forward = conversionFor(DATE, KEYWORD); - Conversion back = conversionFor(KEYWORD, DATE); + Conversion forward = conversionFor(DATETIME, KEYWORD); + Conversion back = conversionFor(KEYWORD, DATETIME); assertEquals(dt, back.convert(forward.convert(dt))); Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [Date]:Invalid format: \"0xff\" is malformed at \"xff\"", e.getMessage()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java index ff6bf4611c827..7b38718dad794 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java @@ -9,7 +9,7 @@ import java.util.EnumSet; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR; @@ -37,32 +37,32 @@ public class DataTypesTests extends ESTestCase { public void testMetaDataType() { - assertEquals(Integer.valueOf(9), metaSqlDataType(DATE)); - DataType t = randomDataTypeNoDate(); + assertEquals(Integer.valueOf(9), metaSqlDataType(DATETIME)); + DataType t = randomDataTypeNoDateTime(); assertEquals(t.sqlType.getVendorTypeNumber(), metaSqlDataType(t)); } public void testMetaDateTypeSub() { - assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATE)); - assertEquals(Integer.valueOf(0), metaSqlDateTimeSub(randomDataTypeNoDate())); + assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATETIME)); + assertEquals(Integer.valueOf(0), 
metaSqlDateTimeSub(randomDataTypeNoDateTime())); } public void testMetaMinimumScale() { - assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATE)); + assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATETIME)); assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(LONG)); assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(FLOAT)); assertNull(metaSqlMinimumScale(KEYWORD)); } public void testMetaMaximumScale() { - assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATE)); + assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATETIME)); assertEquals(Short.valueOf((short) 0), metaSqlMaximumScale(LONG)); assertEquals(Short.valueOf((short) FLOAT.defaultPrecision), metaSqlMaximumScale(FLOAT)); assertNull(metaSqlMaximumScale(KEYWORD)); } public void testMetaRadix() { - assertNull(metaSqlRadix(DATE)); + assertNull(metaSqlRadix(DATETIME)); assertNull(metaSqlRadix(KEYWORD)); assertEquals(Integer.valueOf(10), metaSqlRadix(LONG)); assertEquals(Integer.valueOf(2), metaSqlRadix(FLOAT)); @@ -108,7 +108,7 @@ public void testIncompatibleInterval() throws Exception { assertNull(compatibleInterval(INTERVAL_MINUTE_TO_SECOND, INTERVAL_MONTH)); } - private DataType randomDataTypeNoDate() { - return randomValueOtherThan(DataType.DATE, () -> randomFrom(DataType.values())); + private DataType randomDataTypeNoDateTime() { + return randomValueOtherThan(DataType.DATETIME, () -> randomFrom(DataType.values())); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index 8e02e82eb831f..fd7b88330d3c3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -14,7 +14,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; -import static 
org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD; import static org.elasticsearch.xpack.sql.type.DataType.NESTED; @@ -81,7 +81,7 @@ public void testDateField() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); - assertThat(field.getDataType(), is(DATE)); + assertThat(field.getDataType(), is(DATETIME)); assertThat(field.isAggregatable(), is(true)); assertThat(field.getPrecision(), is(24)); @@ -95,7 +95,7 @@ public void testDateNoFormat() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); - assertThat(field.getDataType(), is(DATE)); + assertThat(field.getDataType(), is(DATETIME)); assertThat(field.isAggregatable(), is(true)); DateEsField dfield = (DateEsField) field; // default types @@ -107,7 +107,7 @@ public void testDateMulti() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); - assertThat(field.getDataType(), is(DATE)); + assertThat(field.getDataType(), is(DATETIME)); assertThat(field.isAggregatable(), is(true)); DateEsField dfield = (DateEsField) field; // default types @@ -175,7 +175,7 @@ public void testNestedDoc() { Map children = field.getProperties(); assertThat(children.size(), is(4)); assertThat(children.get("dep_name").getDataType(), is(TEXT)); - assertThat(children.get("start_date").getDataType(), is(DATE)); + assertThat(children.get("start_date").getDataType(), is(DATETIME)); } public void testGeoField() { @@ -208,4 +208,4 @@ public static Map loadMapping(String name, boolean ordered) { assertNotNull("Could not find mapping resource:" + name, stream); return Types.fromEs(XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, ordered)); } -} \ No newline at end of file +} From 676e1b1a13588e2d13777242a6ceddecd2f2def4 Mon Sep 17 00:00:00 2001 From: Torgeir Thoresen Date: Thu, 17 Jan 
2019 10:22:49 +0100 Subject: [PATCH 20/71] Fix erroneous docstrings for abstract bulk by scroll request (#37517) --- .../index/reindex/AbstractBulkByScrollRequest.java | 4 ++-- .../index/reindex/AbstractBulkByScrollRequestBuilder.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index 4aa9bc5ce146c..265ef1cbf481a 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -185,14 +185,14 @@ public Self setSize(int size) { } /** - * Should version conflicts cause aborts? Defaults to false. + * Whether or not version conflicts cause the action to abort. */ public boolean isAbortOnVersionConflict() { return abortOnVersionConflict; } /** - * Should version conflicts cause aborts? Defaults to false. + * Set whether or not version conflicts cause the action to abort. */ public Self setAbortOnVersionConflict(boolean abortOnVersionConflict) { this.abortOnVersionConflict = abortOnVersionConflict; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java index 227814e24302e..a14ef850c5079 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java @@ -75,7 +75,7 @@ public Self size(int size) { } /** - * Should we version conflicts cause the action to abort? + * Set whether or not version conflicts cause the action to abort. 
*/ public Self abortOnVersionConflict(boolean abortOnVersionConflict) { request.setAbortOnVersionConflict(abortOnVersionConflict); From da799306a8c471b02ab4bd8e6e0185933952a7a9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Jan 2019 11:51:17 +0100 Subject: [PATCH 21/71] Decreased time out in test Relates to #37378 --- .../action/admin/cluster/state/ClusterStateApiTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java index fb823d3657e19..e061e7a08dd89 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java @@ -62,7 +62,7 @@ public void testWaitForMetaDataVersion() throws Exception { // Verify that the timed out property has been set" metadataVersion = response.getState().getMetaData().version(); clusterStateRequest.waitForMetaDataVersion(metadataVersion + 1); - clusterStateRequest.waitForTimeout(TimeValue.timeValueSeconds(1)); // Fail fast + clusterStateRequest.waitForTimeout(TimeValue.timeValueMillis(500)); // Fail fast ActionFuture future3 = client().admin().cluster().state(clusterStateRequest); assertBusy(() -> { assertThat(future3.isDone(), is(true)); From 6fe2d6da0391a1da861fc55222c39693388a529b Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 17 Jan 2019 13:54:48 +0100 Subject: [PATCH 22/71] Mute TransportClientNodesServiceTests#testListenerFailures Relates to #37567 --- .../client/transport/TransportClientNodesServiceTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 
208629c169a67..3100dcbcc66a3 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -220,6 +220,7 @@ public void close() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37567") public void testListenerFailures() throws InterruptedException { int iters = iterations(10, 100); for (int i = 0; i Date: Thu, 17 Jan 2019 13:14:06 +0100 Subject: [PATCH 23/71] Moved ccr integration to the package with other ccr integration tests. --- .../org/elasticsearch/xpack/ccr/{action => }/FollowStatsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/{action => }/FollowStatsIT.java (99%) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/FollowStatsIT.java similarity index 99% rename from x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java rename to x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/FollowStatsIT.java index bf6f080099088..409746f9d851b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/FollowStatsIT.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ccr.action; +package org.elasticsearch.xpack.ccr; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; From d9fa4e4adaf354d802523e5cf396cf2bfc7f40b4 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 17 Jan 2019 13:59:09 +0100 Subject: [PATCH 24/71] Fix testRelocateWhileContinuouslyIndexingAndWaitingForRefresh (#37560) This test failed because the refresh at the end of the test is not guaranteed to run before the indexing is completed, and therefore there's no guarantee that the refresh will free all operations. This triggers an assertion failure in the test clean-up, which asserts that there are no more pending operations. --- .../elasticsearch/recovery/RelocationIT.java | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 45f0fce3b8143..fb455f37d76f3 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; @@ -552,7 +553,7 @@ public void testRelocateWhileWaitingForRefresh() { assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); } - public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() { + public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { 
logger.info("--> starting [node1] ..."); final String node1 = internalCluster().startNode(); @@ -570,9 +571,11 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() { logger.info("--> flush so we have an actual index"); client().admin().indices().prepareFlush().execute().actionGet(); logger.info("--> index more docs so we have something in the translog"); + final List> pendingIndexResponses = new ArrayList<>(); for (int i = 10; i < 20; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) - .setSource("field", "value" + i).execute(); + pendingIndexResponses.add(client().prepareIndex("test", "type", Integer.toString(i)) + .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) + .setSource("field", "value" + i).execute()); } logger.info("--> start another node"); @@ -587,8 +590,9 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() { .execute(); logger.info("--> index 100 docs while relocating"); for (int i = 20; i < 120; i++) { - client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) - .setSource("field", "value" + i).execute(); + pendingIndexResponses.add(client().prepareIndex("test", "type", Integer.toString(i)) + .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) + .setSource("field", "value" + i).execute()); } relocationListener.actionGet(); clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) @@ -596,7 +600,11 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() { assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); logger.info("--> verifying count"); - client().admin().indices().prepareRefresh().execute().actionGet(); + assertBusy(() -> { + client().admin().indices().prepareRefresh().execute().actionGet(); + assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone)); + }, 1, 
TimeUnit.MINUTES); + assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L)); } From b85bfd3e1793c71330bb6d2e186d3e6f6edfb74e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Jan 2019 14:04:41 +0100 Subject: [PATCH 25/71] Added fatal_exception field for ccr stats in monitoring mapping. (#37563) --- .../collector/ccr/FollowStatsMonitoringDocTests.java | 7 ++++++- .../plugin/core/src/main/resources/monitoring-es.json | 11 +++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 410d573e1b4c0..33affe45fc46c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -230,7 +230,7 @@ public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { 10, fetchExceptions, 2, - null); + new ElasticsearchException("fatal error")); XContentBuilder builder = jsonBuilder(); builder.value(status); Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); @@ -266,6 +266,11 @@ public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { assertThat(exceptionFieldMapping.size(), equalTo(2)); assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); + } else if (fieldName.equals("fatal_exception")) { + assertThat(fieldType, equalTo("object")); + assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(2)); + 
assertThat(XContentMapValues.extractValue("properties.type.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.reason.type", fieldMapping), equalTo("text")); } else { fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 872d3df43a81f..426262cd48c03 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -1028,6 +1028,17 @@ }, "time_since_last_read_millis": { "type": "long" + }, + "fatal_exception": { + "type": "object", + "properties": { + "type" : { + "type": "keyword" + }, + "reason": { + "type": "text" + } + } } } }, From 4351a5e5375237b8259fbcbd3ff4d7c59cc2d215 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 17 Jan 2019 15:10:28 +0100 Subject: [PATCH 26/71] Allow field types to optimize phrase prefix queries (#37436) This change adds a way to customize how phrase prefix queries should be created on field types. The match phrase prefix query is exposed in field types in order to allow optimizations based on the options set on the field. For instance the text field uses the configured prefix field (if available) to build a span near that mixes the original field and the prefix field on the last position. This change also contains a small refactoring of the match/multi_match query that simplifies the interactions between the builders. 
Closes #31921 --- .../AnnotatedTextFieldMapper.java | 72 +-- .../lucene/search/MultiPhrasePrefixQuery.java | 12 +- .../SpanBooleanQueryRewriteWithMaxClause.java | 119 ++++ .../index/mapper/MappedFieldType.java | 16 +- .../index/mapper/TextFieldMapper.java | 191 ++++-- .../query/SpanMultiTermQueryBuilder.java | 155 ++--- .../index/search/MatchQuery.java | 548 +++++++++++------- .../index/search/MultiMatchQuery.java | 340 ++++------- .../CustomUnifiedHighlighterTests.java | 4 +- .../search/MultiPhrasePrefixQueryTests.java | 10 +- .../index/mapper/TextFieldMapperTests.java | 128 ++++ .../MatchPhrasePrefixQueryBuilderTests.java | 16 +- .../index/query/MatchQueryBuilderTests.java | 8 +- .../query/MultiMatchQueryBuilderTests.java | 12 +- .../query/QueryStringQueryBuilderTests.java | 7 +- .../query/SpanMultiTermQueryBuilderTests.java | 137 ++--- .../index/search/MultiMatchQueryTests.java | 22 +- 17 files changed, 1042 insertions(+), 755 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 79fefbc64d407..2aadfd2218590 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -27,17 +27,17 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import 
org.apache.lucene.analysis.tokenattributes.TypeAttribute; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; -import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.NormsFieldExistsQuery; -import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -603,62 +603,26 @@ public Query existsQuery(QueryShardContext context) { } @Override - public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { - PhraseQuery.Builder builder = new PhraseQuery.Builder(); - builder.setSlop(slop); - - TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); - PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); - int position = -1; - - stream.reset(); - while (stream.incrementToken()) { - if (enablePosIncrements) { - position += posIncrAtt.getPositionIncrement(); - } - else { - position += 1; - } - builder.add(new Term(field, termAtt.getBytesRef()), position); - } - - return builder.build(); + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { + SpanMultiTermQueryWrapper spanMulti = + new SpanMultiTermQueryWrapper<>(new PrefixQuery(new Term(name(), indexedValueForSearch(value)))); + spanMulti.setRewriteMethod(method); + return spanMulti; } @Override - public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean 
enablePositionIncrements) throws IOException { - - MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); - mpqb.setSlop(slop); - - TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); - - PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); - int position = -1; - - List multiTerms = new ArrayList<>(); - stream.reset(); - while (stream.incrementToken()) { - int positionIncrement = posIncrAtt.getPositionIncrement(); + public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + } - if (positionIncrement > 0 && multiTerms.size() > 0) { - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); - } - multiTerms.clear(); - } - position += positionIncrement; - multiTerms.add(new Term(field, termAtt.getBytesRef())); - } + @Override + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + } - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); - } - return mpqb.build(); + @Override + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { + return TextFieldMapper.createPhrasePrefixQuery(stream, name(), slop, maxExpansions); } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index b8e1039b2df1d..57f60add714a1 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ 
b/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -39,16 +39,21 @@ import java.util.Iterator; import java.util.List; import java.util.ListIterator; +import java.util.Objects; public class MultiPhrasePrefixQuery extends Query { - private String field; + private final String field; private ArrayList termArrays = new ArrayList<>(); private ArrayList positions = new ArrayList<>(); private int maxExpansions = Integer.MAX_VALUE; private int slop = 0; + public MultiPhrasePrefixQuery(String field) { + this.field = Objects.requireNonNull(field); + } + /** * Sets the phrase slop for this query. * @@ -102,9 +107,6 @@ public void add(Term[] terms) { * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term, int) */ public void add(Term[] terms, int position) { - if (termArrays.size() == 0) - field = terms[0].field(); - for (int i = 0; i < terms.length; i++) { if (terms[i].field() != field) { throw new IllegalArgumentException( @@ -212,7 +214,7 @@ private void getPrefixTerms(ObjectHashSet terms, final Term prefix, final @Override public final String toString(String f) { StringBuilder buffer = new StringBuilder(); - if (field == null || !field.equals(f)) { + if (field.equals(f) == false) { buffer.append(field); buffer.append(":"); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java new file mode 100644 index 0000000000000..e78770ed2a85a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -0,0 +1,119 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.lucene.search; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexReaderContext; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.util.BytesRef; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +/** + * A span rewrite method that extracts the first maxExpansions terms + * that match the {@link MultiTermQuery} in the terms dictionary. + * The rewrite throws an error if more than maxExpansions terms are found and hardLimit + * is set. 
+ */ +public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrapper.SpanRewriteMethod { + private final int maxExpansions; + private final boolean hardLimit; + + public SpanBooleanQueryRewriteWithMaxClause() { + this(BooleanQuery.getMaxClauseCount(), true); + } + + public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { + this.maxExpansions = maxExpansions; + this.hardLimit = hardLimit; + } + + public int getMaxExpansions() { + return maxExpansions; + } + + public boolean isHardLimit() { + return hardLimit; + } + + @Override + public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { + @Override + public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + Collection queries = collectTerms(reader, query); + if (queries.size() == 0) { + return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); + } else if (queries.size() == 1) { + return queries.iterator().next(); + } else { + return new SpanOrQuery(queries.toArray(new SpanQuery[0])); + } + } + + private Collection collectTerms(IndexReader reader, MultiTermQuery query) throws IOException { + Set queries = new HashSet<>(); + IndexReaderContext topReaderContext = reader.getContext(); + for (LeafReaderContext context : topReaderContext.leaves()) { + final Terms terms = context.reader().terms(query.getField()); + if (terms == null) { + // field does not exist + continue; + } + + final TermsEnum termsEnum = getTermsEnum(query, terms, null); + assert termsEnum != null; + + if (termsEnum == TermsEnum.EMPTY) + continue; + + BytesRef bytes; + while ((bytes = termsEnum.next()) != null) { + if (queries.size() >= maxExpansions) { + if (hardLimit) { + throw new RuntimeException("[" + query.toString() + " ] " + + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " + 
BooleanQuery.getMaxClauseCount() + "]"); + } else { + return queries; + } + } + queries.add(new SpanTermQuery(new Term(query.getField(), bytes))); + } + } + return queries; + } + }; + return (SpanQuery) delegate.rewrite(reader, query); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 741b2300a4678..f785e01125f69 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -35,6 +35,8 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.intervals.IntervalsSource; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; @@ -365,16 +367,26 @@ public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nu public abstract Query existsQuery(QueryShardContext context); - public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } - public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } + public Query 
phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { + throw new IllegalArgumentException("Can only use phrase prefix queries on text fields - not on [" + name + + "] which is of type [" + typeName() + "]"); + } + + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { + throw new IllegalArgumentException("Can only use span prefix queries on text fields - not on [" + name + + "] which is of type [" + typeName() + "]"); + } + /** * Create an {@link IntervalsSource} to be used for proximity queries */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 1b25c7b9866f7..e5fc470e130bc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -40,14 +40,23 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.intervals.IntervalsSource; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; +import 
org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -60,6 +69,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -598,6 +608,23 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer return tq; } + @Override + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { + failIfNotIndexed(); + if (prefixFieldType != null + && value.length() >= prefixFieldType.minChars + && value.length() <= prefixFieldType.maxChars + && prefixFieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) { + + return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), indexedValueForSearch(value))), name()); + } else { + SpanMultiTermQueryWrapper spanMulti = + new SpanMultiTermQueryWrapper<>(new PrefixQuery(new Term(name(), indexedValueForSearch(value)))); + spanMulti.setRewriteMethod(method); + return spanMulti; + } + } + @Override public Query existsQuery(QueryShardContext context) { if (omitNorms()) { @@ -617,9 +644,9 @@ public IntervalsSource intervals(String text, int maxGaps, boolean ordered, Name } @Override - public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { - - if (indexPhrases && slop == 0 && hasGaps(cache(stream)) == false) { + public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { + String field = name(); + if (indexPhrases && slop == 0 && hasGaps(stream) == false) { stream = new FixedShingleFilter(stream, 2); field = field + FAST_PHRASE_SUFFIX; } @@ -645,54 +672,85 @@ public Query phraseQuery(String field, 
TokenStream stream, int slop, boolean ena } @Override - public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { - - if (indexPhrases && slop == 0 && hasGaps(cache(stream)) == false) { + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + String field = name(); + if (indexPhrases && slop == 0 && hasGaps(stream) == false) { stream = new FixedShingleFilter(stream, 2); field = field + FAST_PHRASE_SUFFIX; } + return createPhraseQuery(stream, field, slop, enablePositionIncrements); + } - MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); - mpqb.setSlop(slop); - - TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + @Override + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { + return analyzePhrasePrefix(stream, slop, maxExpansions); + } - PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); - int position = -1; + private Query analyzePhrasePrefix(TokenStream stream, int slop, int maxExpansions) throws IOException { + final MultiPhrasePrefixQuery query = createPhrasePrefixQuery(stream, name(), slop, maxExpansions); - List multiTerms = new ArrayList<>(); - stream.reset(); - while (stream.incrementToken()) { - int positionIncrement = posIncrAtt.getPositionIncrement(); + if (slop > 0 + || prefixFieldType == null + || prefixFieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { + return query; + } - if (positionIncrement > 0 && multiTerms.size() > 0) { - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); - } - multiTerms.clear(); + int lastPos = query.getTerms().length - 1; + final Term[][] terms = query.getTerms(); + final int[] positions = query.getPositions(); + for (Term term : 
terms[lastPos]) { + String value = term.text(); + if (value.length() < prefixFieldType.minChars || value.length() > prefixFieldType.maxChars) { + return query; } - position += positionIncrement; - multiTerms.add(new Term(field, termAtt.getBytesRef())); } - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); + if (terms.length == 1) { + Term[] newTerms = Arrays.stream(terms[0]) + .map(term -> new Term(prefixFieldType.name(), term.bytes())) + .toArray(Term[]::new); + return new SynonymQuery(newTerms); } - return mpqb.build(); - } - private static CachingTokenFilter cache(TokenStream in) { - if (in instanceof CachingTokenFilter) { - return (CachingTokenFilter) in; + SpanNearQuery.Builder spanQuery = new SpanNearQuery.Builder(name(), true); + spanQuery.setSlop(slop); + int previousPos = -1; + for (int i = 0; i < terms.length; i++) { + Term[] posTerms = terms[i]; + int posInc = positions[i] - previousPos; + previousPos = positions[i]; + if (posInc > 1) { + spanQuery.addGap(posInc - 1); + } + if (i == lastPos) { + if (posTerms.length == 1) { + FieldMaskingSpanQuery fieldMask = + new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), posTerms[0].bytes())), name()); + spanQuery.addClause(fieldMask); + } else { + SpanQuery[] queries = Arrays.stream(posTerms) + .map(term -> new FieldMaskingSpanQuery( + new SpanTermQuery(new Term(prefixFieldType.name(), term.bytes())), name()) + ) + .toArray(SpanQuery[]::new); + spanQuery.addClause(new SpanOrQuery(queries)); + } + } else { + if (posTerms.length == 1) { + spanQuery.addClause(new SpanTermQuery(posTerms[0])); + } else { + SpanTermQuery[] queries = Arrays.stream(posTerms) + .map(SpanTermQuery::new) + .toArray(SpanTermQuery[]::new); + spanQuery.addClause(new SpanOrQuery(queries)); + } + } } - return new CachingTokenFilter(in); + return spanQuery.build(); } - private static boolean hasGaps(CachingTokenFilter stream) 
throws IOException { + private static boolean hasGaps(TokenStream stream) throws IOException { + assert stream instanceof CachingTokenFilter; PositionIncrementAttribute posIncAtt = stream.getAttribute(PositionIncrementAttribute.class); stream.reset(); while (stream.incrementToken()) { @@ -870,4 +928,65 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, builder.field("index_phrases", fieldType().indexPhrases); } } + + public static Query createPhraseQuery(TokenStream stream, String field, int slop, boolean enablePositionIncrements) throws IOException { + MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); + mpqb.setSlop(slop); + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + + PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + int position = -1; + + List multiTerms = new ArrayList<>(); + stream.reset(); + while (stream.incrementToken()) { + int positionIncrement = posIncrAtt.getPositionIncrement(); + + if (positionIncrement > 0 && multiTerms.size() > 0) { + if (enablePositionIncrements) { + mpqb.add(multiTerms.toArray(new Term[0]), position); + } else { + mpqb.add(multiTerms.toArray(new Term[0])); + } + multiTerms.clear(); + } + position += positionIncrement; + multiTerms.add(new Term(field, termAtt.getBytesRef())); + } + + if (enablePositionIncrements) { + mpqb.add(multiTerms.toArray(new Term[0]), position); + } else { + mpqb.add(multiTerms.toArray(new Term[0])); + } + return mpqb.build(); + } + + public static MultiPhrasePrefixQuery createPhrasePrefixQuery(TokenStream stream, String field, + int slop, int maxExpansions) throws IOException { + MultiPhrasePrefixQuery builder = new MultiPhrasePrefixQuery(field); + builder.setSlop(slop); + builder.setMaxExpansions(maxExpansions); + + List currentTerms = new ArrayList<>(); + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + 
PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + + stream.reset(); + int position = -1; + while (stream.incrementToken()) { + if (posIncrAtt.getPositionIncrement() != 0) { + if (currentTerms.isEmpty() == false) { + builder.add(currentTerms.toArray(new Term[0]), position); + } + position += posIncrAtt.getPositionIncrement(); + currentTerms.clear(); + } + currentTerms.add(new Term(field, termAtt.getBytesRef())); + } + builder.add(currentTerms.toArray(new Term[0]), position); + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 22fca7d1d0b8f..49e5e53e1ed91 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -18,31 +18,19 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.TermStates; import org.apache.lucene.queries.SpanMatchNoDocsQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.ScoringRewrite; import org.apache.lucene.search.TopTermsRewrite; -import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; 
-import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -50,8 +38,6 @@ import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Objects; /** @@ -138,126 +124,53 @@ public static SpanMultiTermQueryBuilder fromXContent(XContentParser parser) thro return new SpanMultiTermQueryBuilder(subQuery).queryName(queryName).boost(boost); } - static class TopTermSpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrapper.SpanRewriteMethod { - private final long maxExpansions; - - TopTermSpanBooleanQueryRewriteWithMaxClause() { - this.maxExpansions = BooleanQuery.getMaxClauseCount(); - } - - @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { - final MultiTermQuery.RewriteMethod delegate = new ScoringRewrite>() { - @Override - protected List getTopLevelBuilder() { - return new ArrayList(); - } - - @Override - protected Query build(List builder) { - return new SpanOrQuery((SpanQuery[]) builder.toArray(new SpanQuery[builder.size()])); - } - - @Override - protected void checkMaxClauseCount(int count) { - if (count > maxExpansions) { - throw new RuntimeException("[" + query.toString() + " ] " + - "exceeds maxClauseCount [ Boolean maxClauseCount is set to " + BooleanQuery.getMaxClauseCount() + "]"); - } - } - - @Override - protected void addClause(List topLevel, Term term, int docCount, float boost, TermStates states) { - SpanTermQuery q = new SpanTermQuery(term, states); - 
topLevel.add(q); - } - }; - return (SpanQuery) delegate.rewrite(reader, query); - } - } - @Override protected Query doToQuery(QueryShardContext context) throws IOException { - Query subQuery = multiTermQueryBuilder.toQuery(context); - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - while (true) { - if (subQuery instanceof ConstantScoreQuery) { - subQuery = ((ConstantScoreQuery) subQuery).getQuery(); - boost = 1; - } else if (subQuery instanceof BoostQuery) { - BoostQuery boostQuery = (BoostQuery) subQuery; - subQuery = boostQuery.getQuery(); - boost *= boostQuery.getBoost(); - } else { - break; - } - } - // no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here - assert subQuery instanceof SpanBoostQuery == false; - - if (subQuery instanceof MatchNoDocsQuery) { - return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), subQuery.toString()); - } - - final SpanQuery spanQuery; - if (subQuery instanceof TermQuery) { - /** - * Text fields that index prefixes can rewrite prefix queries - * into term queries. See {@link TextFieldMapper.TextFieldType#prefixQuery}. - */ - if (multiTermQueryBuilder.getClass() != PrefixQueryBuilder.class) { - throw new UnsupportedOperationException("unsupported inner query generated by " + - multiTermQueryBuilder.getClass().getName() + ", should be " + MultiTermQuery.class.getName() - + " but was " + subQuery.getClass().getName()); - } - + if (multiTermQueryBuilder instanceof PrefixQueryBuilder) { PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder; - MappedFieldType fieldType = context.fieldMapper(prefixBuilder.fieldName()); - String fieldName = fieldType != null ? fieldType.name() : prefixBuilder.fieldName(); - - if (context.getIndexSettings().getIndexVersionCreated().before(Version.V_6_4_0)) { - /** - * Indices created in this version do not index positions on the prefix field - * so we cannot use it to match positional queries. 
Instead, we explicitly create the prefix - * query on the main field to avoid the rewrite. - */ - PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, prefixBuilder.value())); - if (prefixBuilder.rewrite() != null) { - MultiTermQuery.RewriteMethod rewriteMethod = - QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE); - prefixQuery.setRewriteMethod(rewriteMethod); + MappedFieldType fieldType = context.fieldMapper(multiTermQueryBuilder.fieldName()); + if (fieldType == null) { + return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), "unknown field"); + } + final SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod; + if (prefixBuilder.rewrite() != null) { + MultiTermQuery.RewriteMethod rewriteMethod = + QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE); + if (rewriteMethod instanceof TopTermsRewrite) { + TopTermsRewrite innerRewrite = (TopTermsRewrite) rewriteMethod; + spanRewriteMethod = new SpanMultiTermQueryWrapper.TopTermsSpanBooleanQueryRewrite(innerRewrite.getSize()); + } else { + spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(); } - subQuery = prefixQuery; - spanQuery = new SpanMultiTermQueryWrapper<>(prefixQuery); } else { - /** - * Prefixes are indexed in a different field so we mask the term query with the original field - * name. This is required because span_near and span_or queries don't work across different field. - * The masking is safe because the prefix field is indexed using the same content than the original field - * and the prefix analyzer preserves positions. 
- */ - SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); - spanQuery = new FieldMaskingSpanQuery(spanTermQuery, fieldName); + spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(); } + return fieldType.spanPrefixQuery(prefixBuilder.value(), spanRewriteMethod, context); } else { - if (subQuery instanceof MultiTermQuery == false) { + Query subQuery = multiTermQueryBuilder.toQuery(context); + while (true) { + if (subQuery instanceof ConstantScoreQuery) { + subQuery = ((ConstantScoreQuery) subQuery).getQuery(); + } else if (subQuery instanceof BoostQuery) { + BoostQuery boostQuery = (BoostQuery) subQuery; + subQuery = boostQuery.getQuery(); + } else { + break; + } + } + if (subQuery instanceof MatchNoDocsQuery) { + return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), subQuery.toString()); + } else if (subQuery instanceof MultiTermQuery == false) { throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() + " but was " + subQuery.getClass().getName()); } - spanQuery = new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery); - } - if (subQuery instanceof MultiTermQuery) { MultiTermQuery multiTermQuery = (MultiTermQuery) subQuery; - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) spanQuery; + SpanMultiTermQueryWrapper wrapper = new SpanMultiTermQueryWrapper<>(multiTermQuery); if (multiTermQuery.getRewriteMethod() instanceof TopTermsRewrite == false) { - wrapper.setRewriteMethod(new TopTermSpanBooleanQueryRewriteWithMaxClause()); + wrapper.setRewriteMethod(new SpanBooleanQueryRewriteWithMaxClause()); } + return wrapper; } - if (boost != AbstractQueryBuilder.DEFAULT_BOOST) { - return new SpanBoostQuery(spanQuery, boost); - } - - return spanQuery; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java index 267f3a6951161..ad4b267eef643 100644 
--- a/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -20,43 +20,46 @@ package org.elasticsearch.index.search; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CachingTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PhraseQuery; -import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.QueryBuilder; +import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery; import static org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery; @@ -128,19 +131,10 @@ public void writeTo(StreamOutput out) throws IOException { } } - /** - * the default phrase slop - */ public static final int DEFAULT_PHRASE_SLOP = 0; - /** - * the default leniency setting - */ public static final boolean DEFAULT_LENIENCY = false; - /** - * the default zero terms query - */ public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE; protected final QueryShardContext context; @@ -159,6 +153,9 @@ public void writeTo(StreamOutput out) throws IOException { protected int maxExpansions = FuzzyQuery.defaultMaxExpansions; + protected SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod = + new SpanBooleanQueryRewriteWithMaxClause(FuzzyQuery.defaultMaxExpansions, false); + protected boolean transpositions = FuzzyQuery.defaultTranspositions; protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod; @@ -212,6 +209,7 @@ public void setFuzzyPrefixLength(int fuzzyPrefixLength) { public void setMaxExpansions(int maxExpansions) { this.maxExpansions = maxExpansions; + this.spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(maxExpansions, false); } public void setTranspositions(boolean transpositions) { @@ -234,78 +232,83 @@ public 
void setAutoGenerateSynonymsPhraseQuery(boolean enabled) { this.autoGenerateSynonymsPhraseQuery = enabled; } - protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) { - if (analyzer == null) { - return quoted ? context.getSearchQuoteAnalyzer(fieldType) : context.getSearchAnalyzer(fieldType); - } else { - return analyzer; - } - } - - private boolean hasPositions(MappedFieldType fieldType) { - return fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; - } - public Query parse(Type type, String fieldName, Object value) throws IOException { - MappedFieldType fieldType = context.fieldMapper(fieldName); + final MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { return newUnmappedFieldQuery(fieldName); } - final String field = fieldType.name(); - - Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE); + Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE || type == Type.PHRASE_PREFIX); assert analyzer != null; + MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType); + /* * If a keyword analyzer is used, we know that further analysis isn't * needed and can immediately return a term query. 
*/ - if (analyzer == Lucene.KEYWORD_ANALYZER) { - return blendTermQuery(new Term(fieldName, value.toString()), fieldType); + if (analyzer == Lucene.KEYWORD_ANALYZER + && type != Type.PHRASE_PREFIX) { + return builder.newTermQuery(new Term(fieldName, value.toString())); } - MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType); - builder.setEnablePositionIncrements(this.enablePositionIncrements); - if (hasPositions(fieldType)) { - builder.setAutoGenerateMultiTermSynonymsPhraseQuery(this.autoGenerateSynonymsPhraseQuery); - } else { - builder.setAutoGenerateMultiTermSynonymsPhraseQuery(false); - } + return parseInternal(type, fieldName, builder, value); + } - Query query = null; + protected final Query parseInternal(Type type, String fieldName, MatchQueryBuilder builder, Object value) throws IOException { + final Query query; switch (type) { case BOOLEAN: if (commonTermsCutoff == null) { - query = builder.createBooleanQuery(field, value.toString(), occur); + query = builder.createBooleanQuery(fieldName, value.toString(), occur); } else { - query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff); + query = createCommonTermsQuery(builder, fieldName, value.toString(), occur, occur, commonTermsCutoff); } break; + case PHRASE: - query = builder.createPhraseQuery(field, value.toString(), phraseSlop); + query = builder.createPhraseQuery(fieldName, value.toString(), phraseSlop); break; + case PHRASE_PREFIX: - query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions); + query = builder.createPhrasePrefixQuery(fieldName, value.toString(), phraseSlop); break; + default: throw new IllegalStateException("No type found for [" + type + "]"); } - if (query == null) { - return zeroTermsQuery(); - } else { - return query; + return query == null ? 
zeroTermsQuery() : query; + } + + private Query createCommonTermsQuery(MatchQueryBuilder builder, String field, String queryText, + Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency) { + Query booleanQuery = builder.createBooleanQuery(field, queryText, lowFreqOccur); + if (booleanQuery != null && booleanQuery instanceof BooleanQuery) { + BooleanQuery bq = (BooleanQuery) booleanQuery; + return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency); } + return booleanQuery; } - protected final Query termQuery(MappedFieldType fieldType, BytesRef value, boolean lenient) { - try { - return fieldType.termQuery(value, context); - } catch (RuntimeException e) { - if (lenient) { - return newLenientFieldQuery(fieldType.name(), e); + private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, + Occur highFreqOccur, + Occur lowFreqOccur, + float maxTermFrequency) { + ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency); + for (BooleanClause clause : bq.clauses()) { + if ((clause.getQuery() instanceof TermQuery) == false) { + return bq; } - throw e; + query.add(((TermQuery) clause.getQuery()).getTerm()); + } + return query; + } + + protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) { + if (analyzer == null) { + return quoted ? context.getSearchQuoteAnalyzer(fieldType) : context.getSearchAnalyzer(fieldType); + } else { + return analyzer; } } @@ -322,216 +325,345 @@ protected Query zeroTermsQuery() { } } - private class MatchQueryBuilder extends QueryBuilder { + private boolean hasPositions(MappedFieldType fieldType) { + return fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + } - private final MappedFieldType mapper; + class MatchQueryBuilder extends QueryBuilder { + private final MappedFieldType fieldType; /** * Creates a new QueryBuilder using the given analyzer. 
*/ - MatchQueryBuilder(Analyzer analyzer, MappedFieldType mapper) { + MatchQueryBuilder(Analyzer analyzer, MappedFieldType fieldType) { super(analyzer); - this.mapper = mapper; + this.fieldType = fieldType; + if (hasPositions(fieldType)) { + setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + } else { + setAutoGenerateMultiTermSynonymsPhraseQuery(false); + } + setEnablePositionIncrements(enablePositionIncrements); } @Override - protected Query newTermQuery(Term term) { - return blendTermQuery(term, mapper); + protected Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field, + String queryText, boolean quoted, int slop) { + assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; + Type type = quoted ? Type.PHRASE : Type.BOOLEAN; + return createQuery(field, queryText, type, operator, slop); } - @Override - protected Query newSynonymQuery(Term[] terms) { - return blendTermsQuery(terms, mapper); + public Query createPhrasePrefixQuery(String field, String queryText, int slop) { + return createQuery(field, queryText, Type.PHRASE_PREFIX, occur, slop); } - @Override - protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { - try { - checkForPositions(field); - Query query = mapper.phraseQuery(field, stream, slop, enablePositionIncrements); - if (query instanceof PhraseQuery) { - // synonyms that expand to multiple terms can return a phrase query. - return blendPhraseQuery((PhraseQuery) query, mapper); - } - return query; - } catch (IllegalArgumentException | IllegalStateException e) { - if (lenient) { - return newLenientFieldQuery(field, e); + private Query createFieldQuery(TokenStream source, Type type, BooleanClause.Occur operator, String field, int phraseSlop) { + assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; + + // Build an appropriate query based on the analysis chain. 
+ try (CachingTokenFilter stream = new CachingTokenFilter(source)) { + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class); + PositionLengthAttribute posLenAtt = stream.addAttribute(PositionLengthAttribute.class); + + if (termAtt == null) { + return null; } - throw e; - } - } - @Override - protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { - try { - checkForPositions(field); - return mapper.multiPhraseQuery(field, stream, slop, enablePositionIncrements); - } catch (IllegalArgumentException | IllegalStateException e) { - if (lenient) { - return newLenientFieldQuery(field, e); + // phase 1: read through the stream and assess the situation: + // counting the number of tokens/positions and marking if we have any synonyms. + + int numTokens = 0; + int positionCount = 0; + boolean hasSynonyms = false; + boolean isGraph = false; + + stream.reset(); + while (stream.incrementToken()) { + numTokens++; + int positionIncrement = posIncAtt.getPositionIncrement(); + if (positionIncrement != 0) { + positionCount += positionIncrement; + } else { + hasSynonyms = true; + } + + int positionLength = posLenAtt.getPositionLength(); + if (enableGraphQueries && positionLength > 1) { + isGraph = true; + } } - throw e; - } - } - private void checkForPositions(String field) { - if (hasPositions(mapper) == false) { - throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery"); + // phase 2: based on token count, presence of synonyms, and options + // formulate a single term, boolean, or phrase. 
+ if (numTokens == 0) { + return null; + } else if (numTokens == 1) { + // single term + if (type == Type.PHRASE_PREFIX) { + return analyzePhrasePrefix(field, stream, phraseSlop, positionCount); + } else { + return analyzeTerm(field, stream); + } + } else if (isGraph) { + // graph + if (type == Type.PHRASE || type == Type.PHRASE_PREFIX) { + return analyzeGraphPhrase(stream, field, type, phraseSlop); + } else { + return analyzeGraphBoolean(field, stream, operator); + } + } else if (type == Type.PHRASE && positionCount > 1) { + // phrase + if (hasSynonyms) { + // complex phrase with synonyms + return analyzeMultiPhrase(field, stream, phraseSlop); + } else { + // simple phrase + return analyzePhrase(field, stream, phraseSlop); + } + } else if (type == Type.PHRASE_PREFIX) { + // phrase prefix + return analyzePhrasePrefix(field, stream, phraseSlop, positionCount); + } else { + // boolean + if (positionCount == 1) { + // only one position, with synonyms + return analyzeBoolean(field, stream); + } else { + // complex case: multiple positions + return analyzeMultiBoolean(field, stream, operator); + } + } + } catch (IOException e) { + throw new RuntimeException("Error analyzing query text", e); } } - /** - * Checks if graph analysis should be enabled for the field depending - * on the provided {@link Analyzer} - */ - @Override - protected Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field, - String queryText, boolean quoted, int phraseSlop) { - assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; - + private Query createQuery(String field, String queryText, Type type, BooleanClause.Occur operator, int phraseSlop) { // Use the analyzer to get all the tokens, and then build an appropriate // query based on the analysis chain. 
try (TokenStream source = analyzer.tokenStream(field, queryText)) { if (source.hasAttribute(DisableGraphAttribute.class)) { /* - A {@link TokenFilter} in this {@link TokenStream} disabled the graph analysis to avoid - paths explosion. See {@link org.elasticsearch.index.analysis.ShingleTokenFilterFactory} for details. + * A {@link TokenFilter} in this {@link TokenStream} disabled the graph analysis to avoid + * paths explosion. See {@link org.elasticsearch.index.analysis.ShingleTokenFilterFactory} for details. */ setEnableGraphQueries(false); } - Query query = super.createFieldQuery(source, operator, field, quoted, phraseSlop); - setEnableGraphQueries(true); - return query; + try { + return createFieldQuery(source, type, operator, field, phraseSlop); + } finally { + setEnableGraphQueries(true); + } } catch (IOException e) { throw new RuntimeException("Error analyzing query text", e); } } - public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) { - final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop); - return toMultiPhrasePrefix(query, phraseSlop, maxExpansions); + private SpanQuery newSpanQuery(Term[] terms, boolean prefix) { + if (terms.length == 1) { + return prefix ? fieldType.spanPrefixQuery(terms[0].text(), spanRewriteMethod, context) : new SpanTermQuery(terms[0]); + } + SpanQuery[] spanQueries = new SpanQuery[terms.length]; + for (int i = 0; i < terms.length; i++) { + spanQueries[i] = prefix ? 
fieldType.spanPrefixQuery(terms[i].text(), spanRewriteMethod, context) : + new SpanTermQuery(terms[i]); + } + return new SpanOrQuery(spanQueries); } - private Query toMultiPhrasePrefix(final Query query, int phraseSlop, int maxExpansions) { - float boost = 1; - Query innerQuery = query; - while (innerQuery instanceof BoostQuery) { - BoostQuery bq = (BoostQuery) innerQuery; - boost *= bq.getBoost(); - innerQuery = bq.getQuery(); - } - if (query instanceof SpanQuery) { - return toSpanQueryPrefix((SpanQuery) query, boost); + @Override + protected SpanQuery createSpanQuery(TokenStream in, String field) throws IOException { + return createSpanQuery(in, field, false); + } + + private SpanQuery createSpanQuery(TokenStream in, String field, boolean prefix) throws IOException { + TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncAtt = in.getAttribute(PositionIncrementAttribute.class); + if (termAtt == null) { + return null; } - final MultiPhrasePrefixQuery prefixQuery = new MultiPhrasePrefixQuery(); - prefixQuery.setMaxExpansions(maxExpansions); - prefixQuery.setSlop(phraseSlop); - if (innerQuery instanceof PhraseQuery) { - PhraseQuery pq = (PhraseQuery) innerQuery; - Term[] terms = pq.getTerms(); - int[] positions = pq.getPositions(); - for (int i = 0; i < terms.length; i++) { - prefixQuery.add(new Term[]{terms[i]}, positions[i]); + + SpanNearQuery.Builder builder = new SpanNearQuery.Builder(field, true); + Term lastTerm = null; + while (in.incrementToken()) { + if (posIncAtt.getPositionIncrement() > 1) { + builder.addGap(posIncAtt.getPositionIncrement()-1); } - return boost == 1 ?
prefixQuery : new BoostQuery(prefixQuery, boost); - } else if (innerQuery instanceof MultiPhraseQuery) { - MultiPhraseQuery pq = (MultiPhraseQuery) innerQuery; - Term[][] terms = pq.getTermArrays(); - int[] positions = pq.getPositions(); - for (int i = 0; i < terms.length; i++) { - prefixQuery.add(terms[i], positions[i]); + if (lastTerm != null) { + builder.addClause(new SpanTermQuery(lastTerm)); } - return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); - } else if (innerQuery instanceof TermQuery) { - prefixQuery.add(((TermQuery) innerQuery).getTerm()); - return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); + lastTerm = new Term(field, termAtt.getBytesRef()); + } + if (lastTerm != null) { + SpanQuery spanQuery = prefix ? + fieldType.spanPrefixQuery(lastTerm.text(), spanRewriteMethod, context) : new SpanTermQuery(lastTerm); + builder.addClause(spanQuery); + } + SpanNearQuery query = builder.build(); + SpanQuery[] clauses = query.getClauses(); + if (clauses.length == 1) { + return clauses[0]; + } else { + return query; } - return query; } - private Query toSpanQueryPrefix(SpanQuery query, float boost) { - if (query instanceof SpanTermQuery) { - SpanMultiTermQueryWrapper ret = - new SpanMultiTermQueryWrapper<>(new PrefixQuery(((SpanTermQuery) query).getTerm())); - return boost == 1 ? ret : new BoostQuery(ret, boost); - } else if (query instanceof SpanNearQuery) { - SpanNearQuery spanNearQuery = (SpanNearQuery) query; - SpanQuery[] clauses = spanNearQuery.getClauses(); - if (clauses[clauses.length - 1] instanceof SpanTermQuery) { - clauses[clauses.length - 1] = new SpanMultiTermQueryWrapper<>( - new PrefixQuery(((SpanTermQuery) clauses[clauses.length - 1]).getTerm()) - ); - } - SpanNearQuery newQuery = new SpanNearQuery(clauses, spanNearQuery.getSlop(), spanNearQuery.isInOrder()); - return boost == 1 ? 
newQuery : new BoostQuery(newQuery, boost); - } else if (query instanceof SpanOrQuery) { - SpanOrQuery orQuery = (SpanOrQuery) query; - SpanQuery[] clauses = new SpanQuery[orQuery.getClauses().length]; - for (int i = 0; i < clauses.length; i++) { - clauses[i] = (SpanQuery) toSpanQueryPrefix(orQuery.getClauses()[i], 1); - } - return boost == 1 ? new SpanOrQuery(clauses) : new BoostQuery(new SpanOrQuery(clauses), boost); + @Override + protected Query newTermQuery(Term term) { + Supplier querySupplier; + if (fuzziness != null) { + querySupplier = () -> { + Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); + if (query instanceof FuzzyQuery) { + QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); + } + return query; + }; } else { + querySupplier = () -> fieldType.termQuery(term.bytes(), context); + } + try { + Query query = querySupplier.get(); return query; + } catch (RuntimeException e) { + if (lenient) { + return newLenientFieldQuery(fieldType.name(), e); + } else { + throw e; + } } } - public Query createCommonTermsQuery(String field, String queryText, - Occur highFreqOccur, - Occur lowFreqOccur, - float maxTermFrequency) { - Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur); - if (booleanQuery != null && booleanQuery instanceof BooleanQuery) { - BooleanQuery bq = (BooleanQuery) booleanQuery; - return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency); + @Override + protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { + try { + checkForPositions(field); + return fieldType.phraseQuery(stream, slop, enablePositionIncrements); + } catch (IllegalArgumentException | IllegalStateException e) { + if (lenient) { + return newLenientFieldQuery(field, e); + } + throw e; } - return booleanQuery; } - private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, - Occur highFreqOccur, - Occur lowFreqOccur, - 
float maxTermFrequency) { - ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency); - for (BooleanClause clause : bq.clauses()) { - if (!(clause.getQuery() instanceof TermQuery)) { - return bq; + @Override + protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { + try { + checkForPositions(field); + return fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements); + } catch (IllegalArgumentException | IllegalStateException e) { + if (lenient) { + return newLenientFieldQuery(field, e); } - query.add(((TermQuery) clause.getQuery()).getTerm()); + throw e; } - return query; } - } - - /** - * Called when a phrase query is built with {@link QueryBuilder#analyzePhrase(String, TokenStream, int)}. - * Subclass can override this function to blend this query to multiple fields. - */ - protected Query blendPhraseQuery(PhraseQuery query, MappedFieldType fieldType) { - return query; - } - - protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) { - return new SynonymQuery(terms); - } - protected Query blendTermQuery(Term term, MappedFieldType fieldType) { - if (fuzziness != null) { + private Query analyzePhrasePrefix(String field, TokenStream stream, int slop, int positionCount) throws IOException { try { - Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); - if (query instanceof FuzzyQuery) { - QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); + if (positionCount > 1) { + checkForPositions(field); } - return query; - } catch (RuntimeException e) { + return fieldType.phrasePrefixQuery(stream, slop, maxExpansions); + } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { - return newLenientFieldQuery(fieldType.name(), e); + return newLenientFieldQuery(field, e); + } + throw e; + } + } + + private Query analyzeGraphPhrase(TokenStream source, String field, 
Type type, int slop) throws IOException { + assert type == Type.PHRASE_PREFIX || type == Type.PHRASE; + + source.reset(); + GraphTokenStreamFiniteStrings graph = new GraphTokenStreamFiniteStrings(source); + if (phraseSlop > 0) { + /* + * Creates a boolean query from the graph token stream by extracting all the finite strings from the graph + * and using them to create phrase queries with the appropriate slop. + */ + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + Iterator it = graph.getFiniteStrings(); + while (it.hasNext()) { + Query query = createFieldQuery(it.next(), type, BooleanClause.Occur.MUST, field, slop); + if (query != null) { + builder.add(query, BooleanClause.Occur.SHOULD); + } + } + return builder.build(); + } + + /* + * Creates a span near (phrase) query from a graph token stream. + * The articulation points of the graph are visited in order and the queries + * created at each point are merged in the returned near query. + */ + List clauses = new ArrayList<>(); + int[] articulationPoints = graph.articulationPoints(); + int lastState = 0; + int maxClauseCount = BooleanQuery.getMaxClauseCount(); + for (int i = 0; i <= articulationPoints.length; i++) { + int start = lastState; + int end = -1; + if (i < articulationPoints.length) { + end = articulationPoints[i]; + } + lastState = end; + final SpanQuery queryPos; + boolean endPrefix = end == -1 && type == Type.PHRASE_PREFIX; + if (graph.hasSidePath(start)) { + List queries = new ArrayList<>(); + Iterator it = graph.getFiniteStrings(start, end); + while (it.hasNext()) { + TokenStream ts = it.next(); + SpanQuery q = createSpanQuery(ts, field, endPrefix); + if (q != null) { + if (queries.size() >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + queries.add(q); + } + } + if (queries.size() > 0) { + queryPos = new SpanOrQuery(queries.toArray(new SpanQuery[0])); + } else { + queryPos = null; + } } else { - throw e; + Term[] terms = graph.getTerms(field, start); + assert 
terms.length > 0; + if (terms.length >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + queryPos = newSpanQuery(terms, endPrefix); + } + + if (queryPos != null) { + if (clauses.size() >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + clauses.add(queryPos); } } + + if (clauses.isEmpty()) { + return null; + } else if (clauses.size() == 1) { + return clauses.get(0); + } else { + return new SpanNearQuery(clauses.toArray(new SpanQuery[0]), 0, true); + } + } + + private void checkForPositions(String field) { + if (hasPositions(fieldType) == false) { + throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery"); + } } - return termQuery(fieldType, term.bytes(), lenient); } } diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 6f57faba001c9..7eefaadaadde2 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -20,12 +20,12 @@ package org.elasticsearch.index.search; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; @@ -49,222 +49,182 @@ public class MultiMatchQuery extends MatchQuery { private Float groupTieBreaker = null; - public void setTieBreaker(float tieBreaker) { - this.groupTieBreaker = tieBreaker; - } - public MultiMatchQuery(QueryShardContext context) { super(context); } - private Query parseAndApply(Type type, 
String fieldName, Object value, - String minimumShouldMatch, Float boostValue) throws IOException { - Query query = parse(type, fieldName, value); - query = Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch); - if (query != null && boostValue != null && - boostValue != AbstractQueryBuilder.DEFAULT_BOOST && query instanceof MatchNoDocsQuery == false) { - query = new BoostQuery(query, boostValue); - } - return query; + public void setTieBreaker(float tieBreaker) { + this.groupTieBreaker = tieBreaker; } public Query parse(MultiMatchQueryBuilder.Type type, Map fieldNames, - Object value, String minimumShouldMatch) throws IOException { - final Query result; - // reset query builder - queryBuilder = null; - if (fieldNames.size() == 1) { - Map.Entry fieldBoost = fieldNames.entrySet().iterator().next(); - Float boostValue = fieldBoost.getValue(); - result = parseAndApply(type.matchQueryType(), fieldBoost.getKey(), value, minimumShouldMatch, boostValue); - } else { - final float tieBreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker; - switch (type) { - case PHRASE: - case PHRASE_PREFIX: - case BEST_FIELDS: - case MOST_FIELDS: - queryBuilder = new QueryBuilder(tieBreaker); - break; - case CROSS_FIELDS: - queryBuilder = new CrossFieldsQueryBuilder(tieBreaker); - break; - default: - throw new IllegalStateException("No such type: " + type); - } - final List queries = queryBuilder.buildGroupedQueries(type, fieldNames, value, minimumShouldMatch); - result = queryBuilder.combineGrouped(queries); + Object value, String minimumShouldMatch) throws IOException { + final float tieBreaker = groupTieBreaker == null ? 
type.tieBreaker() : groupTieBreaker; + final List queries; + switch (type) { + case PHRASE: + case PHRASE_PREFIX: + case BEST_FIELDS: + case MOST_FIELDS: + queries = buildFieldQueries(type, fieldNames, value, minimumShouldMatch); + break; + + case CROSS_FIELDS: + queries = buildCrossFieldQuery(type, fieldNames, value, minimumShouldMatch, tieBreaker); + break; + + default: + throw new IllegalStateException("No such type: " + type); } - return result; + return combineGrouped(queries, tieBreaker); } - private QueryBuilder queryBuilder; - - public class QueryBuilder { - protected final float tieBreaker; - - public QueryBuilder(float tieBreaker) { - this.tieBreaker = tieBreaker; - } - - public List buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, - Object value, String minimumShouldMatch) throws IOException{ - List queries = new ArrayList<>(); - for (String fieldName : fieldNames.keySet()) { - Float boostValue = fieldNames.get(fieldName); - Query query = parseGroup(type.matchQueryType(), fieldName, boostValue, value, minimumShouldMatch); - if (query != null) { - queries.add(query); - } - } - return queries; + private Query combineGrouped(List groupQuery, float tieBreaker) { + if (groupQuery.isEmpty()) { + return zeroTermsQuery(); } - - Query parseGroup(Type type, String field, Float boostValue, Object value, String minimumShouldMatch) throws IOException { - if (context.fieldMapper(field) == null) { - return null; // indicates to the caller that this field is unmapped and should be disregarded - } - return parseAndApply(type, field, value, minimumShouldMatch, boostValue); + if (groupQuery.size() == 1) { + return groupQuery.get(0); } + return new DisjunctionMaxQuery(groupQuery, tieBreaker); + } - private Query combineGrouped(List groupQuery) { - if (groupQuery == null || groupQuery.isEmpty()) { - return zeroTermsQuery(); + private List buildFieldQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, + Object value, String minimumShouldMatch) throws 
IOException{ + List queries = new ArrayList<>(); + for (String fieldName : fieldNames.keySet()) { + if (context.fieldMapper(fieldName) == null) { + // ignore unmapped fields + continue; } - if (groupQuery.size() == 1) { - return groupQuery.get(0); + Float boostValue = fieldNames.get(fieldName); + Query query = parse(type.matchQueryType(), fieldName, value); + query = Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch); + if (query != null + && boostValue != null + && boostValue != AbstractQueryBuilder.DEFAULT_BOOST + && query instanceof MatchNoDocsQuery == false) { + query = new BoostQuery(query, boostValue); } - List queries = new ArrayList<>(); - for (Query query : groupQuery) { + if (query != null) { queries.add(query); } - return new DisjunctionMaxQuery(queries, tieBreaker); - } - - public Query blendTerm(Term term, MappedFieldType fieldType) { - return MultiMatchQuery.super.blendTermQuery(term, fieldType); - } - - public Query blendTerms(Term[] terms, MappedFieldType fieldType) { - return MultiMatchQuery.super.blendTermsQuery(terms, fieldType); - } - - public Query termQuery(MappedFieldType fieldType, BytesRef value) { - return MultiMatchQuery.this.termQuery(fieldType, value, lenient); - } - - public Query blendPhrase(PhraseQuery query, MappedFieldType type) { - return MultiMatchQuery.super.blendPhraseQuery(query, type); } + return queries; } - final class CrossFieldsQueryBuilder extends QueryBuilder { - private FieldAndFieldType[] blendedFields; - - CrossFieldsQueryBuilder(float tiebreaker) { - super(tiebreaker); - } - - @Override - public List buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, - Object value, String minimumShouldMatch) throws IOException { - Map> groups = new HashMap<>(); - List queries = new ArrayList<>(); - for (Map.Entry entry : fieldNames.entrySet()) { - String name = entry.getKey(); - MappedFieldType fieldType = context.fieldMapper(name); - if (fieldType != null) { - Analyzer actualAnalyzer = 
getAnalyzer(fieldType, type == MultiMatchQueryBuilder.Type.PHRASE); - name = fieldType.name(); - if (!groups.containsKey(actualAnalyzer)) { - groups.put(actualAnalyzer, new ArrayList<>()); - } - Float boost = entry.getValue(); - boost = boost == null ? Float.valueOf(1.0f) : boost; - groups.get(actualAnalyzer).add(new FieldAndFieldType(fieldType, boost)); - } else { - queries.add(new MatchNoDocsQuery("unknown field " + name)); + private List buildCrossFieldQuery(MultiMatchQueryBuilder.Type type, Map fieldNames, + Object value, String minimumShouldMatch, float tieBreaker) throws IOException { + Map> groups = new HashMap<>(); + List queries = new ArrayList<>(); + for (Map.Entry entry : fieldNames.entrySet()) { + String name = entry.getKey(); + MappedFieldType fieldType = context.fieldMapper(name); + if (fieldType != null) { + Analyzer actualAnalyzer = getAnalyzer(fieldType, type == MultiMatchQueryBuilder.Type.PHRASE); + if (!groups.containsKey(actualAnalyzer)) { + groups.put(actualAnalyzer, new ArrayList<>()); } + float boost = entry.getValue() == null ? 1.0f : entry.getValue(); + groups.get(actualAnalyzer).add(new FieldAndBoost(fieldType, boost)); + } + } + for (Map.Entry> group : groups.entrySet()) { + final MatchQueryBuilder builder; + if (group.getValue().size() == 1) { + builder = new MatchQueryBuilder(group.getKey(), group.getValue().get(0).fieldType); + } else { + builder = new BlendedQueryBuilder(group.getKey(), group.getValue(), tieBreaker); } - for (List group : groups.values()) { - if (group.size() > 1) { - blendedFields = new FieldAndFieldType[group.size()]; - int i = 0; - for (FieldAndFieldType fieldAndFieldType : group) { - blendedFields[i++] = fieldAndFieldType; + + /* + * We have to pick some field to pass through the superclass so + * we just pick the first field. It shouldn't matter because + * fields are already grouped by their analyzers/types. 
+ */ + String representativeField = group.getValue().get(0).fieldType.name(); + Query query = parseInternal(type.matchQueryType(), representativeField, builder, value); + query = Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch); + if (query != null) { + if (group.getValue().size() == 1) { + // apply the field boost to groups that contain a single field + float boost = group.getValue().get(0).boost; + if (boost != AbstractQueryBuilder.DEFAULT_BOOST) { + query = new BoostQuery(query, boost); } - } else { - blendedFields = null; - } - /* - * We have to pick some field to pass through the superclass so - * we just pick the first field. It shouldn't matter because - * fields are already grouped by their analyzers/types. - */ - String representativeField = group.get(0).fieldType.name(); - Query q = parseGroup(type.matchQueryType(), representativeField, 1f, value, minimumShouldMatch); - if (q != null) { - queries.add(q); } + queries.add(query); } + } + + return queries; + } + + private class BlendedQueryBuilder extends MatchQueryBuilder { + private final List blendedFields; + private final float tieBreaker; - return queries.isEmpty() ? 
null : queries; + BlendedQueryBuilder(Analyzer analyzer, List blendedFields, float tieBreaker) { + super(analyzer, blendedFields.get(0).fieldType); + this.blendedFields = blendedFields; + this.tieBreaker = tieBreaker; } @Override - public Query blendTerms(Term[] terms, MappedFieldType fieldType) { - if (blendedFields == null || blendedFields.length == 1) { - return super.blendTerms(terms, fieldType); - } + protected Query newSynonymQuery(Term[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { values[i] = terms[i].bytes(); } - return MultiMatchQuery.blendTerms(context, values, commonTermsCutoff, tieBreaker, lenient, blendedFields); + return blendTerms(context, values, commonTermsCutoff, tieBreaker, lenient, blendedFields); } @Override - public Query blendTerm(Term term, MappedFieldType fieldType) { - if (blendedFields == null) { - return super.blendTerm(term, fieldType); - } - return MultiMatchQuery.blendTerm(context, term.bytes(), commonTermsCutoff, tieBreaker, lenient, blendedFields); + public Query newTermQuery(Term term) { + return blendTerm(context, term.bytes(), commonTermsCutoff, tieBreaker, lenient, blendedFields); } @Override - public Query termQuery(MappedFieldType fieldType, BytesRef value) { - /* - * Use the string value of the term because we're reusing the - * portion of the query is usually after the analyzer has run on - * each term. We just skip that analyzer phase. 
- */ - return blendTerm(new Term(fieldType.name(), value.utf8ToString()), fieldType); + protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { + List disjunctions = new ArrayList<>(); + for (FieldAndBoost fieldType : blendedFields) { + Query query = fieldType.fieldType.phraseQuery(stream, slop, enablePositionIncrements); + if (fieldType.boost != 1f) { + query = new BoostQuery(query, fieldType.boost); + } + disjunctions.add(query); + } + return new DisjunctionMaxQuery(disjunctions, tieBreaker); } @Override - public Query blendPhrase(PhraseQuery query, MappedFieldType type) { - if (blendedFields == null) { - return super.blendPhrase(query, type); + protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { + List disjunctions = new ArrayList<>(); + for (FieldAndBoost fieldType : blendedFields) { + Query query = fieldType.fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements); + if (fieldType.boost != 1f) { + query = new BoostQuery(query, fieldType.boost); + } + disjunctions.add(query); } - /** - * We build phrase queries for multi-word synonyms when {@link QueryBuilder#autoGenerateSynonymsPhraseQuery} is true. - */ - return MultiMatchQuery.blendPhrase(query, tieBreaker, blendedFields); + return new DisjunctionMaxQuery(disjunctions, tieBreaker); } } static Query blendTerm(QueryShardContext context, BytesRef value, Float commonTermsCutoff, float tieBreaker, - boolean lenient, FieldAndFieldType... blendedFields) { + boolean lenient, List blendedFields) { + return blendTerms(context, new BytesRef[] {value}, commonTermsCutoff, tieBreaker, lenient, blendedFields); } static Query blendTerms(QueryShardContext context, BytesRef[] values, Float commonTermsCutoff, float tieBreaker, - boolean lenient, FieldAndFieldType... 
blendedFields) { + boolean lenient, List blendedFields) { + List queries = new ArrayList<>(); - Term[] terms = new Term[blendedFields.length * values.length]; - float[] blendedBoost = new float[blendedFields.length * values.length]; + Term[] terms = new Term[blendedFields.size() * values.length]; + float[] blendedBoost = new float[blendedFields.size() * values.length]; int i = 0; - for (FieldAndFieldType ft : blendedFields) { + for (FieldAndBoost ft : blendedFields) { for (BytesRef term : values) { Query query; try { @@ -309,61 +269,15 @@ static Query blendTerms(QueryShardContext context, BytesRef[] values, Float comm // best effort: add clauses that are not term queries so that they have an opportunity to match // however their score contribution will be different // TODO: can we improve this? - return new DisjunctionMaxQuery(queries, 1.0f); - } - } - - /** - * Expand a {@link PhraseQuery} to multiple fields that share the same analyzer. - * Returns a {@link DisjunctionMaxQuery} with a disjunction for each expanded field. - */ - static Query blendPhrase(PhraseQuery query, float tiebreaker, FieldAndFieldType... 
fields) { - List disjunctions = new ArrayList<>(); - for (FieldAndFieldType field : fields) { - int[] positions = query.getPositions(); - Term[] terms = query.getTerms(); - PhraseQuery.Builder builder = new PhraseQuery.Builder(); - for (int i = 0; i < terms.length; i++) { - builder.add(new Term(field.fieldType.name(), terms[i].bytes()), positions[i]); - } - Query q = builder.build(); - if (field.boost != AbstractQueryBuilder.DEFAULT_BOOST) { - q = new BoostQuery(q, field.boost); - } - disjunctions.add(q); - } - return new DisjunctionMaxQuery(disjunctions, tiebreaker); - } - - @Override - protected Query blendTermQuery(Term term, MappedFieldType fieldType) { - if (queryBuilder == null) { - return super.blendTermQuery(term, fieldType); - } - return queryBuilder.blendTerm(term, fieldType); - } - - @Override - protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) { - if (queryBuilder == null) { - return super.blendTermsQuery(terms, fieldType); - } - return queryBuilder.blendTerms(terms, fieldType); - } - - @Override - protected Query blendPhraseQuery(PhraseQuery query, MappedFieldType fieldType) { - if (queryBuilder == null) { - return super.blendPhraseQuery(query, fieldType); + return new DisjunctionMaxQuery(queries, tieBreaker); } - return queryBuilder.blendPhrase(query, fieldType); } - static final class FieldAndFieldType { + static final class FieldAndBoost { final MappedFieldType fieldType; final float boost; - FieldAndFieldType(MappedFieldType fieldType, float boost) { + FieldAndBoost(MappedFieldType fieldType, float boost) { this.fieldType = Objects.requireNonNull(fieldType); this.boost = boost; } diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index a6e676006fdbf..4e4b04d1ff19c 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ 
b/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -126,7 +126,7 @@ public void testMultiPhrasePrefixQuerySingleTerm() throws Exception { final String[] outputs = { "The quick brown fox." }; - MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("text"); query.add(new Term("text", "bro")); assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT, BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs); @@ -139,7 +139,7 @@ public void testMultiPhrasePrefixQuery() throws Exception { final String[] outputs = { "The quick brown fox." }; - MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("text"); query.add(new Term("text", "quick")); query.add(new Term("text", "brown")); query.add(new Term("text", "fo")); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java index 23b6939fe7a70..f0d4c88e01c19 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java @@ -43,24 +43,24 @@ public void testSimple() throws Exception { IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("field"); query.add(new Term("field", "aa")); assertThat(searcher.count(query), equalTo(1)); - query = new MultiPhrasePrefixQuery(); + query = new MultiPhrasePrefixQuery("field"); query.add(new Term("field", "aaa")); query.add(new Term("field", "bb")); assertThat(searcher.count(query), equalTo(1)); - query = new MultiPhrasePrefixQuery(); + 
query = new MultiPhrasePrefixQuery("field"); query.setSlop(1); query.add(new Term("field", "aaa")); query.add(new Term("field", "cc")); assertThat(searcher.count(query), equalTo(1)); - query = new MultiPhrasePrefixQuery(); + query = new MultiPhrasePrefixQuery("field"); query.setSlop(1); query.add(new Term("field", "xxx")); assertThat(searcher.count(query), equalTo(0)); } -} \ No newline at end of file +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index acd6c9ee6f80b..e527f98f73c20 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -34,13 +34,19 @@ import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -52,6 +58,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; +import 
org.elasticsearch.index.query.MatchPhrasePrefixQueryBuilder; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.search.MatchQuery; @@ -956,4 +963,125 @@ public void testIndexPrefixMapping() throws IOException { assertThat(e.getMessage(), containsString("Cannot set index_prefixes on unindexed field [field]")); } } + + public void testFastPhrasePrefixes() throws IOException { + QueryShardContext queryShardContext = indexService.newQueryShardContext( + randomInt(20), null, () -> { + throw new UnsupportedOperationException(); + }, null); + + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "text") + .field("analyzer", "my_stop_analyzer") + .startObject("index_prefixes") + .field("min_chars", 2) + .field("max_chars", 10) + .endObject() + .endObject() + .startObject("synfield") + .field("type", "text") + .field("analyzer", "standard") // will be replaced with MockSynonymAnalyzer + .field("index_phrases", true) + .startObject("index_prefixes") + .field("min_chars", 2) + .field("max_chars", 10) + .endObject() + .endObject() + .endObject() + .endObject().endObject()); + + queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").toQuery(queryShardContext); + Query expected = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "two"))) + .addClause(new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("field._index_prefix", "words")), "field") + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "three words here").toQuery(queryShardContext); + Query expected = new SpanNearQuery.Builder("field", true) + .addClause(new 
SpanTermQuery(new Term("field", "three"))) + .addClause(new SpanTermQuery(new Term("field", "words"))) + .addClause(new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("field._index_prefix", "here")), "field") + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext); + MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery("field"); + mpq.setSlop(1); + mpq.add(new Term("field", "two")); + mpq.add(new Term("field", "words")); + assertThat(q, equalTo(mpq)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "singleton").toQuery(queryShardContext); + assertThat(q, is(new SynonymQuery(new Term("field._index_prefix", "singleton")))); + } + + { + + Query q = new MatchPhrasePrefixQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext); + Query expected = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "sparkle"))) + .addGap(1) + .addClause(new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("field._index_prefix", "stopword")), "field") + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "motor dogs"); + Query expected = new SpanNearQuery.Builder("synfield", true) + .addClause(new SpanTermQuery(new Term("synfield", "motor"))) + .addClause( + new SpanOrQuery( + new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("synfield._index_prefix", "dogs")), "synfield" + ), + new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("synfield._index_prefix", "dog")), "synfield" + ) + ) + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setPhraseSlop(1); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = 
matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "two dogs"); + MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery("synfield"); + mpq.setSlop(1); + mpq.add(new Term("synfield", "two")); + mpq.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + assertThat(q, equalTo(mpq)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "motor d").toQuery(queryShardContext); + MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery("field"); + mpq.add(new Term("field", "motor")); + mpq.add(new Term("field", "d")); + assertThat(q, equalTo(mpq)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index fd722ef0c77af..a6aa53e3aa0e9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -19,12 +19,9 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.SynonymQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.search.internal.SearchContext; @@ -34,7 +31,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME; import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsString; @@ -43,8 +39,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends 
AbstractQueryTestCase { @Override protected MatchPhrasePrefixQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME); Object value; if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); @@ -91,10 +86,9 @@ protected Map getAlternateVersions() { protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { assertThat(query, notNullValue()); - assertThat(query, - either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)) - .or(instanceOf(IndexOrDocValuesQuery.class)).or(instanceOf(MatchNoDocsQuery.class))); + assertThat(query, either(instanceOf(MultiPhrasePrefixQuery.class)) + .or(instanceOf(SynonymQuery.class)) + .or(instanceOf(MatchNoDocsQuery.class))); } public void testIllegalValues() { diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 184ee2759c15e..c258cce6c7c50 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -25,7 +25,6 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -371,13 +370,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws public void testMatchPhrasePrefixWithBoost() throws 
Exception { QueryShardContext context = createShardContext(); { - // field boost is applied on a single term query + // field boost is ignored on a single term query MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo"); Query query = builder.toQuery(context); - assertThat(query, instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) query).getBoost(), equalTo(4f)); - Query innerQuery = ((BoostQuery) query).getQuery(); - assertThat(innerQuery, instanceOf(MultiPhrasePrefixQuery.class)); + assertThat(query, instanceOf(MultiPhrasePrefixQuery.class)); } { diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 43c76f028e22e..27651e0da0de4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -91,7 +91,12 @@ protected MultiMatchQueryBuilder doCreateTestQueryBuilder() { // sets other parameters of the multi match query if (randomBoolean()) { - query.type(randomFrom(MultiMatchQueryBuilder.Type.values())); + if (fieldName.equals(STRING_FIELD_NAME)) { + query.type(randomFrom(MultiMatchQueryBuilder.Type.values())); + } else { + query.type(randomValueOtherThan(MultiMatchQueryBuilder.Type.PHRASE_PREFIX, + () -> randomFrom(MultiMatchQueryBuilder.Type.values()))); + } } if (randomBoolean()) { query.operator(randomFrom(Operator.values())); @@ -384,6 +389,11 @@ public void testDefaultField() throws Exception { ), 0.0f ); assertEquals(expected, query); + + context.getIndexSettings().updateIndexMetaData( + newIndexMeta("index", context.getIndexSettings().getSettings(), + Settings.builder().putNull("index.query.default_field").build()) + ); } public void testWithStopWords() throws Exception { diff --git 
a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index baa0fed01bbf0..0eb6de7da252f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -1208,20 +1208,21 @@ public void testUnmappedFieldRewriteToMatchNoDocs() throws IOException { .field("unmapped_field") .lenient(true) .toQuery(createShardContext()); - assertEquals(new MatchNoDocsQuery(""), query); + assertEquals(new BooleanQuery.Builder().build(), query); // Unmapped prefix field query = new QueryStringQueryBuilder("unmapped_field:hello") .lenient(true) .toQuery(createShardContext()); - assertEquals(new MatchNoDocsQuery(""), query); + assertEquals(new BooleanQuery.Builder().build(), query); // Unmapped fields query = new QueryStringQueryBuilder("hello") .lenient(true) .field("unmapped_field") + .field("another_field") .toQuery(createShardContext()); - assertEquals(new MatchNoDocsQuery(""), query); + assertEquals(new BooleanQuery.Builder().build(), query); } public void testDefaultField() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 47db7d42d8cd0..4c59e25804a55 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -32,8 +32,8 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopTermsRewrite; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanBoostQuery; import 
org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; @@ -42,6 +42,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.internal.SearchContext; @@ -55,6 +56,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.either; +import static org.hamcrest.CoreMatchers.startsWith; public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase { @Override @@ -68,6 +70,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .field("type", "alias") .field("path", "prefix_field") .endObject() + .startObject("body") + .field("type", "text") + .endObject() .endObject().endObject().endObject(); mapperService.merge("_doc", @@ -85,23 +90,26 @@ protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query if (query instanceof SpanMatchNoDocsQuery) { return; } - if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - assertThat(query, instanceOf(SpanBoostQuery.class)); - SpanBoostQuery boostQuery = (SpanBoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(queryBuilder.innerQuery().boost())); - query = boostQuery.getQuery(); - } - assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); - SpanMultiTermQueryWrapper spanMultiTermQueryWrapper = (SpanMultiTermQueryWrapper) query; - Query multiTermQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext()); - if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - 
assertThat(multiTermQuery, instanceOf(BoostQuery.class)); - BoostQuery boostQuery = (BoostQuery) multiTermQuery; - multiTermQuery = boostQuery.getQuery(); + assertThat(query, either(instanceOf(SpanMultiTermQueryWrapper.class)).or(instanceOf(FieldMaskingSpanQuery.class))); + if (query instanceof SpanMultiTermQueryWrapper) { + SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; + Query innerQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext()); + if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { + assertThat(innerQuery, instanceOf(BoostQuery.class)); + BoostQuery boostQuery = (BoostQuery) innerQuery; + innerQuery = boostQuery.getQuery(); + } + assertThat(innerQuery, instanceOf(MultiTermQuery.class)); + MultiTermQuery multiQuery = (MultiTermQuery) innerQuery; + if (multiQuery.getRewriteMethod() instanceof TopTermsRewrite) { + assertThat(wrapper.getRewriteMethod(), instanceOf(SpanMultiTermQueryWrapper.TopTermsSpanBooleanQueryRewrite.class)); + } else { + assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); + } + } else if (query instanceof FieldMaskingSpanQuery) { + FieldMaskingSpanQuery mask = (FieldMaskingSpanQuery) query; + assertThat(mask.getMaskedQuery(), instanceOf(TermQuery.class)); } - assertThat(multiTermQuery, either(instanceOf(MultiTermQuery.class)).or(instanceOf(TermQuery.class))); - assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), - equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery) multiTermQuery).getWrappedQuery())); } public void testIllegalArgument() { @@ -168,11 +176,10 @@ public String fieldName() { */ public void testUnsupportedInnerQueryType() throws IOException { MultiTermQueryBuilder query = new TermMultiTermQueryBuilder(); - SpanMultiTermQueryBuilder spamMultiTermQuery = new SpanMultiTermQueryBuilder(query); + SpanMultiTermQueryBuilder spanMultiTermQuery = new SpanMultiTermQueryBuilder(query); UnsupportedOperationException 
e = expectThrows(UnsupportedOperationException.class, - () -> spamMultiTermQuery.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("unsupported inner query generated by " + TermMultiTermQueryBuilder.class.getName() + - ", should be " + MultiTermQuery.class.getName())); + () -> spanMultiTermQuery.toQuery(createShardContext())); + assertThat(e.getMessage(), startsWith("unsupported inner query")); } public void testToQueryInnerSpanMultiTerm() throws IOException { @@ -184,50 +191,39 @@ public void testToQueryInnerSpanMultiTerm() throws IOException { public void testToQueryInnerTermQuery() throws IOException { String fieldName = randomFrom("prefix_field", "prefix_field_alias"); final QueryShardContext context = createShardContext(); - if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { - Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .toQuery(context); - assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); - FieldMaskingSpanQuery fieldSpanQuery = (FieldMaskingSpanQuery) query; - assertThat(fieldSpanQuery.getField(), equalTo("prefix_field")); - assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); - SpanTermQuery spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery(); - assertThat(spanTermQuery.getTerm().text(), equalTo("foo")); - - query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .boost(2.0f) - .toQuery(context); - assertThat(query, instanceOf(SpanBoostQuery.class)); - SpanBoostQuery boostQuery = (SpanBoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(2.0f)); - assertThat(boostQuery.getQuery(), instanceOf(FieldMaskingSpanQuery.class)); - fieldSpanQuery = (FieldMaskingSpanQuery) boostQuery.getQuery(); - assertThat(fieldSpanQuery.getField(), equalTo("prefix_field")); - assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); - spanTermQuery = (SpanTermQuery) 
fieldSpanQuery.getMaskedQuery(); - assertThat(spanTermQuery.getTerm().text(), equalTo("foo")); - } else { - Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .toQuery(context); + { + Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")).toQuery(context); + if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { + assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); + FieldMaskingSpanQuery fieldQuery = (FieldMaskingSpanQuery) query; + assertThat(fieldQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); + assertThat(fieldQuery.getField(), equalTo("prefix_field")); + SpanTermQuery termQuery = (SpanTermQuery) fieldQuery.getMaskedQuery(); + assertThat(termQuery.getTerm().field(), equalTo("prefix_field._index_prefix")); + assertThat(termQuery.getTerm().text(), equalTo("foo")); + } else { + assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); + SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; + assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); + PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); + assertThat(prefixQuery.getField(), equalTo("prefix_field")); + assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); + } + } + + { + Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "f")).toQuery(context); assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); + assertThat(wrapper.getField(), equalTo("prefix_field")); PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); assertThat(prefixQuery.getField(), equalTo("prefix_field")); - assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); - - query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .boost(2.0f) - 
.toQuery(context); - assertThat(query, instanceOf(SpanBoostQuery.class)); - SpanBoostQuery boostQuery = (SpanBoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(2.0f)); - assertThat(boostQuery.getQuery(), instanceOf(SpanMultiTermQueryWrapper.class)); - wrapper = (SpanMultiTermQueryWrapper) boostQuery.getQuery(); - assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); - prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); - assertThat(prefixQuery.getField(), equalTo("prefix_field")); - assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); + assertThat(prefixQuery.getPrefix().text(), equalTo("f")); + assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); + SpanBooleanQueryRewriteWithMaxClause rewrite = (SpanBooleanQueryRewriteWithMaxClause) wrapper.getRewriteMethod(); + assertThat(rewrite.getMaxExpansions(), equalTo(BooleanQuery.getMaxClauseCount())); + assertTrue(rewrite.isHardLimit()); } } @@ -255,17 +251,13 @@ public void testFromJson() throws IOException { } public void testDefaultMaxRewriteBuilder() throws Exception { - Query query = QueryBuilders.spanMultiTermQueryBuilder(QueryBuilders.prefixQuery("foo", "b")). 
- toQuery(createShardContext()); - - if (query instanceof SpanBoostQuery) { - query = ((SpanBoostQuery)query).getQuery(); - } + Query query = QueryBuilders.spanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "b")) + .toQuery(createShardContext()); assertTrue(query instanceof SpanMultiTermQueryWrapper); if (query instanceof SpanMultiTermQueryWrapper) { - MultiTermQuery.RewriteMethod rewriteMethod = ((SpanMultiTermQueryWrapper)query).getRewriteMethod(); - assertTrue(rewriteMethod instanceof SpanMultiTermQueryBuilder.TopTermSpanBooleanQueryRewriteWithMaxClause); + MultiTermQuery.RewriteMethod rewriteMethod = ((SpanMultiTermQueryWrapper) query).getRewriteMethod(); + assertTrue(rewriteMethod instanceof SpanBooleanQueryRewriteWithMaxClause); } } @@ -285,7 +277,6 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { Query query = queryBuilder.toQuery(createShardContext(reader)); RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(reader)); assertThat(exc.getMessage(), containsString("maxClauseCount")); - } finally { BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); } @@ -296,17 +287,13 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception { public void testTopNMultiTermsRewriteInsideSpan() throws Exception { Query query = QueryBuilders.spanMultiTermQueryBuilder( - QueryBuilders.prefixQuery("foo", "b").rewrite("top_terms_boost_2000") + QueryBuilders.prefixQuery("body", "b").rewrite("top_terms_boost_2000") ).toQuery(createShardContext()); - if (query instanceof SpanBoostQuery) { - query = ((SpanBoostQuery)query).getQuery(); - } - assertTrue(query instanceof SpanMultiTermQueryWrapper); if (query instanceof SpanMultiTermQueryWrapper) { MultiTermQuery.RewriteMethod rewriteMethod = ((SpanMultiTermQueryWrapper)query).getRewriteMethod(); - assertFalse(rewriteMethod instanceof SpanMultiTermQueryBuilder.TopTermSpanBooleanQueryRewriteWithMaxClause); + assertFalse(rewriteMethod instanceof 
SpanBooleanQueryRewriteWithMaxClause); } } diff --git a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 1087bbbf9fd8f..58baadd83573d 100644 --- a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; @@ -44,7 +43,7 @@ import org.elasticsearch.index.mapper.MockFieldMapper.FakeFieldType; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.search.MultiMatchQuery.FieldAndFieldType; +import org.elasticsearch.index.search.MultiMatchQuery.FieldAndBoost; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.MockKeywordPlugin; @@ -105,7 +104,8 @@ public void testCrossFieldMultiMatchQuery() throws IOException { for (float tieBreaker : new float[] {0.0f, 0.5f}) { Query parsedQuery = multiMatchQuery("banon") .field("name.first", 2) - .field("name.last", 3).field("foobar") + .field("name.last", 3) + .field("foobar") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .tieBreaker(tieBreaker) .toQuery(queryShardContext); @@ -113,11 +113,7 @@ public void testCrossFieldMultiMatchQuery() throws IOException { Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2); Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3); - Query expected = new 
DisjunctionMaxQuery( - Arrays.asList( - new MatchNoDocsQuery("unknown field foobar"), - new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), tieBreaker) - ), tieBreaker); + Query expected = new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), tieBreaker); assertEquals(expected, rewrittenQuery); } } @@ -133,7 +129,7 @@ public void testBlendTerms() { Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null), - new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); + new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3))); assertEquals(expected, actual); } @@ -149,7 +145,7 @@ public void testBlendTermsWithFieldBoosts() { Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null), - new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); + new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3))); assertEquals(expected, actual); } @@ -171,7 +167,7 @@ public Query termQuery(Object value, QueryShardContext context) { ), 1f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null), - new BytesRef("baz"), null, 1f, true, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); + new BytesRef("baz"), null, 1f, true, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3))); assertEquals(expected, actual); } @@ -185,7 +181,7 @@ public Query termQuery(Object value, QueryShardContext context) { ft.setName("bar"); 
expectThrows(IllegalArgumentException.class, () -> MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null), - new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft, 1))); + new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft, 1)))); } public void testBlendNoTermQuery() { @@ -209,7 +205,7 @@ public Query termQuery(Object value, QueryShardContext context) { ), 1.0f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null), - new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); + new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3))); assertEquals(expected, actual); } From 20ed3dd1a8b9c75e7080f2004e703050223febd6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 17 Jan 2019 09:59:05 -0500 Subject: [PATCH 27/71] Make recovery source send operations non-blocking (#37503) Relates #37458 --- .../recovery/RecoverySourceHandler.java | 151 +++++++++--------- .../recovery/RecoverySourceHandlerTests.java | 70 +++++++- 2 files changed, 139 insertions(+), 82 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index d2d03156271cd..34434f50b456f 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -33,9 +33,9 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.StepListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import 
org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Tuple; @@ -71,7 +71,7 @@ import java.util.Locale; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Supplier; @@ -514,13 +514,6 @@ TimeValue prepareTargetForTranslog(final boolean fileBasedRecovery, final int to */ void phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot, long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes, ActionListener listener) throws IOException { - ActionListener.completeWith(listener, () -> sendSnapshotBlockingly( - startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes)); - } - - private SendSnapshotResult sendSnapshotBlockingly(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, - Translog.Snapshot snapshot, long maxSeenAutoIdTimestamp, - long maxSeqNoOfUpdatesOrDeletes) throws IOException { assert requiredSeqNoRangeStart <= endingSeqNo + 1: "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; assert startingSeqNo <= requiredSeqNoRangeStart : @@ -528,83 +521,87 @@ private SendSnapshotResult sendSnapshotBlockingly(long startingSeqNo, long requi if (shard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(request.shardId()); } - - final StopWatch stopWatch = new StopWatch().start(); - logger.trace("recovery [phase2]: sending transaction log operations (seq# from [" + startingSeqNo + "], " + "required [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "]"); - 
int ops = 0; - long size = 0; - int skippedOps = 0; - int totalSentOps = 0; - final AtomicLong targetLocalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); - final List operations = new ArrayList<>(); + final AtomicInteger skippedOps = new AtomicInteger(); + final AtomicInteger totalSentOps = new AtomicInteger(); final LocalCheckpointTracker requiredOpsTracker = new LocalCheckpointTracker(endingSeqNo, requiredSeqNoRangeStart - 1); + final AtomicInteger lastBatchCount = new AtomicInteger(); // used to estimate the count of the subsequent batch. + final CheckedSupplier, IOException> readNextBatch = () -> { + // We need to synchronized Snapshot#next() because it's called by different threads through sendBatch. + // Even though those calls are not concurrent, Snapshot#next() uses non-synchronized state and is not multi-thread-compatible. + synchronized (snapshot) { + final List ops = lastBatchCount.get() > 0 ? new ArrayList<>(lastBatchCount.get()) : new ArrayList<>(); + long batchSizeInBytes = 0L; + Translog.Operation operation; + while ((operation = snapshot.next()) != null) { + if (shard.state() == IndexShardState.CLOSED) { + throw new IndexShardClosedException(request.shardId()); + } + cancellableThreads.checkForCancel(); + final long seqNo = operation.seqNo(); + if (seqNo < startingSeqNo || seqNo > endingSeqNo) { + skippedOps.incrementAndGet(); + continue; + } + ops.add(operation); + batchSizeInBytes += operation.estimateSize(); + totalSentOps.incrementAndGet(); + requiredOpsTracker.markSeqNoAsCompleted(seqNo); - final int expectedTotalOps = snapshot.totalOperations(); - if (expectedTotalOps == 0) { - logger.trace("no translog operations to send"); - } - - final CancellableThreads.IOInterruptible sendBatch = () -> { - // TODO: Make this non-blocking - final PlainActionFuture future = new PlainActionFuture<>(); - recoveryTarget.indexTranslogOperations( - operations, expectedTotalOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, future); - 
targetLocalCheckpoint.set(future.actionGet()); - }; - - // send operations in batches - Translog.Operation operation; - while ((operation = snapshot.next()) != null) { - if (shard.state() == IndexShardState.CLOSED) { - throw new IndexShardClosedException(request.shardId()); - } - cancellableThreads.checkForCancel(); - - final long seqNo = operation.seqNo(); - if (seqNo < startingSeqNo || seqNo > endingSeqNo) { - skippedOps++; - continue; - } - operations.add(operation); - ops++; - size += operation.estimateSize(); - totalSentOps++; - requiredOpsTracker.markSeqNoAsCompleted(seqNo); - - // check if this request is past bytes threshold, and if so, send it off - if (size >= chunkSizeInBytes) { - cancellableThreads.executeIO(sendBatch); - logger.trace("sent batch of [{}][{}] (total: [{}]) translog operations", ops, new ByteSizeValue(size), expectedTotalOps); - ops = 0; - size = 0; - operations.clear(); + // check if this request is past bytes threshold, and if so, send it off + if (batchSizeInBytes >= chunkSizeInBytes) { + break; + } + } + lastBatchCount.set(ops.size()); + return ops; } - } - - if (!operations.isEmpty() || totalSentOps == 0) { - // send the leftover operations or if no operations were sent, request the target to respond with its local checkpoint - cancellableThreads.executeIO(sendBatch); - } + }; - assert expectedTotalOps == snapshot.skippedOperations() + skippedOps + totalSentOps - : String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]", - expectedTotalOps, snapshot.skippedOperations(), skippedOps, totalSentOps); + final StopWatch stopWatch = new StopWatch().start(); + final ActionListener batchedListener = ActionListener.wrap( + targetLocalCheckpoint -> { + assert snapshot.totalOperations() == snapshot.skippedOperations() + skippedOps.get() + totalSentOps.get() + : String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]", + snapshot.totalOperations(), 
snapshot.skippedOperations(), skippedOps.get(), totalSentOps.get()); + if (requiredOpsTracker.getCheckpoint() < endingSeqNo) { + throw new IllegalStateException("translog replay failed to cover required sequence numbers" + + " (required range [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "). first missing op is [" + + (requiredOpsTracker.getCheckpoint() + 1) + "]"); + } + stopWatch.stop(); + final TimeValue tookTime = stopWatch.totalTime(); + logger.trace("recovery [phase2]: took [{}]", tookTime); + listener.onResponse(new SendSnapshotResult(targetLocalCheckpoint, totalSentOps.get(), tookTime)); + }, + listener::onFailure + ); + + sendBatch(readNextBatch, true, SequenceNumbers.UNASSIGNED_SEQ_NO, snapshot.totalOperations(), + maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, batchedListener); + } - if (requiredOpsTracker.getCheckpoint() < endingSeqNo) { - throw new IllegalStateException("translog replay failed to cover required sequence numbers" + - " (required range [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "). 
first missing op is [" - + (requiredOpsTracker.getCheckpoint() + 1) + "]"); + private void sendBatch(CheckedSupplier, IOException> nextBatch, boolean firstBatch, + long targetLocalCheckpoint, int totalTranslogOps, long maxSeenAutoIdTimestamp, + long maxSeqNoOfUpdatesOrDeletes, ActionListener listener) throws IOException { + final List operations = nextBatch.get(); + // send the leftover operations or if no operations were sent, request the target to respond with its local checkpoint + if (operations.isEmpty() == false || firstBatch) { + cancellableThreads.execute(() -> { + recoveryTarget.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, + ActionListener.wrap( + newCheckpoint -> { + sendBatch(nextBatch, false, SequenceNumbers.max(targetLocalCheckpoint, newCheckpoint), + totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, listener); + }, + listener::onFailure + )); + }); + } else { + listener.onResponse(targetLocalCheckpoint); } - - logger.trace("sent final batch of [{}][{}] (total: [{}]) translog operations", ops, new ByteSizeValue(size), expectedTotalOps); - - stopWatch.stop(); - final TimeValue tookTime = stopWatch.totalTime(); - logger.trace("recovery [phase2]: took [{}]", tookTime); - return new SendSnapshotResult(targetLocalCheckpoint.get(), totalSentOps, tookTime); } void finalizeRecovery(final long targetLocalCheckpoint, final ActionListener listener) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 97f2cadfa3a5d..0cecc925b2488 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -76,6 +76,10 @@ import org.elasticsearch.test.DummyShardLock; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; import java.io.IOException; import java.io.OutputStream; @@ -115,6 +119,18 @@ public class RecoverySourceHandlerTests extends ESTestCase { private final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1); private final ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + private ThreadPool threadPool; + + @Before + public void setUpThreadPool() { + threadPool = new TestThreadPool(getTestName()); + } + + @After + public void tearDownThreadPool() { + terminate(threadPool); + } + public void testSendFiles() throws Throwable { Settings settings = Settings.builder().put("indices.recovery.concurrent_streams", 1). put("indices.recovery.concurrent_small_file_streams", 1).build(); @@ -198,18 +214,17 @@ public StartRecoveryRequest getStartRecoveryRequest() throws IOException { } public void testSendSnapshotSendsOps() throws IOException { - final RecoverySettings recoverySettings = new RecoverySettings(Settings.EMPTY, service); - final int fileChunkSizeInBytes = recoverySettings.getChunkSize().bytesAsInt(); + final int fileChunkSizeInBytes = between(1, 4096); final StartRecoveryRequest request = getStartRecoveryRequest(); final IndexShard shard = mock(IndexShard.class); when(shard.state()).thenReturn(IndexShardState.STARTED); final List operations = new ArrayList<>(); - final int initialNumberOfDocs = randomIntBetween(16, 64); + final int initialNumberOfDocs = randomIntBetween(10, 1000); for (int i = 0; i < initialNumberOfDocs; i++) { final Engine.Index index = getIndex(Integer.toString(i)); operations.add(new Translog.Index(index, new Engine.IndexResult(1, 1, SequenceNumbers.UNASSIGNED_SEQ_NO, true))); } - final int numberOfDocsWithValidSequenceNumbers = randomIntBetween(16, 
64); + final int numberOfDocsWithValidSequenceNumbers = randomIntBetween(10, 1000); for (int i = initialNumberOfDocs; i < initialNumberOfDocs + numberOfDocsWithValidSequenceNumbers; i++) { final Engine.Index index = getIndex(Integer.toString(i)); operations.add(new Translog.Index(index, new Engine.IndexResult(1, 1, i - initialNumberOfDocs, true))); @@ -219,12 +234,14 @@ public void testSendSnapshotSendsOps() throws IOException { final long endingSeqNo = randomIntBetween((int) requiredStartingSeqNo - 1, numberOfDocsWithValidSequenceNumbers - 1); final List shippedOps = new ArrayList<>(); + final AtomicLong checkpointOnTarget = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); RecoveryTargetHandler recoveryTarget = new TestRecoveryTargetHandler() { @Override public void indexTranslogOperations(List operations, int totalTranslogOps, long timestamp, long msu, ActionListener listener) { shippedOps.addAll(operations); - listener.onResponse(SequenceNumbers.NO_OPS_PERFORMED); + checkpointOnTarget.set(randomLongBetween(checkpointOnTarget.get(), Long.MAX_VALUE)); + maybeExecuteAsync(() -> listener.onResponse(checkpointOnTarget.get())); } }; RecoverySourceHandler handler = new RecoverySourceHandler(shard, recoveryTarget, request, fileChunkSizeInBytes, between(1, 10)); @@ -239,6 +256,7 @@ public void indexTranslogOperations(List operations, int tot for (int i = 0; i < shippedOps.size(); i++) { assertThat(shippedOps.get(i), equalTo(operations.get(i + (int) startingSeqNo + initialNumberOfDocs))); } + assertThat(result.targetLocalCheckpoint, equalTo(checkpointOnTarget.get())); if (endingSeqNo >= requiredStartingSeqNo + 1) { // check that missing ops blows up List requiredOps = operations.subList(0, operations.size() - 1).stream() // remove last null marker @@ -253,6 +271,40 @@ public void indexTranslogOperations(List operations, int tot } } + public void testSendSnapshotStopOnError() throws Exception { + final int fileChunkSizeInBytes = between(1, 10 * 1024); + final 
StartRecoveryRequest request = getStartRecoveryRequest(); + final IndexShard shard = mock(IndexShard.class); + when(shard.state()).thenReturn(IndexShardState.STARTED); + final List ops = new ArrayList<>(); + for (int numOps = between(1, 256), i = 0; i < numOps; i++) { + final Engine.Index index = getIndex(Integer.toString(i)); + ops.add(new Translog.Index(index, new Engine.IndexResult(1, 1, i, true))); + } + final AtomicBoolean wasFailed = new AtomicBoolean(); + RecoveryTargetHandler recoveryTarget = new TestRecoveryTargetHandler() { + @Override + public void indexTranslogOperations(List operations, int totalTranslogOps, long timestamp, + long msu, ActionListener listener) { + if (randomBoolean()) { + maybeExecuteAsync(() -> listener.onResponse(SequenceNumbers.NO_OPS_PERFORMED)); + } else { + maybeExecuteAsync(() -> listener.onFailure(new RuntimeException("test - failed to index"))); + wasFailed.set(true); + } + } + }; + RecoverySourceHandler handler = new RecoverySourceHandler(shard, recoveryTarget, request, fileChunkSizeInBytes, between(1, 10)); + PlainActionFuture future = new PlainActionFuture<>(); + final long startingSeqNo = randomLongBetween(0, ops.size() - 1L); + final long endingSeqNo = randomLongBetween(startingSeqNo, ops.size() - 1L); + handler.phase2(startingSeqNo, startingSeqNo, endingSeqNo, newTranslogSnapshot(ops, Collections.emptyList()), + randomNonNegativeLong(), randomNonNegativeLong(), future); + if (wasFailed.get()) { + assertThat(expectThrows(RuntimeException.class, () -> future.actionGet()).getMessage(), equalTo("test - failed to index")); + } + } + private Engine.Index getIndex(final String id) { final String type = "test"; final ParseContext.Document document = new ParseContext.Document(); @@ -717,4 +769,12 @@ public void close() { } }; } + + private void maybeExecuteAsync(Runnable runnable) { + if (randomBoolean()) { + threadPool.generic().execute(runnable); + } else { + runnable.run(); + } + } } From 
587034dfa75cf153b22ae95d940a46fdee85dd43 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Thu, 17 Jan 2019 09:55:36 -0600 Subject: [PATCH 28/71] Add set_priority action to ILM (#37397) This commit adds a set_priority action to the hot, warm, and cold phases for an ILM policy. This action sets the `index.priority` on the managed index to allow different priorities between the hot, warm, and cold recoveries. This commit also includes the HLRC and documentation changes. closes #36905 --- .../IndexLifecycleNamedXContentProvider.java | 5 +- .../indexlifecycle/LifecyclePolicy.java | 7 +- .../indexlifecycle/SetPriorityAction.java | 96 ++++++++++++++ .../client/RestHighLevelClientTests.java | 6 +- .../GetLifecyclePolicyResponseTests.java | 3 +- .../LifecyclePolicyMetadataTests.java | 3 +- .../indexlifecycle/LifecyclePolicyTests.java | 15 ++- .../SetPriorityActionTests.java | 71 +++++++++++ .../reference/ilm/policy-definitions.asciidoc | 45 +++++++ .../xpack/core/XPackClientPlugin.java | 4 +- .../core/indexlifecycle/ReadOnlyAction.java | 2 +- .../indexlifecycle/SetPriorityAction.java | 117 ++++++++++++++++++ .../TimeseriesLifecycleType.java | 6 +- .../LifecyclePolicyMetadataTests.java | 6 +- .../indexlifecycle/LifecyclePolicyTests.java | 10 +- .../SetPriorityActionTests.java | 95 ++++++++++++++ .../TimeseriesLifecycleTypeTests.java | 33 ++++- .../action/PutLifecycleRequestTests.java | 7 +- .../TimeSeriesLifecycleActionsIT.java | 39 +++++- .../xpack/indexlifecycle/IndexLifecycle.java | 4 +- .../IndexLifecycleMetadataTests.java | 7 +- 21 files changed, 547 insertions(+), 34 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/SetPriorityAction.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/SetPriorityActionTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityActionTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java index 1c22f1e0654f8..a4e5f034b5154 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java @@ -53,7 +53,10 @@ public List getNamedXContentParsers() { ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), - FreezeAction::parse) + FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, + new ParseField(SetPriorityAction.NAME), + SetPriorityAction::parse) ); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java index 21a052500a4ae..1a0f80b740ee7 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java @@ -57,9 +57,10 @@ public class LifecyclePolicy implements ToXContentObject { throw new IllegalArgumentException("ordered " + PHASES_FIELD.getPreferredName() + " are not supported"); }, PHASES_FIELD); - ALLOWED_ACTIONS.put("hot", Sets.newHashSet(RolloverAction.NAME)); - ALLOWED_ACTIONS.put("warm", Sets.newHashSet(AllocateAction.NAME, ForceMergeAction.NAME, ReadOnlyAction.NAME, ShrinkAction.NAME)); - ALLOWED_ACTIONS.put("cold", Sets.newHashSet(AllocateAction.NAME, FreezeAction.NAME)); + ALLOWED_ACTIONS.put("hot", 
Sets.newHashSet(SetPriorityAction.NAME, RolloverAction.NAME)); + ALLOWED_ACTIONS.put("warm", Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, ForceMergeAction.NAME, + ReadOnlyAction.NAME, ShrinkAction.NAME)); + ALLOWED_ACTIONS.put("cold", Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME)); ALLOWED_ACTIONS.put("delete", Sets.newHashSet(DeleteAction.NAME)); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/SetPriorityAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/SetPriorityAction.java new file mode 100644 index 0000000000000..414d2a52ad048 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/SetPriorityAction.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.indexlifecycle; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +/** + * A {@link LifecycleAction} which sets the index's priority. The higher the priority, the faster the recovery. + */ +public class SetPriorityAction implements LifecycleAction, ToXContentObject { + public static final String NAME = "set_priority"; + private static final ParseField RECOVERY_PRIORITY_FIELD = new ParseField("priority"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, + a -> new SetPriorityAction((Integer) a[0])); + + //package private for testing + final Integer recoveryPriority; + + static { + PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : p.intValue() + , RECOVERY_PRIORITY_FIELD, ObjectParser.ValueType.INT_OR_NULL); + } + + public static SetPriorityAction parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public SetPriorityAction(@Nullable Integer recoveryPriority) { + if (recoveryPriority != null && recoveryPriority <= 0) { + throw new IllegalArgumentException("[" + RECOVERY_PRIORITY_FIELD.getPreferredName() + "] must be 0 or greater"); + } + this.recoveryPriority = recoveryPriority; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(RECOVERY_PRIORITY_FIELD.getPreferredName(), recoveryPriority); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SetPriorityAction that = (SetPriorityAction) o; + + return recoveryPriority != null ? recoveryPriority.equals(that.recoveryPriority) : that.recoveryPriority == null; + } + + @Override + public int hashCode() { + return recoveryPriority != null ? 
recoveryPriority.hashCode() : 0; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public String getName() { + return NAME; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index a94ab4541f0f9..6995fcf099ad2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -54,6 +54,7 @@ import org.elasticsearch.client.indexlifecycle.LifecycleAction; import org.elasticsearch.client.indexlifecycle.ReadOnlyAction; import org.elasticsearch.client.indexlifecycle.RolloverAction; +import org.elasticsearch.client.indexlifecycle.SetPriorityAction; import org.elasticsearch.client.indexlifecycle.ShrinkAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; @@ -644,7 +645,7 @@ public void testDefaultNamedXContents() { public void testProvidedNamedXContents() { List namedXContents = RestHighLevelClient.getProvidedNamedXContents(); - assertEquals(18, namedXContents.size()); + assertEquals(19, namedXContents.size()); Map, Integer> categories = new HashMap<>(); List names = new ArrayList<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { @@ -668,7 +669,7 @@ public void testProvidedNamedXContents() { assertTrue(names.contains(MeanReciprocalRank.NAME)); assertTrue(names.contains(DiscountedCumulativeGain.NAME)); assertTrue(names.contains(ExpectedReciprocalRank.NAME)); - assertEquals(Integer.valueOf(7), categories.get(LifecycleAction.class)); + assertEquals(Integer.valueOf(8), categories.get(LifecycleAction.class)); assertTrue(names.contains(AllocateAction.NAME)); assertTrue(names.contains(DeleteAction.NAME)); assertTrue(names.contains(ForceMergeAction.NAME)); @@ -676,6 +677,7 
@@ public void testProvidedNamedXContents() { assertTrue(names.contains(RolloverAction.NAME)); assertTrue(names.contains(ShrinkAction.NAME)); assertTrue(names.contains(FreezeAction.NAME)); + assertTrue(names.contains(SetPriorityAction.NAME)); } public void testApiNamingConventions() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java index d703d90d95ed9..0fb7b29067f22 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java @@ -67,7 +67,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java index 93fb69c2ab47d..25bfa5a4c43d2 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java @@ -63,7 +63,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java index 97c98919d8a88..4f04f814471c1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java @@ -39,10 +39,10 @@ import static org.hamcrest.Matchers.equalTo; public class LifecyclePolicyTests extends AbstractXContentTestCase { - private static final Set VALID_HOT_ACTIONS = Sets.newHashSet(RolloverAction.NAME); - private static final Set VALID_WARM_ACTIONS = Sets.newHashSet(AllocateAction.NAME, ForceMergeAction.NAME, - ReadOnlyAction.NAME, ShrinkAction.NAME); - private static final Set VALID_COLD_ACTIONS = Sets.newHashSet(AllocateAction.NAME, FreezeAction.NAME); 
+ private static final Set VALID_HOT_ACTIONS = Sets.newHashSet(SetPriorityAction.NAME, RolloverAction.NAME); + private static final Set VALID_WARM_ACTIONS = Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, + ForceMergeAction.NAME, ReadOnlyAction.NAME, ShrinkAction.NAME); + private static final Set VALID_COLD_ACTIONS = Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME); private static final Set VALID_DELETE_ACTIONS = Sets.newHashSet(DeleteAction.NAME); private String lifecycleName; @@ -67,7 +67,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } @@ -210,6 +211,8 @@ public static LifecyclePolicy createRandomPolicy(String lifecycleName) { return ShrinkActionTests.randomInstance(); case FreezeAction.NAME: return new FreezeAction(); + case SetPriorityAction.NAME: + return SetPriorityActionTests.randomInstance(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; @@ -241,6 +244,8 @@ private LifecycleAction getTestAction(String actionName) { return ShrinkActionTests.randomInstance(); case FreezeAction.NAME: return new FreezeAction(); + case SetPriorityAction.NAME: + return SetPriorityActionTests.randomInstance(); default: throw new 
IllegalArgumentException("unsupported phase action [" + actionName + "]"); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/SetPriorityActionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/SetPriorityActionTests.java new file mode 100644 index 0000000000000..f50935a87d398 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/SetPriorityActionTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.indexlifecycle; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class SetPriorityActionTests extends AbstractXContentTestCase { + + @Override + protected SetPriorityAction doParseInstance(XContentParser parser) throws IOException { + return SetPriorityAction.parse(parser); + } + + @Override + protected SetPriorityAction createTestInstance() { + return randomInstance(); + } + + static SetPriorityAction randomInstance() { + return new SetPriorityAction(randomIntBetween(1, 100)); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testNonPositivePriority() { + Exception e = expectThrows(Exception.class, () -> new SetPriorityAction(randomIntBetween(-100, 0))); + assertThat(e.getMessage(), equalTo("[priority] must be 0 or greater")); + } + + public void testNullPriorityAllowed(){ + SetPriorityAction nullPriority = new SetPriorityAction(null); + assertNull(nullPriority.recoveryPriority); + } + + public void testEqualsAndHashCode() { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copy); + } + + SetPriorityAction copy(SetPriorityAction setPriorityAction) { + return new SetPriorityAction(setPriorityAction.recoveryPriority); + } + + SetPriorityAction notCopy(SetPriorityAction setPriorityAction) { + return new SetPriorityAction(setPriorityAction.recoveryPriority + 1); + } +} diff --git a/docs/reference/ilm/policy-definitions.asciidoc b/docs/reference/ilm/policy-definitions.asciidoc index c4125496b38fb..2f71c20e2c76a 100644 --- a/docs/reference/ilm/policy-definitions.asciidoc +++ b/docs/reference/ilm/policy-definitions.asciidoc @@ -85,13 +85,16 @@ executing. The below list shows the actions which are available in each phase. 
* Hot + - <> - <> * Warm + - <> - <> - <> - <> - <> * Cold + - <> - <> - <> * Delete @@ -525,6 +528,48 @@ The above example illustrates a policy which attempts to delete an index one day after the index has been rolled over. It does not delete the index one day after it has been created. +[[ilm-set-priority-action]] +==== Set Priority + +Phases allowed: hot, warm, cold. + +This action sets the <> on the index as +soon as the policy enters the hot, warm, or cold phase. Indices with a higher +priority will be recovered before indices with lower priorities following a node +restart. Generally, indexes in the hot phase should have the highest value and +indexes in the cold phase should have the lowest values. For example: +100 for the hot phase, 50 for the warm phase, and 0 for the cold phase. +Indicies that don't set this value have an implicit default priority of 1. + +[[ilm-set-priority-options]] +.Set Priority Options +[options="header"] +|====== +| Name | Required | Default | Description +| `priority` | yes | - | The priority for the index. Must be 0 or greater. + The value may also be set to null to remove the priority. 
+ +|====== + +[source,js] +-------------------------------------------------- +PUT _ilm/policy/my_policy +{ + "policy": { + "phases": { + "warm": { + "actions": { + "set_priority" : { + "priority": 50 + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + [[ilm-shrink-action]] ==== Shrink diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index d7abe9a1f0f03..6865cd58c0dac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleType; +import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ReadOnlyAction; import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; @@ -427,7 +428,8 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java index 15edd51908bfe..0e6486eecb7b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java @@ -21,7 +21,7 @@ import java.util.List; /** - * A {@link LifecycleAction} which force-merges the index. + * A {@link LifecycleAction} which sets the index to be read-only. */ public class ReadOnlyAction implements LifecycleAction { public static final String NAME = "readonly"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java new file mode 100644 index 0000000000000..507da4613e22a --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +/** + * A {@link LifecycleAction} which sets the index's priority. The higher the priority, the faster the recovery. + */ +public class SetPriorityAction implements LifecycleAction { + public static final String NAME = "set_priority"; + private static final ParseField RECOVERY_PRIORITY_FIELD = new ParseField("priority"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + a -> new SetPriorityAction((Integer) a[0])); + + //package private for testing + final Integer recoveryPriority; + + static { + PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : p.intValue() + , RECOVERY_PRIORITY_FIELD, ObjectParser.ValueType.INT_OR_NULL); + } + + public static SetPriorityAction parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public SetPriorityAction(@Nullable Integer recoveryPriority) { + if (recoveryPriority != null && recoveryPriority < 0) { + throw new IllegalArgumentException("[" + RECOVERY_PRIORITY_FIELD.getPreferredName() + "] must be 0 or greater"); + } + this.recoveryPriority = recoveryPriority; + } + + public SetPriorityAction(StreamInput in) throws IOException { + this(in.readOptionalVInt()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(RECOVERY_PRIORITY_FIELD.getPreferredName(), recoveryPriority); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalVInt(recoveryPriority); + } + + @Override + public boolean isSafeAction() { + return true; + } + + @Override + public List toSteps(Client client, String phase, StepKey nextStepKey) { + StepKey key = new StepKey(phase, NAME, NAME); + Settings indexPriority = recoveryPriority == null ? + Settings.builder().putNull(IndexMetaData.INDEX_PRIORITY_SETTING.getKey()).build() + : Settings.builder().put(IndexMetaData.INDEX_PRIORITY_SETTING.getKey(), recoveryPriority).build(); + return Collections.singletonList(new UpdateSettingsStep(key, nextStepKey, client, indexPriority)); + } + + @Override + public List toStepKeys(String phase) { + return Collections.singletonList(new StepKey(phase, NAME, NAME)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SetPriorityAction that = (SetPriorityAction) o; + + return recoveryPriority != null ? 
recoveryPriority.equals(that.recoveryPriority) : that.recoveryPriority == null; + } + + @Override + public int hashCode() { + return recoveryPriority != null ? recoveryPriority.hashCode() : 0; + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java index 331a4f9c33aa0..5dad5725ba9ba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java @@ -34,10 +34,10 @@ public class TimeseriesLifecycleType implements LifecycleType { public static final String TYPE = "timeseries"; static final List VALID_PHASES = Arrays.asList("hot", "warm", "cold", "delete"); - static final List ORDERED_VALID_HOT_ACTIONS = Collections.singletonList(RolloverAction.NAME); - static final List ORDERED_VALID_WARM_ACTIONS = Arrays.asList(ReadOnlyAction.NAME, AllocateAction.NAME, + static final List ORDERED_VALID_HOT_ACTIONS = Arrays.asList(SetPriorityAction.NAME, RolloverAction.NAME); + static final List ORDERED_VALID_WARM_ACTIONS = Arrays.asList(SetPriorityAction.NAME, ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME); - static final List ORDERED_VALID_COLD_ACTIONS = Arrays.asList(AllocateAction.NAME, FreezeAction.NAME); + static final List ORDERED_VALID_COLD_ACTIONS = Arrays.asList(SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME); static final List ORDERED_VALID_DELETE_ACTIONS = Arrays.asList(DeleteAction.NAME); static final Set VALID_HOT_ACTIONS = Sets.newHashSet(ORDERED_VALID_HOT_ACTIONS); static final Set VALID_WARM_ACTIONS = Sets.newHashSet(ORDERED_VALID_WARM_ACTIONS); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java index a2ee5e3e9030d..d943f7ea65308 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java @@ -44,7 +44,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) )); } @@ -60,7 +61,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } diff 
--git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java index cb952420a408c..34e09824ed4b4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java @@ -53,7 +53,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) )); } @@ -69,7 +70,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } @@ -116,6 +118,8 @@ public 
static LifecyclePolicy randomTimeseriesLifecyclePolicyWithAllPhases(@Null return ShrinkActionTests.randomInstance(); case FreezeAction.NAME: return new FreezeAction(); + case SetPriorityAction.NAME: + return SetPriorityActionTests.randomInstance(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; @@ -164,6 +168,8 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l return ShrinkActionTests.randomInstance(); case FreezeAction.NAME: return new FreezeAction(); + case SetPriorityAction.NAME: + return SetPriorityActionTests.randomInstance(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityActionTests.java new file mode 100644 index 0000000000000..34634c3972d56 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityActionTests.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class SetPriorityActionTests extends AbstractActionTestCase { + + private final int priority = randomIntBetween(0, Integer.MAX_VALUE); + + static SetPriorityAction randomInstance() { + return new SetPriorityAction(randomIntBetween(2, Integer.MAX_VALUE - 1)); + } + + @Override + protected SetPriorityAction doParseInstance(XContentParser parser) { + return SetPriorityAction.parse(parser); + } + + @Override + protected SetPriorityAction createTestInstance() { + return new SetPriorityAction(priority); + } + + @Override + protected Reader instanceReader() { + return SetPriorityAction::new; + } + + public void testNonPositivePriority() { + Exception e = expectThrows(Exception.class, () -> new SetPriorityAction(randomIntBetween(-100, 0))); + assertThat(e.getMessage(), equalTo("[priority] must be 0 or greater")); + } + + public void testNullPriorityAllowed(){ + SetPriorityAction nullPriority = new SetPriorityAction((Integer) null); + assertNull(nullPriority.recoveryPriority); + } + + public void testToSteps() { + SetPriorityAction action = createTestInstance(); + String phase = randomAlphaOfLengthBetween(1, 10); + StepKey nextStepKey = new StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); + List steps = action.toSteps(null, phase, nextStepKey); + assertNotNull(steps); + assertEquals(1, steps.size()); + StepKey expectedFirstStepKey = new StepKey(phase, SetPriorityAction.NAME, SetPriorityAction.NAME); + UpdateSettingsStep firstStep = (UpdateSettingsStep) steps.get(0); + 
assertThat(firstStep.getKey(), equalTo(expectedFirstStepKey)); + assertThat(firstStep.getNextStepKey(), equalTo(nextStepKey)); + assertThat(firstStep.getSettings().size(), equalTo(1)); + assertEquals(priority, (long) IndexMetaData.INDEX_PRIORITY_SETTING.get(firstStep.getSettings())); + } + + public void testNullPriorityStep() { + SetPriorityAction action = new SetPriorityAction((Integer)null); + String phase = randomAlphaOfLengthBetween(1, 10); + StepKey nextStepKey = new StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); + List steps = action.toSteps(null, phase, nextStepKey); + assertNotNull(steps); + assertEquals(1, steps.size()); + StepKey expectedFirstStepKey = new StepKey(phase, SetPriorityAction.NAME, SetPriorityAction.NAME); + UpdateSettingsStep firstStep = (UpdateSettingsStep) steps.get(0); + assertThat(firstStep.getKey(), equalTo(expectedFirstStepKey)); + assertThat(firstStep.getNextStepKey(), equalTo(nextStepKey)); + assertThat(firstStep.getSettings().size(), equalTo(1)); + assertThat(IndexMetaData.INDEX_PRIORITY_SETTING.get(firstStep.getSettings()), + equalTo(IndexMetaData.INDEX_PRIORITY_SETTING.getDefault(firstStep.getSettings()))); + } + + public void testEqualsAndHashCode() { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copy, this::notCopy); + } + + SetPriorityAction copy(SetPriorityAction setPriorityAction) { + return new SetPriorityAction(setPriorityAction.recoveryPriority); + } + + SetPriorityAction notCopy(SetPriorityAction setPriorityAction) { + return new SetPriorityAction(setPriorityAction.recoveryPriority + 1); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java index 8b9a06fbcb2c6..76c8b1dd515ed 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java @@ -39,6 +39,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { private static final ShrinkAction TEST_SHRINK_ACTION = new ShrinkAction(1); private static final ReadOnlyAction TEST_READ_ONLY_ACTION = new ReadOnlyAction(); private static final FreezeAction TEST_FREEZE_ACTION = new FreezeAction(); + private static final SetPriorityAction TEST_PRIORITY_ACTION = new SetPriorityAction(0); public void testValidatePhases() { boolean invalid = randomBoolean(); @@ -61,7 +62,7 @@ public void testValidateHotPhase() { Map actions = VALID_HOT_ACTIONS .stream().map(this::getTestAction).collect(Collectors.toMap(LifecycleAction::getWriteableName, Function.identity())); if (randomBoolean()) { - invalidAction = getTestAction(randomFrom("allocate", "forcemerge", "delete", "shrink")); + invalidAction = getTestAction(randomFrom("allocate", "forcemerge", "delete", "shrink", "freeze")); actions.put(invalidAction.getWriteableName(), invalidAction); } Map hotPhase = Collections.singletonMap("hot", @@ -82,7 +83,7 @@ public void testValidateWarmPhase() { Map actions = randomSubsetOf(VALID_WARM_ACTIONS) .stream().map(this::getTestAction).collect(Collectors.toMap(LifecycleAction::getWriteableName, Function.identity())); if (randomBoolean()) { - invalidAction = getTestAction(randomFrom("rollover", "delete")); + invalidAction = getTestAction(randomFrom("rollover", "delete", "freeze")); actions.put(invalidAction.getWriteableName(), invalidAction); } Map warmPhase = Collections.singletonMap("warm", @@ -124,7 +125,7 @@ public void testValidateDeletePhase() { Map actions = VALID_DELETE_ACTIONS .stream().map(this::getTestAction).collect(Collectors.toMap(LifecycleAction::getWriteableName, Function.identity())); if (randomBoolean()) { - invalidAction = 
getTestAction(randomFrom("allocate", "rollover", "forcemerge", "shrink")); + invalidAction = getTestAction(randomFrom("allocate", "rollover", "forcemerge", "shrink", "freeze", "set_priority")); actions.put(invalidAction.getWriteableName(), invalidAction); } Map deletePhase = Collections.singletonMap("delete", @@ -163,6 +164,7 @@ public void testGetOrderedActionsHot() { Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); List orderedActions = TimeseriesLifecycleType.INSTANCE.getOrderedActions(hotPhase); assertTrue(isSorted(orderedActions, LifecycleAction::getWriteableName, ORDERED_VALID_HOT_ACTIONS)); + assertThat(orderedActions.indexOf(TEST_PRIORITY_ACTION), equalTo(0)); } public void testGetOrderedActionsWarm() { @@ -171,6 +173,7 @@ public void testGetOrderedActionsWarm() { Phase warmPhase = new Phase("warm", TimeValue.ZERO, actions); List orderedActions = TimeseriesLifecycleType.INSTANCE.getOrderedActions(warmPhase); assertTrue(isSorted(orderedActions, LifecycleAction::getWriteableName, ORDERED_VALID_WARM_ACTIONS)); + assertThat(orderedActions.indexOf(TEST_PRIORITY_ACTION), equalTo(0)); } public void testGetOrderedActionsCold() { @@ -179,6 +182,7 @@ public void testGetOrderedActionsCold() { Phase coldPhase = new Phase("cold", TimeValue.ZERO, actions); List orderedActions = TimeseriesLifecycleType.INSTANCE.getOrderedActions(coldPhase); assertTrue(isSorted(orderedActions, LifecycleAction::getWriteableName, ORDERED_VALID_COLD_ACTIONS)); + assertThat(orderedActions.indexOf(TEST_PRIORITY_ACTION), equalTo(0)); } public void testGetOrderedActionsDelete() { @@ -301,6 +305,8 @@ public void testGetPreviousPhaseName() { public void testGetNextActionName() { // Hot Phase + assertNextActionName("hot", SetPriorityAction.NAME, null, new String[] {}); + assertNextActionName("hot", SetPriorityAction.NAME, RolloverAction.NAME, new String[]{SetPriorityAction.NAME, RolloverAction.NAME}); assertNextActionName("hot", RolloverAction.NAME, null, new String[] {}); 
assertNextActionName("hot", RolloverAction.NAME, null, new String[] { RolloverAction.NAME }); assertInvalidAction("hot", "foo", new String[] { RolloverAction.NAME }); @@ -311,6 +317,16 @@ public void testGetNextActionName() { assertInvalidAction("hot", ShrinkAction.NAME, new String[] { RolloverAction.NAME }); // Warm Phase + assertNextActionName("warm", SetPriorityAction.NAME, ReadOnlyAction.NAME, + new String[]{SetPriorityAction.NAME, ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME}); + assertNextActionName("warm", SetPriorityAction.NAME, AllocateAction.NAME, + new String[]{SetPriorityAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME}); + assertNextActionName("warm", SetPriorityAction.NAME, ShrinkAction.NAME, + new String[]{SetPriorityAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME}); + assertNextActionName("warm", SetPriorityAction.NAME, ForceMergeAction.NAME, + new String[]{SetPriorityAction.NAME, ForceMergeAction.NAME}); + assertNextActionName("warm", SetPriorityAction.NAME, null, new String[]{SetPriorityAction.NAME}); + assertNextActionName("warm", ReadOnlyAction.NAME, AllocateAction.NAME, new String[] { ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); assertNextActionName("warm", ReadOnlyAction.NAME, ShrinkAction.NAME, @@ -355,6 +371,11 @@ public void testGetNextActionName() { new String[] { ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); // Cold Phase + assertNextActionName("cold", SetPriorityAction.NAME, FreezeAction.NAME, new String[]{SetPriorityAction.NAME, FreezeAction.NAME}); + assertNextActionName("cold", SetPriorityAction.NAME, AllocateAction.NAME, + new String[]{SetPriorityAction.NAME, AllocateAction.NAME}); + assertNextActionName("cold", SetPriorityAction.NAME, null, new String[] { SetPriorityAction.NAME }); + assertNextActionName("cold", SetPriorityAction.NAME, null, new String[] {}); 
assertNextActionName("cold", AllocateAction.NAME, null, new String[] { AllocateAction.NAME }); assertNextActionName("cold", AllocateAction.NAME, null, new String[] {}); assertNextActionName("cold", AllocateAction.NAME, null, new String[] {}); @@ -378,6 +399,8 @@ public void testGetNextActionName() { assertInvalidAction("delete", ReadOnlyAction.NAME, new String[] { DeleteAction.NAME }); assertInvalidAction("delete", RolloverAction.NAME, new String[] { DeleteAction.NAME }); assertInvalidAction("delete", ShrinkAction.NAME, new String[] { DeleteAction.NAME }); + assertInvalidAction("delete", FreezeAction.NAME, new String[] { DeleteAction.NAME }); + assertInvalidAction("delete", SetPriorityAction.NAME, new String[] { DeleteAction.NAME }); Phase phase = new Phase("foo", TimeValue.ZERO, Collections.emptyMap()); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, @@ -419,6 +442,8 @@ private ConcurrentMap convertActionNamesToActions(Strin return new ShrinkAction(1); case FreezeAction.NAME: return new FreezeAction(); + case SetPriorityAction.NAME: + return new SetPriorityAction(0); } return new DeleteAction(); }).collect(Collectors.toConcurrentMap(LifecycleAction::getWriteableName, Function.identity())); @@ -482,6 +507,8 @@ private LifecycleAction getTestAction(String actionName) { return TEST_SHRINK_ACTION; case FreezeAction.NAME: return TEST_FREEZE_ACTION; + case SetPriorityAction.NAME: + return TEST_PRIORITY_ACTION; default: throw new IllegalArgumentException("unsupported timeseries phase action [" + actionName + "]"); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java index cb547d179d5d5..d747e26161234 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.LifecycleType; import org.elasticsearch.xpack.core.indexlifecycle.ReadOnlyAction; import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; +import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.indexlifecycle.action.PutLifecycleAction.Request; @@ -66,7 +67,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) )); } @@ -82,7 +84,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new 
NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java index 779a737c88279..01eba362711b3 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.Phase; import org.elasticsearch.xpack.core.indexlifecycle.ReadOnlyAction; import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; +import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkStep; import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; @@ -440,6 +441,31 @@ public void testFreezeAction() throws Exception { }); } + public void testSetPriority() throws Exception { + createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.INDEX_PRIORITY_SETTING.getKey(), 100)); + int priority = randomIntBetween(0, 99); + createNewSingletonPolicy("warm", new SetPriorityAction(priority)); + updatePolicy(index, policy); + assertBusy(() -> { + Map settings = getOnlyIndexSettings(index); + assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY)); + assertThat(settings.get(IndexMetaData.INDEX_PRIORITY_SETTING.getKey()), equalTo(String.valueOf(priority))); + }); + } + + public void testSetNullPriority() throws Exception { + 
createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.INDEX_PRIORITY_SETTING.getKey(), 100)); + createNewSingletonPolicy("warm", new SetPriorityAction((Integer) null)); + updatePolicy(index, policy); + assertBusy(() -> { + Map settings = getOnlyIndexSettings(index); + assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY)); + assertNull(settings.get(IndexMetaData.INDEX_PRIORITY_SETTING.getKey())); + }); + } + @SuppressWarnings("unchecked") public void testNonexistentPolicy() throws Exception { String indexPrefix = randomAlphaOfLengthBetween(5,15).toLowerCase(Locale.ROOT); @@ -601,16 +627,21 @@ public void testRemoveAndReaddPolicy() throws Exception { } private void createFullPolicy(TimeValue hotTime) throws IOException { + Map hotActions = new HashMap<>(); + hotActions.put(SetPriorityAction.NAME, new SetPriorityAction(100)); + hotActions.put(RolloverAction.NAME, new RolloverAction(null, null, 1L)); Map warmActions = new HashMap<>(); + warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1)); warmActions.put(AllocateAction.NAME, new AllocateAction(1, singletonMap("_name", "node-1,node-2"), null, null)); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1)); + Map coldActions = new HashMap<>(); + coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); + coldActions.put(AllocateAction.NAME, new AllocateAction(0, singletonMap("_name", "node-3"), null, null)); Map phases = new HashMap<>(); - phases.put("hot", new Phase("hot", hotTime, singletonMap(RolloverAction.NAME, - new RolloverAction(null, null, 1L)))); + phases.put("hot", new Phase("hot", hotTime, hotActions)); phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions)); - phases.put("cold", new Phase("cold", TimeValue.ZERO, singletonMap(AllocateAction.NAME, - new AllocateAction(0, 
singletonMap("_name", "node-3"), null, null)))); + phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); phases.put("delete", new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, new DeleteAction()))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java index a2ffac8412eb1..0088b7fde1cba 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleSettings; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleType; +import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ReadOnlyAction; import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; @@ -159,7 +160,8 @@ public List getNa new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(DeleteAction.NAME), DeleteAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) ); } diff --git 
a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java index 455f35ceae2d8..5ac01f4753012 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.Phase; import org.elasticsearch.xpack.core.indexlifecycle.ReadOnlyAction; import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; +import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; @@ -83,7 +84,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) )); } @@ -99,7 +101,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new 
NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) )); return new NamedXContentRegistry(entries); } From a2d9c464b2cf9d0bb640e16321ba61514943473f Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 17 Jan 2019 08:21:37 -0800 Subject: [PATCH 29/71] [DOCS] Adds limitation to the get jobs API (#37549) --- docs/reference/ml/apis/get-job.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/ml/apis/get-job.asciidoc b/docs/reference/ml/apis/get-job.asciidoc index be61d6baea031..4eb7eaf5a7f07 100644 --- a/docs/reference/ml/apis/get-job.asciidoc +++ b/docs/reference/ml/apis/get-job.asciidoc @@ -27,6 +27,8 @@ group name, a comma-separated list of jobs, or a wildcard expression. You can get information for all jobs by using `_all`, by specifying `*` as the ``, or by omitting the ``. +IMPORTANT: This API returns a maximum of 10,000 jobs. + ==== Path Parameters From 6d64a2a90162e90c7626315bfe937c5f91cb996c Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 17 Jan 2019 17:46:35 +0100 Subject: [PATCH 30/71] Propagate Errors in executors to uncaught exception handler (#36137) This is a continuation of #28667 and has as goal to convert all executors to propagate errors to the uncaught exception handler. Notable missing ones were the direct executor and the scheduler. This commit also makes it the property of the executor, not the runnable, to ensure this property. A big part of this commit also consists of vastly improving the test coverage in this area. 
--- .../resources/forbidden/es-all-signatures.txt | 13 + .../threadpool/EvilThreadPoolTests.java | 311 ++++++++++++++++-- .../org/elasticsearch/ExceptionsHelper.java | 7 + .../common/util/concurrent/EsExecutors.java | 52 ++- .../util/concurrent/EsThreadPoolExecutor.java | 11 +- .../PrioritizedEsThreadPoolExecutor.java | 23 +- .../util/concurrent/PrioritizedRunnable.java | 10 +- .../QueueResizingEsThreadPoolExecutor.java | 29 +- .../common/util/concurrent/ThreadContext.java | 58 +--- .../common/util/concurrent/TimedRunnable.java | 7 +- .../util/concurrent/WrappedRunnable.java | 23 ++ .../elasticsearch/threadpool/Scheduler.java | 31 +- ...ueueResizingEsThreadPoolExecutorTests.java | 10 +- .../shard/GlobalCheckpointListenersTests.java | 3 +- .../autodetect/AutodetectProcessManager.java | 6 +- .../FileStructureFinderManagerTests.java | 4 +- .../TimeoutCheckerTests.java | 4 +- .../AutodetectProcessManagerTests.java | 23 ++ .../AutoDetectResultProcessorTests.java | 3 +- 19 files changed, 514 insertions(+), 114 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/util/concurrent/WrappedRunnable.java diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt index 130984eb58f17..2ea46376ae3bf 100644 --- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -50,3 +50,16 @@ java.nio.channels.SocketChannel#connect(java.net.SocketAddress) java.lang.Boolean#getBoolean(java.lang.String) org.apache.lucene.util.IOUtils @ use @org.elasticsearch.core.internal.io instead + +@defaultMessage use executors from org.elasticsearch.common.util.concurrent.EsExecutors instead which will properly bubble up Errors +java.util.concurrent.AbstractExecutorService#() +java.util.concurrent.ThreadPoolExecutor#(int, int, long, java.util.concurrent.TimeUnit, java.util.concurrent.BlockingQueue) 
+java.util.concurrent.ThreadPoolExecutor#(int, int, long, java.util.concurrent.TimeUnit, java.util.concurrent.BlockingQueue, java.util.concurrent.ThreadFactory) +java.util.concurrent.ThreadPoolExecutor#(int, int, long, java.util.concurrent.TimeUnit, java.util.concurrent.BlockingQueue, java.util.concurrent.RejectedExecutionHandler) +java.util.concurrent.ThreadPoolExecutor#(int, int, long, java.util.concurrent.TimeUnit, java.util.concurrent.BlockingQueue, java.util.concurrent.ThreadFactory, java.util.concurrent.RejectedExecutionHandler) + +@defaultMessage extend org.elasticsearch.threadpool.Scheduler.SafeScheduledThreadPoolExecutor instead which will properly bubble up Errors +java.util.concurrent.ScheduledThreadPoolExecutor#(int) +java.util.concurrent.ScheduledThreadPoolExecutor#(int, java.util.concurrent.ThreadFactory) +java.util.concurrent.ScheduledThreadPoolExecutor#(int, java.util.concurrent.RejectedExecutionHandler) +java.util.concurrent.ScheduledThreadPoolExecutor#(int, java.util.concurrent.ThreadFactory, java.util.concurrent.RejectedExecutionHandler) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java index da43927d1dfee..64cec9224965b 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java @@ -19,12 +19,21 @@ package org.elasticsearch.threadpool; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.test.ESTestCase; import org.junit.After; import 
org.junit.Before; import java.util.Optional; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -42,30 +51,279 @@ public void setUpThreadPool() { } @After - public void tearDownThreadPool() throws InterruptedException { + public void tearDownThreadPool() { terminate(threadPool); } - public void testExecutionException() throws InterruptedException { - runExecutionExceptionTest( - () -> { + public void testExecutionErrorOnDefaultThreadPoolTypes() throws InterruptedException { + for (String executor : ThreadPool.THREAD_POOL_TYPES.keySet()) { + checkExecutionError(getExecuteRunner(threadPool.executor(executor))); + checkExecutionError(getSubmitRunner(threadPool.executor(executor))); + checkExecutionError(getScheduleRunner(executor)); + } + } + + public void testExecutionErrorOnDirectExecutorService() throws InterruptedException { + final ExecutorService directExecutorService = EsExecutors.newDirectExecutorService(); + checkExecutionError(getExecuteRunner(directExecutorService)); + checkExecutionError(getSubmitRunner(directExecutorService)); + } + + public void testExecutionErrorOnFixedESThreadPoolExecutor() throws InterruptedException { + final EsThreadPoolExecutor fixedExecutor = EsExecutors.newFixed("test", 1, 1, + EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext()); + try { + checkExecutionError(getExecuteRunner(fixedExecutor)); + checkExecutionError(getSubmitRunner(fixedExecutor)); + } finally { + ThreadPool.terminate(fixedExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionErrorOnScalingESThreadPoolExecutor() throws InterruptedException { + final EsThreadPoolExecutor scalingExecutor = EsExecutors.newScaling("test", 1, 1, + 10, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test"), 
threadPool.getThreadContext()); + try { + checkExecutionError(getExecuteRunner(scalingExecutor)); + checkExecutionError(getSubmitRunner(scalingExecutor)); + } finally { + ThreadPool.terminate(scalingExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionErrorOnAutoQueueFixedESThreadPoolExecutor() throws InterruptedException { + final EsThreadPoolExecutor autoQueueFixedExecutor = EsExecutors.newAutoQueueFixed("test", 1, 1, + 1, 1, 1, TimeValue.timeValueSeconds(10), EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext()); + try { + checkExecutionError(getExecuteRunner(autoQueueFixedExecutor)); + checkExecutionError(getSubmitRunner(autoQueueFixedExecutor)); + } finally { + ThreadPool.terminate(autoQueueFixedExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionErrorOnSinglePrioritizingThreadPoolExecutor() throws InterruptedException { + final PrioritizedEsThreadPoolExecutor prioritizedExecutor = EsExecutors.newSinglePrioritizing("test", + EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext(), threadPool.scheduler()); + try { + checkExecutionError(getExecuteRunner(prioritizedExecutor)); + checkExecutionError(getSubmitRunner(prioritizedExecutor)); + checkExecutionError(r -> prioritizedExecutor.execute(r, TimeValue.ZERO, r)); + } finally { + ThreadPool.terminate(prioritizedExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionErrorOnScheduler() throws InterruptedException { + final ScheduledThreadPoolExecutor scheduler = Scheduler.initScheduler(Settings.EMPTY); + try { + checkExecutionError(getExecuteRunner(scheduler)); + checkExecutionError(getSubmitRunner(scheduler)); + checkExecutionError(r -> scheduler.schedule(r, randomFrom(0, 1), TimeUnit.MILLISECONDS)); + } finally { + Scheduler.terminate(scheduler, 10, TimeUnit.SECONDS); + } + } + + private void checkExecutionError(Consumer runner) throws InterruptedException { + logger.info("checking error for {}", runner); + final Runnable runnable; 
+ if (randomBoolean()) { + runnable = () -> { + throw new Error("future error"); + }; + } else { + runnable = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + + } + + @Override + protected void doRun() { throw new Error("future error"); - }, - true, - o -> { - assertTrue(o.isPresent()); - assertThat(o.get(), instanceOf(Error.class)); - assertThat(o.get(), hasToString(containsString("future error"))); - }); - runExecutionExceptionTest( - () -> { + } + }; + } + runExecutionTest( + runner, + runnable, + true, + o -> { + assertTrue(o.isPresent()); + assertThat(o.get(), instanceOf(Error.class)); + assertThat(o.get(), hasToString(containsString("future error"))); + }); + } + + public void testExecutionExceptionOnDefaultThreadPoolTypes() throws InterruptedException { + for (String executor : ThreadPool.THREAD_POOL_TYPES.keySet()) { + final boolean expectExceptionOnExecute = + // fixed_auto_queue_size wraps stuff into TimedRunnable, which is an AbstractRunnable + // TODO: this is dangerous as it will silently swallow exceptions, and possibly miss calling a response listener + ThreadPool.THREAD_POOL_TYPES.get(executor) != ThreadPool.ThreadPoolType.FIXED_AUTO_QUEUE_SIZE; + checkExecutionException(getExecuteRunner(threadPool.executor(executor)), expectExceptionOnExecute); + + // here, it's ok for the exception not to bubble up. 
Accessing the future will yield the exception + checkExecutionException(getSubmitRunner(threadPool.executor(executor)), false); + + final boolean expectExceptionOnSchedule = + // fixed_auto_queue_size wraps stuff into TimedRunnable, which is an AbstractRunnable + // TODO: this is dangerous as it will silently swallow exceptions, and possibly miss calling a response listener + ThreadPool.THREAD_POOL_TYPES.get(executor) != ThreadPool.ThreadPoolType.FIXED_AUTO_QUEUE_SIZE + // scheduler just swallows the exception here + // TODO: bubble these exceptions up + && ThreadPool.THREAD_POOL_TYPES.get(executor) != ThreadPool.ThreadPoolType.DIRECT; + checkExecutionException(getScheduleRunner(executor), expectExceptionOnSchedule); + } + } + + public void testExecutionExceptionOnDirectExecutorService() throws InterruptedException { + final ExecutorService directExecutorService = EsExecutors.newDirectExecutorService(); + checkExecutionException(getExecuteRunner(directExecutorService), true); + checkExecutionException(getSubmitRunner(directExecutorService), false); + } + + public void testExecutionExceptionOnFixedESThreadPoolExecutor() throws InterruptedException { + final EsThreadPoolExecutor fixedExecutor = EsExecutors.newFixed("test", 1, 1, + EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext()); + try { + checkExecutionException(getExecuteRunner(fixedExecutor), true); + checkExecutionException(getSubmitRunner(fixedExecutor), false); + } finally { + ThreadPool.terminate(fixedExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionExceptionOnScalingESThreadPoolExecutor() throws InterruptedException { + final EsThreadPoolExecutor scalingExecutor = EsExecutors.newScaling("test", 1, 1, + 10, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext()); + try { + checkExecutionException(getExecuteRunner(scalingExecutor), true); + checkExecutionException(getSubmitRunner(scalingExecutor), false); + } finally { + 
ThreadPool.terminate(scalingExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionExceptionOnAutoQueueFixedESThreadPoolExecutor() throws InterruptedException { + final EsThreadPoolExecutor autoQueueFixedExecutor = EsExecutors.newAutoQueueFixed("test", 1, 1, + 1, 1, 1, TimeValue.timeValueSeconds(10), EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext()); + try { + // fixed_auto_queue_size wraps stuff into TimedRunnable, which is an AbstractRunnable + // TODO: this is dangerous as it will silently swallow exceptions, and possibly miss calling a response listener + checkExecutionException(getExecuteRunner(autoQueueFixedExecutor), false); + checkExecutionException(getSubmitRunner(autoQueueFixedExecutor), false); + } finally { + ThreadPool.terminate(autoQueueFixedExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionExceptionOnSinglePrioritizingThreadPoolExecutor() throws InterruptedException { + final PrioritizedEsThreadPoolExecutor prioritizedExecutor = EsExecutors.newSinglePrioritizing("test", + EsExecutors.daemonThreadFactory("test"), threadPool.getThreadContext(), threadPool.scheduler()); + try { + checkExecutionException(getExecuteRunner(prioritizedExecutor), true); + checkExecutionException(getSubmitRunner(prioritizedExecutor), false); + checkExecutionException(r -> prioritizedExecutor.execute(r, TimeValue.ZERO, r), true); + } finally { + ThreadPool.terminate(prioritizedExecutor, 10, TimeUnit.SECONDS); + } + } + + public void testExecutionExceptionOnScheduler() throws InterruptedException { + final ScheduledThreadPoolExecutor scheduler = Scheduler.initScheduler(Settings.EMPTY); + try { + // scheduler just swallows the exceptions + // TODO: bubble these exceptions up + checkExecutionException(getExecuteRunner(scheduler), false); + checkExecutionException(getSubmitRunner(scheduler), false); + checkExecutionException(r -> scheduler.schedule(r, randomFrom(0, 1), TimeUnit.MILLISECONDS), false); + } finally { + 
Scheduler.terminate(scheduler, 10, TimeUnit.SECONDS); + } + } + + private void checkExecutionException(Consumer runner, boolean expectException) throws InterruptedException { + logger.info("checking exception for {}", runner); + final Runnable runnable; + final boolean willThrow; + if (randomBoolean()) { + runnable = () -> { + throw new IllegalStateException("future exception"); + }; + willThrow = expectException; + } else { + runnable = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + + } + + @Override + protected void doRun() { throw new IllegalStateException("future exception"); - }, - false, - o -> assertFalse(o.isPresent())); + } + }; + willThrow = false; + } + runExecutionTest( + runner, + runnable, + willThrow, + o -> { + assertEquals(willThrow, o.isPresent()); + if (willThrow) { + assertThat(o.get(), instanceOf(IllegalStateException.class)); + assertThat(o.get(), hasToString(containsString("future exception"))); + } + }); + } + + Consumer getExecuteRunner(ExecutorService executor) { + return new Consumer() { + @Override + public void accept(Runnable runnable) { + executor.execute(runnable); + } + + @Override + public String toString() { + return "executor(" + executor + ").execute()"; + } + }; + } + + Consumer getSubmitRunner(ExecutorService executor) { + return new Consumer() { + @Override + public void accept(Runnable runnable) { + executor.submit(runnable); + } + + @Override + public String toString() { + return "executor(" + executor + ").submit()"; + } + }; + } + + Consumer getScheduleRunner(String executor) { + return new Consumer() { + @Override + public void accept(Runnable runnable) { + threadPool.schedule(randomFrom(TimeValue.ZERO, TimeValue.timeValueMillis(1)), executor, runnable); + } + + @Override + public String toString() { + return "schedule(" + executor + ")"; + } + }; } - private void runExecutionExceptionTest( + private void runExecutionTest( + final Consumer runner, final Runnable runnable, final boolean 
expectThrowable, final Consumer> consumer) throws InterruptedException { @@ -82,13 +340,18 @@ private void runExecutionExceptionTest( final CountDownLatch supplierLatch = new CountDownLatch(1); - threadPool.generic().submit(() -> { - try { - runnable.run(); - } finally { - supplierLatch.countDown(); - } - }); + try { + runner.accept(() -> { + try { + runnable.run(); + } finally { + supplierLatch.countDown(); + } + }); + } catch (Throwable t) { + consumer.accept(Optional.of(t)); + return; + } supplierLatch.await(); diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 923a76c0acb20..e0525127ee7e7 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -243,6 +243,13 @@ public static Optional maybeError(final Throwable cause, final Logger log return Optional.empty(); } + /** + * See {@link #maybeError(Throwable, Logger)}. Uses the class-local logger. + */ + public static Optional maybeError(final Throwable cause) { + return maybeError(cause, logger); + } + /** * If the specified cause is an unrecoverable error, this method will rethrow the cause on a separate thread so that it can not be * caught and bubbles up to the uncaught exception handler. Note that the cause tree is examined for any {@link Error}. 
See diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 44367053406e3..cb358a0596d25 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.util.concurrent; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -27,10 +29,14 @@ import java.util.Arrays; import java.util.List; +import java.util.Optional; import java.util.concurrent.AbstractExecutorService; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.RunnableFuture; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; @@ -108,7 +114,45 @@ public static EsThreadPoolExecutor newAutoQueueFixed(String name, int size, int new EsAbortPolicy(), contextHolder); } - private static final ExecutorService DIRECT_EXECUTOR_SERVICE = new AbstractExecutorService() { + /** + * Checks if the runnable arose from asynchronous submission of a task to an executor. If an uncaught exception was thrown + * during the execution of this task, we need to inspect this runnable and see if it is an error that should be propagated + * to the uncaught exception handler. 
+ */ + public static void rethrowErrors(Runnable runnable) { + if (runnable instanceof RunnableFuture) { + try { + ((RunnableFuture) runnable).get(); + } catch (final Exception e) { + /* + * In theory, Future#get can only throw a cancellation exception, an interrupted exception, or an execution + * exception. We want to ignore cancellation exceptions, restore the interrupt status on interrupted exceptions, and + * inspect the cause of an execution. We are going to be extra paranoid here though and completely unwrap the + * exception to ensure that there is not a buried error anywhere. We assume that a general exception has been + * handled by the executed task or the task submitter. + */ + assert e instanceof CancellationException + || e instanceof InterruptedException + || e instanceof ExecutionException : e; + final Optional maybeError = ExceptionsHelper.maybeError(e); + if (maybeError.isPresent()) { + // throw this error where it will propagate to the uncaught exception handler + throw maybeError.get(); + } + if (e instanceof InterruptedException) { + // restore the interrupt status + Thread.currentThread().interrupt(); + } + } + } + } + + private static final class DirectExecutorService extends AbstractExecutorService { + + @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") + DirectExecutorService() { + super(); + } @Override public void shutdown() { @@ -131,16 +175,18 @@ public boolean isTerminated() { } @Override - public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { + public boolean awaitTermination(long timeout, TimeUnit unit) { throw new UnsupportedOperationException(); } @Override public void execute(Runnable command) { command.run(); + rethrowErrors(command); } + } - }; + private static final ExecutorService DIRECT_EXECUTOR_SERVICE = new DirectExecutorService(); /** * Returns an {@link ExecutorService} that executes submitted tasks on the current thread. 
This executor service does not support being diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java index 8bbf0a59ee06d..4bb82e5a01157 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.util.concurrent; +import org.elasticsearch.common.SuppressForbidden; + import java.util.concurrent.BlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; @@ -48,6 +50,7 @@ final String getName() { this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy(), contextHolder); } + @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler, ThreadContext contextHolder) { @@ -89,11 +92,8 @@ public interface ShutdownListener { } @Override - public void execute(final Runnable command) { - doExecute(wrapRunnable(command)); - } - - protected void doExecute(final Runnable command) { + public void execute(Runnable command) { + command = wrapRunnable(command); try { super.execute(command); } catch (EsRejectedExecutionException ex) { @@ -115,6 +115,7 @@ protected void doExecute(final Runnable command) { @Override protected void afterExecute(Runnable r, Throwable t) { super.afterExecute(r, t); + EsExecutors.rethrowErrors(unwrap(r)); assert assertDefaultContext(r); } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java index d1157efe77a84..c7998f0f3b902 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java @@ -96,13 +96,13 @@ private void addPending(List runnables, List pending, boolean /** innerRunnable can be null if task is finished but not removed from executor yet, * see {@link TieBreakingPrioritizedRunnable#run} and {@link TieBreakingPrioritizedRunnable#runAndClean} */ - pending.add(new Pending(unwrap(innerRunnable), t.priority(), t.insertionOrder, executing)); + pending.add(new Pending(super.unwrap(innerRunnable), t.priority(), t.insertionOrder, executing)); } } else if (runnable instanceof PrioritizedFutureTask) { PrioritizedFutureTask t = (PrioritizedFutureTask) runnable; Object task = t.task; if (t.task instanceof Runnable) { - task = unwrap((Runnable) t.task); + task = super.unwrap((Runnable) t.task); } pending.add(new Pending(task, t.priority, t.insertionOrder, executing)); } @@ -122,7 +122,7 @@ protected void afterExecute(Runnable r, Throwable t) { public void execute(Runnable command, final TimeValue timeout, final Runnable timeoutCallback) { command = wrapRunnable(command); - doExecute(command); + execute(command); if (timeout.nanos() >= 0) { if (command instanceof TieBreakingPrioritizedRunnable) { ((TieBreakingPrioritizedRunnable) command).scheduleTimeout(timer, timeoutCallback, timeout); @@ -149,6 +149,14 @@ protected Runnable wrapRunnable(Runnable command) { } } + @Override + protected Runnable unwrap(Runnable runnable) { + if (runnable instanceof WrappedRunnable) { + return super.unwrap(((WrappedRunnable) runnable).unwrap()); + } else { + return super.unwrap(runnable); + } + } @Override protected RunnableFuture newTaskFor(Runnable runnable, T value) { @@ -181,7 +189,7 @@ public Pending(Object task, 
Priority priority, long insertionOrder, boolean exec } } - private final class TieBreakingPrioritizedRunnable extends PrioritizedRunnable { + private final class TieBreakingPrioritizedRunnable extends PrioritizedRunnable implements WrappedRunnable { private Runnable runnable; private final long insertionOrder; @@ -246,11 +254,16 @@ private void runAndClean(Runnable run) { runnable = null; timeoutFuture = null; } + } + @Override + public Runnable unwrap() { + return runnable; } + } - private final class PrioritizedFutureTask extends FutureTask implements Comparable { + private static final class PrioritizedFutureTask extends FutureTask implements Comparable { final Object task; final Priority priority; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java index 7ef2e96e2c5a3..7f0b4ac1a13ff 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java @@ -29,7 +29,7 @@ public abstract class PrioritizedRunnable implements Runnable, Comparable runnableWrapper; + private final Function runnableWrapper; private final ResizableBlockingQueue workQueue; private final int tasksPerFrame; private final int minQueueSize; @@ -60,7 +60,7 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto QueueResizingEsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, ResizableBlockingQueue workQueue, int minQueueSize, int maxQueueSize, - Function runnableWrapper, final int tasksPerFrame, + Function runnableWrapper, final int tasksPerFrame, TimeValue targetedResponseTime, ThreadFactory threadFactory, XRejectedExecutionHandler handler, ThreadContext contextHolder) { super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, @@ -78,12 +78,18 @@ 
public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto } @Override - protected void doExecute(final Runnable command) { - // we are submitting a task, it has not yet started running (because super.excute() has not - // been called), but it could be immediately run, or run at a later time. We need the time - // this task entered the queue, which we get by creating a TimedRunnable, which starts the - // clock as soon as it is created. - super.doExecute(this.runnableWrapper.apply(command)); + protected Runnable wrapRunnable(Runnable command) { + return super.wrapRunnable(this.runnableWrapper.apply(command)); + } + + @Override + protected Runnable unwrap(Runnable runnable) { + final Runnable unwrapped = super.unwrap(runnable); + if (unwrapped instanceof WrappedRunnable) { + return ((WrappedRunnable) unwrapped).unwrap(); + } else { + return unwrapped; + } } /** @@ -146,11 +152,12 @@ protected void afterExecute(Runnable r, Throwable t) { // total time as a combination of the time in the queue and time spent running the task. We // only want runnables that did not throw errors though, because they could be fast-failures // that throw off our timings, so only check when t is null. 
- assert r instanceof TimedRunnable : "expected only TimedRunnables in queue"; - final long taskNanos = ((TimedRunnable) r).getTotalNanos(); + assert super.unwrap(r) instanceof TimedRunnable : "expected only TimedRunnables in queue"; + final TimedRunnable timedRunnable = (TimedRunnable) super.unwrap(r); + final long taskNanos = timedRunnable.getTotalNanos(); final long totalNanos = totalTaskNanos.addAndGet(taskNanos); - final long taskExecutionNanos = ((TimedRunnable) r).getTotalExecutionNanos(); + final long taskExecutionNanos = timedRunnable.getTotalExecutionNanos(); assert taskExecutionNanos >= 0 : "expected task to always take longer than 0 nanoseconds, got: " + taskExecutionNanos; executionEWMA.addValue(taskExecutionNanos); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 79d7c3510c2d1..2c1011d1d9e53 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -18,10 +18,9 @@ */ package org.elasticsearch.common.util.concurrent; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.CloseableThreadLocal; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.common.io.stream.StreamInput; @@ -32,27 +31,23 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpTransportSettings; -import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT; -import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE; - import java.io.Closeable; import java.io.IOException; +import 
java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; -import java.util.concurrent.CancellationException; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.RunnableFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; -import java.nio.charset.StandardCharsets; + +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE; /** @@ -352,11 +347,8 @@ public Runnable preserveContext(Runnable command) { * Unwraps a command that was previously wrapped by {@link #preserveContext(Runnable)}. */ public Runnable unwrap(Runnable command) { - if (command instanceof ContextPreservingAbstractRunnable) { - return ((ContextPreservingAbstractRunnable) command).unwrap(); - } - if (command instanceof ContextPreservingRunnable) { - return ((ContextPreservingRunnable) command).unwrap(); + if (command instanceof WrappedRunnable) { + return ((WrappedRunnable) command).unwrap(); } return command; } @@ -642,7 +634,7 @@ public void close() { /** * Wraps a Runnable to preserve the thread context. */ - private class ContextPreservingRunnable implements Runnable { + private class ContextPreservingRunnable implements WrappedRunnable { private final Runnable in; private final ThreadContext.StoredContext ctx; @@ -658,36 +650,6 @@ public void run() { ctx.restore(); whileRunning = true; in.run(); - if (in instanceof RunnableFuture) { - /* - * The wrapped runnable arose from asynchronous submission of a task to an executor. 
If an uncaught exception was thrown - * during the execution of this task, we need to inspect this runnable and see if it is an error that should be - * propagated to the uncaught exception handler. - */ - try { - ((RunnableFuture) in).get(); - } catch (final Exception e) { - /* - * In theory, Future#get can only throw a cancellation exception, an interrupted exception, or an execution - * exception. We want to ignore cancellation exceptions, restore the interrupt status on interrupted exceptions, and - * inspect the cause of an execution. We are going to be extra paranoid here though and completely unwrap the - * exception to ensure that there is not a buried error anywhere. We assume that a general exception has been - * handled by the executed task or the task submitter. - */ - assert e instanceof CancellationException - || e instanceof InterruptedException - || e instanceof ExecutionException : e; - final Optional maybeError = ExceptionsHelper.maybeError(e, logger); - if (maybeError.isPresent()) { - // throw this error where it will propagate to the uncaught exception handler - throw maybeError.get(); - } - if (e instanceof InterruptedException) { - // restore the interrupt status - Thread.currentThread().interrupt(); - } - } - } whileRunning = false; } catch (IllegalStateException ex) { if (whileRunning || threadLocal.closed.get() == false) { @@ -704,6 +666,7 @@ public String toString() { return in.toString(); } + @Override public Runnable unwrap() { return in; } @@ -712,7 +675,7 @@ public Runnable unwrap() { /** * Wraps an AbstractRunnable to preserve the thread context. 
*/ - private class ContextPreservingAbstractRunnable extends AbstractRunnable { + private class ContextPreservingAbstractRunnable extends AbstractRunnable implements WrappedRunnable { private final AbstractRunnable in; private final ThreadContext.StoredContext creatorsContext; @@ -773,6 +736,7 @@ public String toString() { return in.toString(); } + @Override public AbstractRunnable unwrap() { return in; } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java index ad5519c0a76df..6d01f5e5cc255 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java @@ -23,7 +23,7 @@ * A class used to wrap a {@code Runnable} that allows capturing the time of the task since creation * through execution as well as only execution time. */ -class TimedRunnable extends AbstractRunnable { +class TimedRunnable extends AbstractRunnable implements WrappedRunnable { private final Runnable original; private final long creationTimeNanos; private long startTimeNanos; @@ -94,4 +94,9 @@ long getTotalExecutionNanos() { return Math.max(finishTimeNanos - startTimeNanos, 1); } + @Override + public Runnable unwrap() { + return original; + } + } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/WrappedRunnable.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/WrappedRunnable.java new file mode 100644 index 0000000000000..37519968253d6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/WrappedRunnable.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.util.concurrent; + +public interface WrappedRunnable extends Runnable { + Runnable unwrap(); +} diff --git a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java index 2901fc1f7a8ed..1b7c74ed6eec4 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java +++ b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java @@ -19,6 +19,7 @@ package org.elasticsearch.threadpool; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -26,8 +27,10 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; @@ -37,7 +40,7 @@ public interface Scheduler { static ScheduledThreadPoolExecutor initScheduler(Settings settings) { - ScheduledThreadPoolExecutor scheduler = new ScheduledThreadPoolExecutor(1, + final ScheduledThreadPoolExecutor 
scheduler = new SafeScheduledThreadPoolExecutor(1, EsExecutors.daemonThreadFactory(settings, "scheduler"), new EsAbortPolicy()); scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); @@ -206,4 +209,30 @@ public void onAfter() { } } } + + /** + * This subclass ensures to properly bubble up Throwable instances of type Error. + */ + class SafeScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor { + + @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") + public SafeScheduledThreadPoolExecutor(int corePoolSize, ThreadFactory threadFactory, RejectedExecutionHandler handler) { + super(corePoolSize, threadFactory, handler); + } + + @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") + public SafeScheduledThreadPoolExecutor(int corePoolSize, ThreadFactory threadFactory) { + super(corePoolSize, threadFactory); + } + + @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") + public SafeScheduledThreadPoolExecutor(int corePoolSize) { + super(corePoolSize); + } + + @Override + protected void afterExecute(Runnable r, Throwable t) { + EsExecutors.rethrowErrors(r); + } + } } diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java index 3f2c8fabec27b..8e4c729ee9cef 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java @@ -226,19 +226,13 @@ public void testExecutionEWMACalculation() throws Exception { context.close(); } - private Function randomBetweenLimitsWrapper(final int minNs, final int maxNs) { - return (runnable) -> { - return new 
SettableTimedRunnable(randomIntBetween(minNs, maxNs)); - }; - } - - private Function fastWrapper() { + private Function fastWrapper() { return (runnable) -> { return new SettableTimedRunnable(TimeUnit.NANOSECONDS.toNanos(100)); }; } - private Function slowWrapper() { + private Function slowWrapper() { return (runnable) -> { return new SettableTimedRunnable(TimeUnit.MINUTES.toNanos(2)); }; diff --git a/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java index fa0e0cee1435f..59c3553d25fd2 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; import org.junit.After; import org.mockito.ArgumentCaptor; @@ -68,7 +69,7 @@ public class GlobalCheckpointListenersTests extends ESTestCase { private final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); private final ScheduledThreadPoolExecutor scheduler = - new ScheduledThreadPoolExecutor(1, EsExecutors.daemonThreadFactory(Settings.EMPTY, "scheduler")); + new Scheduler.SafeScheduledThreadPoolExecutor(1, EsExecutors.daemonThreadFactory(Settings.EMPTY, "scheduler")); @After public void shutdownScheduler() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index fb6e9d46f4064..567ade2c22a0f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -12,12 +12,14 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -758,7 +760,7 @@ public ByteSizeValue getMinLocalStorageAvailable() { * operations are initially added to a queue and a worker thread from ml autodetect threadpool will process each * operation at a time. 
*/ - class AutodetectWorkerExecutorService extends AbstractExecutorService { + static class AutodetectWorkerExecutorService extends AbstractExecutorService { private final ThreadContext contextHolder; private final CountDownLatch awaitTermination = new CountDownLatch(1); @@ -766,6 +768,7 @@ class AutodetectWorkerExecutorService extends AbstractExecutorService { private volatile boolean running = true; + @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") AutodetectWorkerExecutorService(ThreadContext contextHolder) { this.contextHolder = contextHolder; } @@ -813,6 +816,7 @@ void start() { } catch (Exception e) { logger.error("error handling job operation", e); } + EsExecutors.rethrowErrors(contextHolder.unwrap(runnable)); } } } catch (InterruptedException e) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java index 246c96011c2bf..978f1c5286de8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java @@ -8,6 +8,7 @@ import com.ibm.icu.text.CharsetMatch; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; import org.junit.After; import org.junit.Before; @@ -21,7 +22,6 @@ import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import static 
org.elasticsearch.xpack.ml.filestructurefinder.FileStructureOverrides.EMPTY_OVERRIDES; @@ -36,7 +36,7 @@ public class FileStructureFinderManagerTests extends FileStructureTestCase { @Before public void setup() { - scheduler = new ScheduledThreadPoolExecutor(1); + scheduler = new Scheduler.SafeScheduledThreadPoolExecutor(1); structureFinderManager = new FileStructureFinderManager(scheduler); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java index ea581f663462f..2770656279cff 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java @@ -8,11 +8,11 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.grok.Grok; +import org.elasticsearch.threadpool.Scheduler; import org.junit.After; import org.junit.Before; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledThreadPoolExecutor; public class TimeoutCheckerTests extends FileStructureTestCase { @@ -20,7 +20,7 @@ public class TimeoutCheckerTests extends FileStructureTestCase { @Before public void createScheduler() { - scheduler = new ScheduledThreadPoolExecutor(1); + scheduler = new Scheduler.SafeScheduledThreadPoolExecutor(1); } @After diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 346e9aa5d5dbc..998297070c27b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -41,6 +42,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager.AutodetectWorkerExecutorService; import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; @@ -78,6 +80,7 @@ import static org.elasticsearch.mock.orig.Mockito.verify; import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; import static org.elasticsearch.mock.orig.Mockito.when; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -650,6 +653,26 @@ public void testCreate_givenNonZeroCountsAndNoModelSnapshotNorQuantiles() { verifyNoMoreInteractions(auditor); } + public void testAutodetectWorkerExecutorServiceDoesNotSwallowErrors() { + final ThreadPool threadPool = new TestThreadPool("testAutodetectWorkerExecutorServiceDoesNotSwallowErrors"); + try { + final AutodetectWorkerExecutorService executor = new AutodetectWorkerExecutorService(threadPool.getThreadContext()); + if (randomBoolean()) { + 
executor.submit(() -> { + throw new Error("future error"); + }); + } else { + executor.execute(() -> { + throw new Error("future error"); + }); + } + final Error e = expectThrows(Error.class, () -> executor.start()); + assertThat(e.getMessage(), containsString("future error")); + } finally { + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } + } + private AutodetectProcessManager createNonSpyManager(String jobId) { Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java index 807ac81830904..8c0f5da6366a0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; @@ -81,7 +82,7 @@ public class AutoDetectResultProcessorTests extends ESTestCase { @Before public void setUpMocks() { - executor = new ScheduledThreadPoolExecutor(1); + executor = new Scheduler.SafeScheduledThreadPoolExecutor(1); client = mock(Client.class); threadPool = mock(ThreadPool.class); when(client.threadPool()).thenReturn(threadPool); From 68de2edb14280bafa8cd7f92978237ddb8a6cc90 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 17 Jan 2019 19:18:47 +0100 Subject: [PATCH 31/71] Fix assertion at end of 
forceRefreshes (#37559) This commit ensures that we only change refreshListeners to a list if we're actually adding something to the list. --- .../index/shard/RefreshListeners.java | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java b/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java index b4b9e13f7e063..713563eb111ba 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java @@ -129,15 +129,12 @@ public boolean addOrNotify(Translog.Location location, Consumer listene return true; } synchronized (this) { - List>> listeners = refreshListeners; - if (listeners == null) { - if (closed) { - throw new IllegalStateException("can't wait for refresh on a closed index"); - } - listeners = new ArrayList<>(); - refreshListeners = listeners; + if (closed) { + throw new IllegalStateException("can't wait for refresh on a closed index"); } - if (refreshForcers == 0 && listeners.size() < getMaxRefreshListeners.getAsInt()) { + List>> listeners = refreshListeners; + final int maxRefreshes = getMaxRefreshListeners.getAsInt(); + if (refreshForcers == 0 && maxRefreshes > 0 && (listeners == null || listeners.size() < maxRefreshes)) { ThreadContext.StoredContext storedContext = threadContext.newStoredContext(true); Consumer contextPreservingListener = forced -> { try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { @@ -145,8 +142,12 @@ public boolean addOrNotify(Translog.Location location, Consumer listene listener.accept(forced); } }; + if (listeners == null) { + listeners = new ArrayList<>(); + } // We have a free slot so register the listener listeners.add(new Tuple<>(location, contextPreservingListener)); + refreshListeners = listeners; return false; } } From 6dcb3af4c8300d2d5f5fc9848c2a039956edda5d Mon Sep 17 00:00:00 2001 From: 
Lisa Cawley Date: Thu, 17 Jan 2019 10:47:15 -0800 Subject: [PATCH 32/71] [DOCS] Adds size limitation to the get datafeeds APIs (#37578) --- docs/reference/ml/apis/get-datafeed-stats.asciidoc | 1 + docs/reference/ml/apis/get-datafeed.asciidoc | 2 ++ docs/reference/ml/apis/get-job-stats.asciidoc | 2 ++ 3 files changed, 5 insertions(+) diff --git a/docs/reference/ml/apis/get-datafeed-stats.asciidoc b/docs/reference/ml/apis/get-datafeed-stats.asciidoc index e43a2f454ca5e..34c27d3dae962 100644 --- a/docs/reference/ml/apis/get-datafeed-stats.asciidoc +++ b/docs/reference/ml/apis/get-datafeed-stats.asciidoc @@ -32,6 +32,7 @@ statistics for all {dfeeds} by using `_all`, by specifying `*` as the If the {dfeed} is stopped, the only information you receive is the `datafeed_id` and the `state`. +IMPORTANT: This API returns a maximum of 10,000 {dfeeds}. ==== Path Parameters diff --git a/docs/reference/ml/apis/get-datafeed.asciidoc b/docs/reference/ml/apis/get-datafeed.asciidoc index b54eb59bb7a52..402838742dfa6 100644 --- a/docs/reference/ml/apis/get-datafeed.asciidoc +++ b/docs/reference/ml/apis/get-datafeed.asciidoc @@ -27,6 +27,8 @@ comma-separated list of {dfeeds} or a wildcard expression. You can get information for all {dfeeds} by using `_all`, by specifying `*` as the ``, or by omitting the ``. +IMPORTANT: This API returns a maximum of 10,000 {dfeeds}. + ==== Path Parameters `feed_id`:: diff --git a/docs/reference/ml/apis/get-job-stats.asciidoc b/docs/reference/ml/apis/get-job-stats.asciidoc index 7cc6d18b86a13..b674b01802bee 100644 --- a/docs/reference/ml/apis/get-job-stats.asciidoc +++ b/docs/reference/ml/apis/get-job-stats.asciidoc @@ -29,6 +29,8 @@ group name, a comma-separated list of jobs, or a wildcard expression. You can get statistics for all jobs by using `_all`, by specifying `*` as the ``, or by omitting the ``. +IMPORTANT: This API returns a maximum of 10,000 jobs. 
+ ==== Path Parameters From 5782a5bbbcf0074a0f8dff8028cc1a1106e36ef0 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Thu, 17 Jan 2019 15:12:01 -0500 Subject: [PATCH 33/71] Mute UnicastZenPingTests#testSimplePings relates #26701 --- .../org/elasticsearch/discovery/zen/UnicastZenPingTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index f06ef3e72808a..3178663a1f684 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -135,6 +135,7 @@ public void tearDown() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/26701") public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); From a0c504e4a3ded75352fb25c0a0f18b5c191e102e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 17 Jan 2019 13:21:12 -0800 Subject: [PATCH 34/71] Create specific exception for when snapshots are in progress (#37550) delete and close index actions threw IllegalArgumentExceptions when attempting to run against an index that has a snapshot in progress. This change introduces a dedicated SnapshotInProgressException for these scenarios. This is done to explicitly signal to clients that this is the reason the action failed, and it is a retryable error. relates to #37541. 
--- .../elasticsearch/ElasticsearchException.java | 4 +- .../SnapshotInProgressException.java | 47 +++++++++++++++++++ .../snapshots/SnapshotsService.java | 4 +- .../ExceptionSerializationTests.java | 2 + .../MetaDataDeleteIndexServiceTests.java | 3 +- .../MetaDataIndexStateServiceTests.java | 3 +- .../SharedClusterSnapshotRestoreIT.java | 4 +- 7 files changed, 60 insertions(+), 7 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/snapshots/SnapshotInProgressException.java diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index d18d4d4820f7d..530c5ce4f6396 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1010,7 +1010,9 @@ private enum ElasticsearchExceptionHandle { COORDINATION_STATE_REJECTED_EXCEPTION(org.elasticsearch.cluster.coordination.CoordinationStateRejectedException.class, org.elasticsearch.cluster.coordination.CoordinationStateRejectedException::new, 150, Version.V_7_0_0), CLUSTER_ALREADY_BOOTSTRAPPED_EXCEPTION(org.elasticsearch.cluster.coordination.ClusterAlreadyBootstrappedException.class, - org.elasticsearch.cluster.coordination.ClusterAlreadyBootstrappedException::new, 151, Version.V_7_0_0); + org.elasticsearch.cluster.coordination.ClusterAlreadyBootstrappedException::new, 151, Version.V_7_0_0), + SNAPSHOT_IN_PROGRESS_EXCEPTION(org.elasticsearch.snapshots.SnapshotInProgressException.class, + org.elasticsearch.snapshots.SnapshotInProgressException::new, 152, Version.V_7_0_0); final Class exceptionClass; final CheckedFunction constructor; diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInProgressException.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInProgressException.java new file mode 100644 index 0000000000000..1fac4642118da --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInProgressException.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +/** + * Thrown on the attempt to execute an action that requires + * that no snapshot is in progress. 
+ */ +public class SnapshotInProgressException extends ElasticsearchException { + + public SnapshotInProgressException(String msg) { + super(msg); + } + + public SnapshotInProgressException(StreamInput in) throws IOException { + super(in); + } + + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } +} + diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 153bb1fbf2fcf..86ed2095433b2 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -1447,7 +1447,7 @@ private ImmutableOpenMap shard public static void checkIndexDeletion(ClusterState currentState, Set indices) { Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); if (indicesToFail != null) { - throw new IllegalArgumentException("Cannot delete indices that are being snapshotted: " + indicesToFail + + throw new SnapshotInProgressException("Cannot delete indices that are being snapshotted: " + indicesToFail + ". Try again after snapshot finishes or cancel the currently running snapshot."); } } @@ -1459,7 +1459,7 @@ public static void checkIndexDeletion(ClusterState currentState, Set indices) { Set indicesToFail = indicesToFailForCloseOrDeletion(currentState, indices); if (indicesToFail != null) { - throw new IllegalArgumentException("Cannot close indices that are being snapshotted: " + indicesToFail + + throw new SnapshotInProgressException("Cannot close indices that are being snapshotted: " + indicesToFail + ". 
Try again after snapshot finishes or cancel the currently running snapshot."); } } diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index cee57c9f50c47..489a98dcbaac6 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -79,6 +79,7 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.VersionUtils; @@ -809,6 +810,7 @@ public void testIds() { ids.put(149, MultiBucketConsumerService.TooManyBucketsException.class); ids.put(150, CoordinationStateRejectedException.class); ids.put(151, ClusterAlreadyBootstrappedException.class); + ids.put(152, SnapshotInProgressException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java index d65a35b8c26c6..5905d528ff43f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -63,7 +64,7 @@ SnapshotsInProgress.State.INIT, singletonList(new IndexId(index, 
"doesn't matter ClusterState state = ClusterState.builder(clusterState(index)) .putCustom(SnapshotsInProgress.TYPE, snaps) .build(); - Exception e = expectThrows(IllegalArgumentException.class, + Exception e = expectThrows(SnapshotInProgressException.class, () -> service.deleteIndices(state, singleton(state.metaData().getIndices().get(index).getIndex()))); assertEquals("Cannot delete indices that are being snapshotted: [[" + index + "]]. Try again after snapshot finishes " + "or cancel the currently running snapshot.", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java index c30925514bb93..56ee25ee5febb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -171,7 +172,7 @@ public void testAddIndexClosedBlocks() { assertThat(exception.getMessage(), containsString("Cannot close indices that are being restored: [[restored]]")); } { - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SnapshotInProgressException exception = expectThrows(SnapshotInProgressException.class, () -> { ClusterState state = addSnapshotIndex("snapshotted", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState); if (randomBoolean()) { state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java 
b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 1826064c97c78..e9ce98b564e1d 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -2492,7 +2492,7 @@ public void testCloseOrDeleteIndexDuringSnapshot() throws Exception { logger.info("--> delete index while non-partial snapshot is running"); client.admin().indices().prepareDelete("test-idx-1").get(); fail("Expected deleting index to fail during snapshot"); - } catch (IllegalArgumentException e) { + } catch (SnapshotInProgressException e) { assertThat(e.getMessage(), containsString("Cannot delete indices that are being snapshotted: [[test-idx-1/")); } } else { @@ -2500,7 +2500,7 @@ public void testCloseOrDeleteIndexDuringSnapshot() throws Exception { logger.info("--> close index while non-partial snapshot is running"); client.admin().indices().prepareClose("test-idx-1").get(); fail("Expected closing index to fail during snapshot"); - } catch (IllegalArgumentException e) { + } catch (SnapshotInProgressException e) { assertThat(e.getMessage(), containsString("Cannot close indices that are being snapshotted: [[test-idx-1/")); } } From 381d035cd6cb2e859df956e57b5d033dd19e93b5 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 17 Jan 2019 22:23:23 +0100 Subject: [PATCH 35/71] Remove Redundant RestoreRequest Class (#37535) * Same as #37464 but for the restore side --- .../TransportRestoreSnapshotAction.java | 7 +- .../snapshots/RestoreService.java | 232 ++---------------- .../xpack/ccr/CcrRepositoryIT.java | 28 +-- 3 files changed, 31 insertions(+), 236 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 56fcba85167e7..b362be49b10ab 
100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -82,12 +82,7 @@ protected ClusterBlockException checkBlock(RestoreSnapshotRequest request, Clust @Override protected void masterOperation(final RestoreSnapshotRequest request, final ClusterState state, final ActionListener listener) { - RestoreService.RestoreRequest restoreRequest = new RestoreService.RestoreRequest(request.repository(), request.snapshot(), - request.indices(), request.indicesOptions(), request.renamePattern(), request.renameReplacement(), - request.settings(), request.masterNodeTimeout(), request.includeGlobalState(), request.partial(), request.includeAliases(), - request.indexSettings(), request.ignoreIndexSettings(), "restore_snapshot[" + request.snapshot() + "]"); - - restoreService.restoreSnapshot(restoreRequest, new ActionListener() { + restoreService.restoreSnapshot(request, new ActionListener() { @Override public void onResponse(RestoreCompletionResponse restoreCompletionResponse) { if (restoreCompletionResponse.getRestoreInfo() == null && request.waitForCompletion()) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index eecac92d63e95..b8fa8c6f1a9c8 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -27,7 +27,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.cluster.ClusterChangedEvent; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateApplier; @@ -78,7 +78,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Predicate; @@ -100,7 +99,7 @@ *

* Restore operation is performed in several stages. *

- * First {@link #restoreSnapshot(RestoreRequest, org.elasticsearch.action.ActionListener)} + * First {@link #restoreSnapshot(RestoreSnapshotRequest, org.elasticsearch.action.ActionListener)} * method reads information about snapshot and metadata from repository. In update cluster state task it checks restore * preconditions, restores global state if needed, creates {@link RestoreInProgress} record with list of shards that needs * to be restored and adds this shard to the routing table using @@ -172,28 +171,30 @@ public RestoreService(ClusterService clusterService, RepositoriesService reposit * @param request restore request * @param listener restore listener */ - public void restoreSnapshot(final RestoreRequest request, final ActionListener listener) { + public void restoreSnapshot(final RestoreSnapshotRequest request, final ActionListener listener) { try { // Read snapshot info and metadata from the repository - Repository repository = repositoriesService.repository(request.repositoryName); + final String repositoryName = request.repository(); + Repository repository = repositoriesService.repository(repositoryName); final RepositoryData repositoryData = repository.getRepositoryData(); + final String snapshotName = request.snapshot(); final Optional incompatibleSnapshotId = - repositoryData.getIncompatibleSnapshotIds().stream().filter(s -> request.snapshotName.equals(s.getName())).findFirst(); + repositoryData.getIncompatibleSnapshotIds().stream().filter(s -> snapshotName.equals(s.getName())).findFirst(); if (incompatibleSnapshotId.isPresent()) { - throw new SnapshotRestoreException(request.repositoryName, request.snapshotName, "cannot restore incompatible snapshot"); + throw new SnapshotRestoreException(repositoryName, snapshotName, "cannot restore incompatible snapshot"); } final Optional matchingSnapshotId = repositoryData.getSnapshotIds().stream() - .filter(s -> request.snapshotName.equals(s.getName())).findFirst(); + .filter(s -> 
snapshotName.equals(s.getName())).findFirst(); if (matchingSnapshotId.isPresent() == false) { - throw new SnapshotRestoreException(request.repositoryName, request.snapshotName, "snapshot does not exist"); + throw new SnapshotRestoreException(repositoryName, snapshotName, "snapshot does not exist"); } final SnapshotId snapshotId = matchingSnapshotId.get(); final SnapshotInfo snapshotInfo = repository.getSnapshotInfo(snapshotId); - final Snapshot snapshot = new Snapshot(request.repositoryName, snapshotId); + final Snapshot snapshot = new Snapshot(repositoryName, snapshotId); // Make sure that we can restore from this snapshot - validateSnapshotRestorable(request.repositoryName, snapshotInfo); + validateSnapshotRestorable(repositoryName, snapshotInfo); // Resolve the indices from the snapshot that need to be restored final List indicesInSnapshot = filterIndices(snapshotInfo.indices(), request.indices(), request.indicesOptions()); @@ -218,7 +219,7 @@ public void restoreSnapshot(final RestoreRequest request, final ActionListener new ParameterizedMessage("[{}] failed to restore snapshot", - request.repositoryName + ":" + request.snapshotName), e); + request.repository() + ":" + request.snapshot()), e); listener.onFailure(e); } } @@ -820,7 +821,7 @@ public static int failedShards(ImmutableOpenMap renamedIndices(RestoreRequest request, List filteredIndices) { + private Map renamedIndices(RestoreSnapshotRequest request, List filteredIndices) { Map renamedIndices = new HashMap<>(); for (String index : filteredIndices) { String renamedIndex = index; @@ -829,7 +830,7 @@ private Map renamedIndices(RestoreRequest request, List } String previousIndex = renamedIndices.put(renamedIndex, index); if (previousIndex != null) { - throw new SnapshotRestoreException(request.repositoryName, request.snapshotName, + throw new SnapshotRestoreException(request.repository(), request.snapshot(), "indices [" + index + "] and [" + previousIndex + "] are renamed into the same index [" + 
renamedIndex + "]"); } } @@ -919,203 +920,4 @@ public static boolean isRepositoryInUse(ClusterState clusterState, String reposi } return false; } - - /** - * Restore snapshot request - */ - public static class RestoreRequest { - - private final String cause; - - private final String repositoryName; - - private final String snapshotName; - - private final String[] indices; - - private final String renamePattern; - - private final String renameReplacement; - - private final IndicesOptions indicesOptions; - - private final Settings settings; - - private final TimeValue masterNodeTimeout; - - private final boolean includeGlobalState; - - private final boolean partial; - - private final boolean includeAliases; - - private final Settings indexSettings; - - private final String[] ignoreIndexSettings; - - /** - * Constructs new restore request - * - * @param repositoryName repositoryName - * @param snapshotName snapshotName - * @param indices list of indices to restore - * @param indicesOptions indices options - * @param renamePattern pattern to rename indices - * @param renameReplacement replacement for renamed indices - * @param settings repository specific restore settings - * @param masterNodeTimeout master node timeout - * @param includeGlobalState include global state into restore - * @param partial allow partial restore - * @param indexSettings index settings that should be changed on restore - * @param ignoreIndexSettings index settings that shouldn't be restored - * @param cause cause for restoring the snapshot - */ - public RestoreRequest(String repositoryName, String snapshotName, String[] indices, IndicesOptions indicesOptions, - String renamePattern, String renameReplacement, Settings settings, - TimeValue masterNodeTimeout, boolean includeGlobalState, boolean partial, boolean includeAliases, - Settings indexSettings, String[] ignoreIndexSettings, String cause) { - this.repositoryName = Objects.requireNonNull(repositoryName); - this.snapshotName = 
Objects.requireNonNull(snapshotName); - this.indices = indices; - this.renamePattern = renamePattern; - this.renameReplacement = renameReplacement; - this.indicesOptions = indicesOptions; - this.settings = settings; - this.masterNodeTimeout = masterNodeTimeout; - this.includeGlobalState = includeGlobalState; - this.partial = partial; - this.includeAliases = includeAliases; - this.indexSettings = indexSettings; - this.ignoreIndexSettings = ignoreIndexSettings; - this.cause = cause; - } - - /** - * Returns restore operation cause - * - * @return restore operation cause - */ - public String cause() { - return cause; - } - - /** - * Returns repository name - * - * @return repository name - */ - public String repositoryName() { - return repositoryName; - } - - /** - * Returns snapshot name - * - * @return snapshot name - */ - public String snapshotName() { - return snapshotName; - } - - /** - * Return the list of indices to be restored - * - * @return the list of indices - */ - public String[] indices() { - return indices; - } - - /** - * Returns indices option flags - * - * @return indices options flags - */ - public IndicesOptions indicesOptions() { - return indicesOptions; - } - - /** - * Returns rename pattern - * - * @return rename pattern - */ - public String renamePattern() { - return renamePattern; - } - - /** - * Returns replacement pattern - * - * @return replacement pattern - */ - public String renameReplacement() { - return renameReplacement; - } - - /** - * Returns repository-specific restore settings - * - * @return restore settings - */ - public Settings settings() { - return settings; - } - - /** - * Returns true if global state should be restore during this restore operation - * - * @return restore global state flag - */ - public boolean includeGlobalState() { - return includeGlobalState; - } - - /** - * Returns true if incomplete indices will be restored - * - * @return partial indices restore flag - */ - public boolean partial() { - return partial; - 
} - - /** - * Returns true if aliases should be restore during this restore operation - * - * @return restore aliases state flag - */ - public boolean includeAliases() { - return includeAliases; - } - - /** - * Returns index settings that should be changed on restore - * - * @return restore aliases state flag - */ - public Settings indexSettings() { - return indexSettings; - } - - /** - * Returns index settings that that shouldn't be restored - * - * @return restore aliases state flag - */ - public String[] ignoreIndexSettings() { - return ignoreIndexSettings; - } - - - /** - * Return master node timeout - * - * @return master node timeout - */ - public TimeValue masterNodeTimeout() { - return masterNodeTimeout; - } - - } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index 36e1027dc5f87..825520d2f1541 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.get.GetResponse; @@ -115,11 +116,10 @@ public void testThatRepositoryRecoversEmptyIndexBasedOnLeaderSettings() throws I Settings.Builder settingsBuilder = Settings.builder() .put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followerIndex) .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true); - RestoreService.RestoreRequest restoreRequest = new RestoreService.RestoreRequest(leaderClusterRepoName, - CcrRepository.LATEST, new String[]{leaderIndex}, 
indicesOptions, - "^(.*)$", followerIndex, Settings.EMPTY, new TimeValue(1, TimeUnit.HOURS), false, - false, true, settingsBuilder.build(), new String[0], - "restore_snapshot[" + leaderClusterRepoName + ":" + leaderIndex + "]"); + RestoreSnapshotRequest restoreRequest = new RestoreSnapshotRequest(leaderClusterRepoName, CcrRepository.LATEST) + .indices(leaderIndex).indicesOptions(indicesOptions).renamePattern("^(.*)$") + .renameReplacement(followerIndex).masterNodeTimeout(new TimeValue(1L, TimeUnit.HOURS)) + .indexSettings(settingsBuilder); PlainActionFuture future = PlainActionFuture.newFuture(); restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); @@ -215,11 +215,10 @@ public void testDocsAreRecovered() throws Exception { Settings.Builder settingsBuilder = Settings.builder() .put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followerIndex) .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true); - RestoreService.RestoreRequest restoreRequest = new RestoreService.RestoreRequest(leaderClusterRepoName, - CcrRepository.LATEST, new String[]{leaderIndex}, indicesOptions, - "^(.*)$", followerIndex, Settings.EMPTY, new TimeValue(1, TimeUnit.HOURS), false, - false, true, settingsBuilder.build(), new String[0], - "restore_snapshot[" + leaderClusterRepoName + ":" + leaderIndex + "]"); + RestoreSnapshotRequest restoreRequest = new RestoreSnapshotRequest(leaderClusterRepoName, CcrRepository.LATEST) + .indices(leaderIndex).indicesOptions(indicesOptions).renamePattern("^(.*)$") + .renameReplacement(followerIndex).masterNodeTimeout(new TimeValue(1L, TimeUnit.HOURS)) + .indexSettings(settingsBuilder); PlainActionFuture future = PlainActionFuture.newFuture(); restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); @@ -252,11 +251,10 @@ public void testFollowerMappingIsUpdated() throws IOException { Settings.Builder settingsBuilder = Settings.builder() .put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followerIndex) 
.put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true); - RestoreService.RestoreRequest restoreRequest = new RestoreService.RestoreRequest(leaderClusterRepoName, - CcrRepository.LATEST, new String[]{leaderIndex}, indicesOptions, - "^(.*)$", followerIndex, Settings.EMPTY, new TimeValue(1, TimeUnit.HOURS), false, - false, true, settingsBuilder.build(), new String[0], - "restore_snapshot[" + leaderClusterRepoName + ":" + leaderIndex + "]"); + RestoreSnapshotRequest restoreRequest = new RestoreSnapshotRequest(leaderClusterRepoName, CcrRepository.LATEST) + .indices(leaderIndex).indicesOptions(indicesOptions).renamePattern("^(.*)$") + .renameReplacement(followerIndex).masterNodeTimeout(new TimeValue(1L, TimeUnit.HOURS)) + .indexSettings(settingsBuilder); // TODO: Eventually when the file recovery work is complete, we should test updated mappings by // indexing to the leader while the recovery is happening. However, into order to that test mappings From b6f06a48c0697eba6ccb413806be2658128f2bd1 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 Jan 2019 14:58:46 -0700 Subject: [PATCH 36/71] Implement follower rate limiting for file restore (#37449) This is related to #35975. This commit implements rate limiting on the follower side using a new class `CombinedRateLimiter`. 
--- .../common/util/CombinedRateLimiter.java | 59 +++++++++++++++++++ .../java/org/elasticsearch/xpack/ccr/Ccr.java | 6 +- .../elasticsearch/xpack/ccr/CcrSettings.java | 34 +++++++++-- .../xpack/ccr/repository/CcrRepository.java | 54 ++++++++++++----- .../xpack/ccr/CcrRepositoryIT.java | 57 ++++++++++++++++++ 5 files changed, 189 insertions(+), 21 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/util/CombinedRateLimiter.java diff --git a/server/src/main/java/org/elasticsearch/common/util/CombinedRateLimiter.java b/server/src/main/java/org/elasticsearch/common/util/CombinedRateLimiter.java new file mode 100644 index 0000000000000..23324cbe00b04 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/CombinedRateLimiter.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.util; + +import org.apache.lucene.store.RateLimiter; +import org.elasticsearch.common.unit.ByteSizeValue; + +import java.util.concurrent.atomic.AtomicLong; + +/** + * A rate limiter designed for multiple concurrent users. 
+ */ +public class CombinedRateLimiter { + + // TODO: This rate limiter has some concurrency issues between the two maybePause operations + + private final AtomicLong bytesSinceLastPause = new AtomicLong(); + private final RateLimiter.SimpleRateLimiter rateLimiter; + private volatile boolean rateLimit; + + public CombinedRateLimiter(ByteSizeValue maxBytesPerSec) { + rateLimit = maxBytesPerSec.getBytes() > 0; + rateLimiter = new RateLimiter.SimpleRateLimiter(maxBytesPerSec.getMbFrac()); + } + + public long maybePause(int bytes) { + if (rateLimit) { + long bytesSincePause = bytesSinceLastPause.addAndGet(bytes); + if (bytesSincePause > rateLimiter.getMinPauseCheckBytes()) { + // Time to pause + bytesSinceLastPause.addAndGet(-bytesSincePause); + return Math.max(rateLimiter.pause(bytesSincePause), 0); + } + } + return 0; + } + + public void setMBPerSec(ByteSizeValue maxBytesPerSec) { + rateLimit = maxBytesPerSec.getBytes() > 0; + rateLimiter.setMBPerSec(maxBytesPerSec.getMbFrac()); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 8bbacac3d8054..4a7f9600ffa42 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -117,6 +117,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E private final Settings settings; private final CcrLicenseChecker ccrLicenseChecker; private final SetOnce restoreSourceService = new SetOnce<>(); + private final SetOnce ccrSettings = new SetOnce<>(); private Client client; /** @@ -159,6 +160,8 @@ public Collection createComponents( CcrRestoreSourceService restoreSourceService = new CcrRestoreSourceService(); this.restoreSourceService.set(restoreSourceService); + CcrSettings ccrSettings = new CcrSettings(settings, clusterService.getClusterSettings()); + this.ccrSettings.set(ccrSettings); return 
Arrays.asList( ccrLicenseChecker, restoreSourceService, @@ -291,7 +294,8 @@ public List> getExecutorBuilders(Settings settings) { @Override public Map getInternalRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { - Repository.Factory repositoryFactory = (metadata) -> new CcrRepository(metadata, client, ccrLicenseChecker, settings); + Repository.Factory repositoryFactory = + (metadata) -> new CcrRepository(metadata, client, ccrLicenseChecker, settings, ccrSettings.get()); return Collections.singletonMap(CcrRepository.TYPE, repositoryFactory); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java index d7495dec8c2cf..fe0eb7853e3ce 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java @@ -5,9 +5,14 @@ */ package org.elasticsearch.xpack.ccr; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.CombinedRateLimiter; import org.elasticsearch.xpack.core.XPackSettings; import java.util.Arrays; @@ -18,11 +23,6 @@ */ public final class CcrSettings { - // prevent construction - private CcrSettings() { - - } - /** * Index setting for a following index. */ @@ -35,6 +35,14 @@ private CcrSettings() { public static final Setting CCR_AUTO_FOLLOW_WAIT_FOR_METADATA_TIMEOUT = Setting.timeSetting( "ccr.auto_follow.wait_for_metadata_timeout", TimeValue.timeValueSeconds(60), Property.NodeScope, Property.Dynamic); + + /** + * Max bytes a node can recover per second. 
+ */ + public static final Setting RECOVERY_MAX_BYTES_PER_SECOND = + Setting.byteSizeSetting("ccr.indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), + Setting.Property.Dynamic, Setting.Property.NodeScope); + /** * The settings defined by CCR. * @@ -44,7 +52,23 @@ static List> getSettings() { return Arrays.asList( XPackSettings.CCR_ENABLED_SETTING, CCR_FOLLOWING_INDEX_SETTING, + RECOVERY_MAX_BYTES_PER_SECOND, CCR_AUTO_FOLLOW_WAIT_FOR_METADATA_TIMEOUT); } + private final CombinedRateLimiter ccrRateLimiter; + + public CcrSettings(Settings settings, ClusterSettings clusterSettings) { + this.ccrRateLimiter = new CombinedRateLimiter(RECOVERY_MAX_BYTES_PER_SECOND.get(settings)); + clusterSettings.addSettingsUpdateConsumer(RECOVERY_MAX_BYTES_PER_SECOND, this::setMaxBytesPerSec); + } + + private void setMaxBytesPerSec(ByteSizeValue maxBytesPerSec) { + ccrRateLimiter.setMBPerSec(maxBytesPerSec); + } + + public CombinedRateLimiter getRateLimiter() { + return ccrRateLimiter; + } + } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index 5c3e0edda6177..33a8c64c96138 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -24,8 +24,10 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.CombinedRateLimiter; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineException; 
@@ -49,6 +51,7 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; +import org.elasticsearch.xpack.ccr.CcrSettings; import org.elasticsearch.xpack.ccr.action.CcrRequests; import org.elasticsearch.xpack.ccr.action.repositories.ClearCcrRestoreSessionAction; import org.elasticsearch.xpack.ccr.action.repositories.ClearCcrRestoreSessionRequest; @@ -66,6 +69,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.LongConsumer; /** * This repository relies on a remote cluster for Ccr restores. It is read-only so it can only be used to @@ -79,12 +83,17 @@ public class CcrRepository extends AbstractLifecycleComponent implements Reposit private static final SnapshotId SNAPSHOT_ID = new SnapshotId(LATEST, LATEST); private final RepositoryMetaData metadata; + private final CcrSettings ccrSettings; private final String remoteClusterAlias; private final Client client; private final CcrLicenseChecker ccrLicenseChecker; - public CcrRepository(RepositoryMetaData metadata, Client client, CcrLicenseChecker ccrLicenseChecker, Settings settings) { + private final CounterMetric throttledTime = new CounterMetric(); + + public CcrRepository(RepositoryMetaData metadata, Client client, CcrLicenseChecker ccrLicenseChecker, Settings settings, + CcrSettings ccrSettings) { this.metadata = metadata; + this.ccrSettings = ccrSettings; assert metadata.name().startsWith(NAME_PREFIX) : "CcrRepository metadata.name() must start with: " + NAME_PREFIX; this.remoteClusterAlias = Strings.split(metadata.name(), NAME_PREFIX)[1]; this.ccrLicenseChecker = ccrLicenseChecker; @@ -206,7 +215,7 @@ public long getSnapshotThrottleTimeInNanos() { @Override public long getRestoreThrottleTimeInNanos() { - return 0; + return throttledTime.count(); } @Override @@ -257,7 +266,7 @@ public void restoreShard(IndexShard indexShard, SnapshotId snapshotId, Version v // TODO: There should be 
some local timeout. And if the remote cluster returns an unknown session // response, we should be able to retry by creating a new session. String name = metadata.name(); - try (RestoreSession restoreSession = RestoreSession.openSession(name, remoteClient, leaderShardId, indexShard, recoveryState)) { + try (RestoreSession restoreSession = openSession(name, remoteClient, leaderShardId, indexShard, recoveryState)) { restoreSession.restoreFiles(); } catch (Exception e) { throw new IndexShardRestoreFailedException(indexShard.shardId(), "failed to restore snapshot [" + snapshotId + "]", e); @@ -285,6 +294,15 @@ private void maybeUpdateMappings(Client localClient, Client remoteClient, Index } } + private RestoreSession openSession(String repositoryName, Client remoteClient, ShardId leaderShardId, IndexShard indexShard, + RecoveryState recoveryState) { + String sessionUUID = UUIDs.randomBase64UUID(); + PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse response = remoteClient.execute(PutCcrRestoreSessionAction.INSTANCE, + new PutCcrRestoreSessionRequest(sessionUUID, leaderShardId)).actionGet(); + return new RestoreSession(repositoryName, remoteClient, sessionUUID, response.getNode(), indexShard, recoveryState, + response.getStoreFileMetaData(), ccrSettings.getRateLimiter(), throttledTime::inc); + } + private static class RestoreSession extends FileRestoreContext implements Closeable { private static final int BUFFER_SIZE = 1 << 16; @@ -293,23 +311,19 @@ private static class RestoreSession extends FileRestoreContext implements Closea private final String sessionUUID; private final DiscoveryNode node; private final Store.MetadataSnapshot sourceMetaData; + private final CombinedRateLimiter rateLimiter; + private final LongConsumer throttleListener; RestoreSession(String repositoryName, Client remoteClient, String sessionUUID, DiscoveryNode node, IndexShard indexShard, - RecoveryState recoveryState, Store.MetadataSnapshot sourceMetaData) { + RecoveryState recoveryState, 
Store.MetadataSnapshot sourceMetaData, CombinedRateLimiter rateLimiter, + LongConsumer throttleListener) { super(repositoryName, indexShard, SNAPSHOT_ID, recoveryState, BUFFER_SIZE); this.remoteClient = remoteClient; this.sessionUUID = sessionUUID; this.node = node; this.sourceMetaData = sourceMetaData; - } - - static RestoreSession openSession(String repositoryName, Client remoteClient, ShardId leaderShardId, IndexShard indexShard, - RecoveryState recoveryState) { - String sessionUUID = UUIDs.randomBase64UUID(); - PutCcrRestoreSessionAction.PutCcrRestoreSessionResponse response = remoteClient.execute(PutCcrRestoreSessionAction.INSTANCE, - new PutCcrRestoreSessionRequest(sessionUUID, leaderShardId)).actionGet(); - return new RestoreSession(repositoryName, remoteClient, sessionUUID, response.getNode(), indexShard, recoveryState, - response.getStoreFileMetaData()); + this.rateLimiter = rateLimiter; + this.throttleListener = throttleListener; } void restoreFiles() throws IOException { @@ -324,7 +338,7 @@ void restoreFiles() throws IOException { @Override protected InputStream fileInputStream(BlobStoreIndexShardSnapshot.FileInfo fileInfo) { - return new RestoreFileInputStream(remoteClient, sessionUUID, node, fileInfo.metadata()); + return new RestoreFileInputStream(remoteClient, sessionUUID, node, fileInfo.metadata(), rateLimiter, throttleListener); } @Override @@ -341,14 +355,19 @@ private static class RestoreFileInputStream extends InputStream { private final String sessionUUID; private final DiscoveryNode node; private final StoreFileMetaData fileToRecover; + private final CombinedRateLimiter rateLimiter; + private final LongConsumer throttleListener; private long pos = 0; - private RestoreFileInputStream(Client remoteClient, String sessionUUID, DiscoveryNode node, StoreFileMetaData fileToRecover) { + private RestoreFileInputStream(Client remoteClient, String sessionUUID, DiscoveryNode node, StoreFileMetaData fileToRecover, + CombinedRateLimiter rateLimiter, 
LongConsumer throttleListener) { this.remoteClient = remoteClient; this.sessionUUID = sessionUUID; this.node = node; this.fileToRecover = fileToRecover; + this.rateLimiter = rateLimiter; + this.throttleListener = throttleListener; } @@ -365,6 +384,10 @@ public int read(byte[] bytes, int off, int len) throws IOException { } int bytesRequested = (int) Math.min(remainingBytes, len); + + long nanosPaused = rateLimiter.maybePause(bytesRequested); + throttleListener.accept(nanosPaused); + String fileName = fileToRecover.name(); GetCcrRestoreFileChunkRequest request = new GetCcrRestoreFileChunkRequest(node, sessionUUID, fileName, bytesRequested); GetCcrRestoreFileChunkAction.GetCcrRestoreFileChunkResponse response = @@ -388,5 +411,6 @@ public int read(byte[] bytes, int off, int len) throws IOException { return bytesReceived; } + } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index 825520d2f1541..a635487084b71 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -39,6 +40,8 @@ import org.elasticsearch.xpack.ccr.repository.CcrRestoreSourceService; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -234,6 +237,60 @@ public void testDocsAreRecovered() throws Exception { thread.join(); } + public void 
testRateLimitingIsEmployed() throws Exception { + ClusterUpdateSettingsRequest settingsRequest = new ClusterUpdateSettingsRequest(); + settingsRequest.persistentSettings(Settings.builder().put(CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND.getKey(), "10K")); + assertAcked(followerClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + + String leaderClusterRepoName = CcrRepository.NAME_PREFIX + "leader_cluster"; + String leaderIndex = "index1"; + String followerIndex = "index2"; + + final int numberOfPrimaryShards = randomIntBetween(1, 3); + final String leaderIndexSettings = getIndexSettings(numberOfPrimaryShards, between(0, 1), + singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); + assertAcked(leaderClient().admin().indices().prepareCreate(leaderIndex).setSource(leaderIndexSettings, XContentType.JSON)); + ensureLeaderGreen(leaderIndex); + + final RestoreService restoreService = getFollowerCluster().getCurrentMasterNodeInstance(RestoreService.class); + final ClusterService clusterService = getFollowerCluster().getCurrentMasterNodeInstance(ClusterService.class); + + List repositories = new ArrayList<>(); + + for (RepositoriesService repositoriesService : getFollowerCluster().getDataOrMasterNodeInstances(RepositoriesService.class)) { + Repository repository = repositoriesService.repository(leaderClusterRepoName); + repositories.add((CcrRepository) repository); + } + + logger.info("--> indexing some data"); + for (int i = 0; i < 100; i++) { + final String source = String.format(Locale.ROOT, "{\"f\":%d}", i); + leaderClient().prepareIndex("index1", "doc", Integer.toString(i)).setSource(source, XContentType.JSON).get(); + } + + leaderClient().admin().indices().prepareFlush(leaderIndex).setForce(true).setWaitIfOngoing(true).get(); + + Settings.Builder settingsBuilder = Settings.builder() + .put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followerIndex) + .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true); + 
RestoreService.RestoreRequest restoreRequest = new RestoreService.RestoreRequest(leaderClusterRepoName, + CcrRepository.LATEST, new String[]{leaderIndex}, indicesOptions, + "^(.*)$", followerIndex, Settings.EMPTY, new TimeValue(1, TimeUnit.HOURS), false, + false, true, settingsBuilder.build(), new String[0], + "restore_snapshot[" + leaderClusterRepoName + ":" + leaderIndex + "]"); + + PlainActionFuture future = PlainActionFuture.newFuture(); + restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); + future.actionGet(); + + assertTrue(repositories.stream().anyMatch(cr -> cr.getRestoreThrottleTimeInNanos() > 0)); + + settingsRequest = new ClusterUpdateSettingsRequest(); + ByteSizeValue defaultValue = CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND.getDefault(Settings.EMPTY); + settingsRequest.persistentSettings(Settings.builder().put(CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND.getKey(), defaultValue)); + assertAcked(followerClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + } + public void testFollowerMappingIsUpdated() throws IOException { String leaderClusterRepoName = CcrRepository.NAME_PREFIX + "leader_cluster"; String leaderIndex = "index1"; From 978c818d0f27b4d0c2e9cbf46a0eccc461a91787 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 Jan 2019 15:31:27 -0700 Subject: [PATCH 37/71] Use RestoreSnapshotRequest in CcrRepositoryIT Commit #37535 removed an internal restore request in favor of the RestoreSnapshotRequest. Commit #37449 added a new test that used the internal restore request. This commit modifies the new test to use the RestoreSnapshotRequest. 
--- .../org/elasticsearch/xpack/ccr/CcrRepositoryIT.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index a635487084b71..47cc1c528fa5c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -273,11 +273,10 @@ public void testRateLimitingIsEmployed() throws Exception { Settings.Builder settingsBuilder = Settings.builder() .put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, followerIndex) .put(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey(), true); - RestoreService.RestoreRequest restoreRequest = new RestoreService.RestoreRequest(leaderClusterRepoName, - CcrRepository.LATEST, new String[]{leaderIndex}, indicesOptions, - "^(.*)$", followerIndex, Settings.EMPTY, new TimeValue(1, TimeUnit.HOURS), false, - false, true, settingsBuilder.build(), new String[0], - "restore_snapshot[" + leaderClusterRepoName + ":" + leaderIndex + "]"); + RestoreSnapshotRequest restoreRequest = new RestoreSnapshotRequest(leaderClusterRepoName, CcrRepository.LATEST) + .indices(leaderIndex).indicesOptions(indicesOptions).renamePattern("^(.*)$") + .renameReplacement(followerIndex).masterNodeTimeout(new TimeValue(1L, TimeUnit.HOURS)) + .indexSettings(settingsBuilder); PlainActionFuture future = PlainActionFuture.newFuture(); restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); From 62ddc8c7762a363435a2f87f1df47b63b67450ca Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 18 Jan 2019 08:36:22 +0100 Subject: [PATCH 38/71] Reenable UnicastZenPingTests#testSimplePings * This was muted needlessly, the problem in #26701 only applies to `6.x` * Relates #26701 --- .../org/elasticsearch/discovery/zen/UnicastZenPingTests.java | 1 - 1 file changed, 1 
deletion(-) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 3178663a1f684..f06ef3e72808a 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -135,7 +135,6 @@ public void tearDown() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/26701") public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); From 080c07361829e8c852b08745cd3632b8923818cb Mon Sep 17 00:00:00 2001 From: Mike Place Date: Fri, 18 Jan 2019 00:28:48 -0800 Subject: [PATCH 39/71] Minor docs cleanup (#37595) Not all terminals are accessed via SSH. --- docs/reference/cat.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index 7a2262b7962bb..0da752aba42d2 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -9,8 +9,8 @@ JSON is great... for computers. Even if it's pretty-printed, trying to find relationships in the data is tedious. Human eyes, especially -when looking at an ssh terminal, need compact and aligned text. The -cat API aims to meet this need. +when looking at a terminal, need compact and aligned text. The cat API +aims to meet this need. 
All the cat commands accept a query string parameter `help` to see all the headers and info they provide, and the `/_cat` command alone lists all From 2f0e0b24265bd94b28235ab76df5e142fa3529ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 18 Jan 2019 09:33:36 +0100 Subject: [PATCH 40/71] Allow indices.get_mapping response parsing without types (#37492) This change adds deprecation warning to the indices.get_mapping API in case the "inlcude_type_name" parameter is set to "true" and changes the parsing code in GetMappingsResponse to parse the type-less response instead of the one containing types. As a consequence the HLRC client doesn't need to force "include_type_name=true" any more and the GetMappingsResponseTests can be adapted to the new format as well. Also removing some "include_type_name" parameters in yaml test and docs where not necessary. --- .../client/IndicesRequestConverters.java | 1 - .../elasticsearch/client/IndicesClientIT.java | 4 +- .../client/IndicesRequestConvertersTests.java | 1 - .../IndicesClientDocumentationIT.java | 2 - .../high-level/indices/get_mappings.asciidoc | 1 - .../api/indices.get_mapping.json | 2 +- .../test/indices.get_mapping/10_basic.yml | 2 - .../test/indices.get_mapping/60_empty.yml | 3 - .../mapping/get/GetMappingsResponse.java | 20 ++--- .../admin/indices/RestGetMappingAction.java | 6 ++ .../mapping/get/GetMappingsResponseTests.java | 80 +++++++++++++++---- .../indices/RestGetMappingActionTests.java | 26 ++++++ 12 files changed, 105 insertions(+), 43 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java index 79b0646770845..f3ce8d2a935ce 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java 
@@ -150,7 +150,6 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOExcep parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); parameters.withLocal(getMappingsRequest.local()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); return request; } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index f3a2fd2baaa3a..0f29950355472 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -443,9 +443,7 @@ public void testGetMapping() throws IOException { Map getIndexResponse = getAsMap(indexName); assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - GetMappingsRequest request = new GetMappingsRequest() - .indices(indexName) - .types("_doc"); + GetMappingsRequest request = new GetMappingsRequest().indices(indexName); GetMappingsResponse getMappingsResponse = execute(request, highLevelClient().indices()::getMapping, highLevelClient().indices()::getMappingAsync); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java index 663c40b17a8b2..308c576edafe6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -217,7 +217,6 @@ public void testGetMapping() throws IOException { getMappingRequest::indicesOptions, expectedParams); RequestConvertersTests.setRandomMasterTimeout(getMappingRequest, expectedParams); 
RequestConvertersTests.setRandomLocal(getMappingRequest, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); Request request = IndicesRequestConverters.getMappings(getMappingRequest); StringJoiner endpoint = new StringJoiner("/", "/", ""); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 8f9d8a069fd48..3d1b77562215b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -609,7 +609,6 @@ public void testGetMapping() throws IOException { // tag::get-mappings-request GetMappingsRequest request = new GetMappingsRequest(); // <1> request.indices("twitter"); // <2> - request.types("_doc"); // <3> // end::get-mappings-request // tag::get-mappings-request-masterTimeout @@ -665,7 +664,6 @@ public void testGetMappingAsync() throws Exception { { GetMappingsRequest request = new GetMappingsRequest(); request.indices("twitter"); - request.types("_doc"); // tag::get-mappings-execute-listener ActionListener listener = diff --git a/docs/java-rest/high-level/indices/get_mappings.asciidoc b/docs/java-rest/high-level/indices/get_mappings.asciidoc index c8616cdab9271..a42a8ac77b338 100644 --- a/docs/java-rest/high-level/indices/get_mappings.asciidoc +++ b/docs/java-rest/high-level/indices/get_mappings.asciidoc @@ -18,7 +18,6 @@ include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> An empty request that will return all indices and types <2> Setting the indices to fetch mapping for -<3> The types to be returned ==== Optional arguments The following arguments can also optionally be provided: diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index ccec2ddffdd0c..d9016ec402498 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -18,7 +18,7 @@ "params": { "include_type_name": { "type" : "boolean", - "description" : "Whether to add the type name to the response" + "description" : "Whether to add the type name to the response (default: false)" }, "ignore_unavailable": { "type" : "boolean", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml index d9ea7d325e3a8..76519cc4c4c01 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml @@ -5,13 +5,11 @@ setup: reason: include_type_name defaults to true before 7.0 - do: indices.create: - include_type_name: false index: test_1 body: mappings: {} - do: indices.create: - include_type_name: false index: test_2 body: mappings: {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/60_empty.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/60_empty.yml index e2a502f30a84d..b5069295a1fa6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/60_empty.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/60_empty.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - do: indices.create: index: test_1 diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java index 4c037bd1d6df9..50b7a36426802 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; import java.io.IOException; @@ -101,22 +102,17 @@ public static GetMappingsResponse fromXContent(XContentParser parser) throws IOE for (Map.Entry entry : parts.entrySet()) { final String indexName = entry.getKey(); assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass(); - @SuppressWarnings("unchecked") - final Map mapping = (Map) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName()); - ImmutableOpenMap.Builder typeBuilder = new ImmutableOpenMap.Builder<>(); - for (Map.Entry typeEntry : mapping.entrySet()) { - final String typeName = typeEntry.getKey(); - assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " + - typeEntry.getValue().getClass(); - @SuppressWarnings("unchecked") - final Map fieldMappings = (Map) typeEntry.getValue(); - MappingMetaData mmd = new MappingMetaData(typeName, fieldMappings); - typeBuilder.put(typeName, mmd); + @SuppressWarnings("unchecked") + final Map fieldMappings = (Map) ((Map) entry.getValue()) + .get(MAPPINGS.getPreferredName()); + if (fieldMappings.isEmpty() == false) { + assert fieldMappings instanceof Map : "expected a map as inner type mapping, but got: " + fieldMappings.getClass(); + MappingMetaData mmd = new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, fieldMappings); + 
typeBuilder.put(MapperService.SINGLE_MAPPING_NAME, mmd); } builder.put(indexName, typeBuilder.build()); } - return new GetMappingsResponse(builder.build()); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index da7f2af501db2..8826932e252ba 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.rest.action.admin.indices; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -59,6 +60,8 @@ public class RestGetMappingAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestGetMappingAction.class); private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get mapping requests is deprecated. 
" + + "The parameter will be removed in the next major version."; public RestGetMappingAction(final Settings settings, final RestController controller) { super(settings); @@ -90,6 +93,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC throw new IllegalArgumentException("Types cannot be provided in get mapping requests, unless" + " include_type_name is set to true."); } + if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + deprecationLogger.deprecatedAndMaybeLog("get_mapping_with_types", TYPES_DEPRECATION_MESSAGE); + } final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); getMappingsRequest.indices(indices).types(types); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java index 633d74acde174..7d1a19c65ed52 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -24,8 +24,11 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -38,7 +41,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; +import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; public class 
GetMappingsResponseTests extends AbstractStreamableXContentTestCase { @@ -86,12 +89,6 @@ protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throw return mutate(instance); } - public static ImmutableOpenMap createMappingsForIndex() { - // rarely have no types - int typeCount = rarely() ? 0 : scaledRandomIntBetween(1, 3); - return createMappingsForIndex(typeCount, true); - } - public static ImmutableOpenMap createMappingsForIndex(int typeCount, boolean randomTypeName) { List typeMappings = new ArrayList<>(typeCount); @@ -122,22 +119,18 @@ public static ImmutableOpenMap createMappingsForIndex(i @Override protected GetMappingsResponse createTestInstance() { + return createTestInstance(true); + } + + private GetMappingsResponse createTestInstance(boolean randomTypeNames) { ImmutableOpenMap.Builder> indexBuilder = ImmutableOpenMap.builder(); - indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex()); + int typeCount = rarely() ? 0 : 1; + indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex(typeCount, randomTypeNames)); GetMappingsResponse resp = new GetMappingsResponse(indexBuilder.build()); logger.debug("--> created: {}", resp); return resp; } - /** - * For now, we only unit test the legacy typed responses. This will soon no longer be the - * case, as we introduce support for typeless xContent parsing in {@link GetMappingsResponse}. 
- */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); - } - // Not meant to be exhaustive private static Map randomFieldMapping() { Map mappings = new HashMap<>(); @@ -170,4 +163,57 @@ private static Map randomFieldMapping() { } return mappings; } + + @Override + protected GetMappingsResponse createXContextTestInstance(XContentType xContentType) { + // don't use random type names for XContent roundtrip tests because we cannot parse them back anymore + return createTestInstance(false); + } + + /** + * check that the "old" legacy response format with types works as expected + */ + public void testToXContentWithTypes() throws IOException { + Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "true")); + xContentTester(this::createParser, t -> createTestInstance(), params, this::fromXContentWithTypes) + .numberOfTestRuns(NUMBER_OF_TEST_RUNS) + .supportsUnknownFields(supportsUnknownFields()) + .shuffleFieldsExceptions(getShuffleFieldsExceptions()) + .randomFieldsExcludeFilter(getRandomFieldsExcludeFilter()) + .assertEqualsConsumer(this::assertEqualInstances) + .assertToXContentEquivalence(true) + .test(); + } + + /** + * including the pre-7.0 parsing code here to test that older HLRC clients using this can parse the responses that are + * returned when "include_type_name=true" + */ + private GetMappingsResponse fromXContentWithTypes(XContentParser parser) throws IOException { + if (parser.currentToken() == null) { + parser.nextToken(); + } + assert parser.currentToken() == XContentParser.Token.START_OBJECT; + Map parts = parser.map(); + + ImmutableOpenMap.Builder> builder = new ImmutableOpenMap.Builder<>(); + for (Map.Entry entry : parts.entrySet()) { + final String indexName = entry.getKey(); + assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + 
entry.getValue().getClass(); + final Map mapping = (Map) ((Map) entry.getValue()).get("mappings"); + + ImmutableOpenMap.Builder typeBuilder = new ImmutableOpenMap.Builder<>(); + for (Map.Entry typeEntry : mapping.entrySet()) { + final String typeName = typeEntry.getKey(); + assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " + + typeEntry.getValue().getClass(); + final Map fieldMappings = (Map) typeEntry.getValue(); + MappingMetaData mmd = new MappingMetaData(typeName, fieldMappings); + typeBuilder.put(typeName, mmd); + } + builder.put(indexName, typeBuilder.build()); + } + + return new GetMappingsResponse(builder.build()); + } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java index 8eea9dc34c2fb..7ce32e371de6e 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.test.rest.FakeRestChannel; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; +import org.junit.Before; import java.util.HashMap; import java.util.Map; @@ -37,6 +38,11 @@ public class RestGetMappingActionTests extends RestActionTestCase { + @Before + public void setUpAction() { + new RestGetMappingAction(Settings.EMPTY, controller()); + } + public void testTypeExistsDeprecation() throws Exception { Map params = new HashMap<>(); params.put("type", "_doc"); @@ -69,4 +75,24 @@ public void testTypeInPath() { assertEquals(1, channel.errors().get()); assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); } + + /** + * Setting "include_type_name" to true or false should cause a deprecation warning starting in 7.0 + */ + public void 
testTypeUrlParameterDeprecation() throws Exception { + Map params = new HashMap<>(); + params.put(INCLUDE_TYPE_NAME_PARAMETER, Boolean.toString(randomBoolean())); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(RestRequest.Method.GET) + .withParams(params) + .withPath("/some_index/_mappings") + .build(); + + FakeRestChannel channel = new FakeRestChannel(request, false, 1); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + controller().dispatchRequest(request, channel, threadContext); + + assertWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE); + } + } From 25aac4f77fd00f9a181aece5151f7614e0a0563d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 18 Jan 2019 09:34:11 +0100 Subject: [PATCH 41/71] Remove `include_type_name` in asciidoc where possible (#37568) The "include_type_name" parameter was temporarily introduced in #37285 to facilitate moving the default parameter setting to "false" in many places in the documentation code snippets. Most of the places can simply be reverted without causing errors. In this change I looked for asciidoc files that contained the "include_type_name=true" addition when creating new indices but didn't look like they made use of the "_doc" type for mappings. This is mostly the case e.g. in the analysis docs where index creation often only contains settings. I manually corrected the use of types in some places where the docs still used an explicit type name and not the dummy "_doc" type.
--- .../painless-context-examples.asciidoc | 26 ++++--- docs/plugins/analysis-kuromoji.asciidoc | 14 ++-- docs/plugins/analysis-nori.asciidoc | 8 +-- docs/plugins/analysis-phonetic.asciidoc | 2 +- docs/plugins/store-smb.asciidoc | 2 +- .../bucket/nested-aggregation.asciidoc | 8 +-- .../reverse-nested-aggregation.asciidoc | 26 ++++--- .../significantterms-aggregation.asciidoc | 18 +++-- .../bucket/terms-aggregation.asciidoc | 18 +++-- .../analyzers/custom-analyzer.asciidoc | 4 +- .../analyzers/fingerprint-analyzer.asciidoc | 4 +- .../analyzers/keyword-analyzer.asciidoc | 2 +- .../analysis/analyzers/lang-analyzer.asciidoc | 68 +++++++++---------- .../analyzers/pattern-analyzer.asciidoc | 6 +- .../analyzers/simple-analyzer.asciidoc | 2 +- .../analyzers/standard-analyzer.asciidoc | 4 +- .../analysis/analyzers/stop-analyzer.asciidoc | 4 +- .../analyzers/whitespace-analyzer.asciidoc | 2 +- .../charfilters/htmlstrip-charfilter.asciidoc | 2 +- .../charfilters/mapping-charfilter.asciidoc | 4 +- .../asciifolding-tokenfilter.asciidoc | 4 +- .../cjk-bigram-tokenfilter.asciidoc | 2 +- .../common-grams-tokenfilter.asciidoc | 4 +- .../compound-word-tokenfilter.asciidoc | 2 +- .../condition-tokenfilter.asciidoc | 4 +- .../tokenfilters/elision-tokenfilter.asciidoc | 2 +- .../hunspell-tokenfilter.asciidoc | 2 +- .../keep-types-tokenfilter.asciidoc | 6 +- .../keep-words-tokenfilter.asciidoc | 2 +- .../keyword-marker-tokenfilter.asciidoc | 2 +- .../keyword-repeat-tokenfilter.asciidoc | 2 +- .../limit-token-count-tokenfilter.asciidoc | 2 +- .../lowercase-tokenfilter.asciidoc | 2 +- .../multiplexer-tokenfilter.asciidoc | 4 +- .../pattern-capture-tokenfilter.asciidoc | 4 +- .../predicate-tokenfilter.asciidoc | 4 +- .../snowball-tokenfilter.asciidoc | 2 +- .../stemmer-override-tokenfilter.asciidoc | 4 +- .../tokenfilters/stemmer-tokenfilter.asciidoc | 2 +- .../tokenfilters/stop-tokenfilter.asciidoc | 4 +- .../synonym-graph-tokenfilter.asciidoc | 8 +-- 
.../tokenfilters/synonym-tokenfilter.asciidoc | 10 +-- .../tokenizers/classic-tokenizer.asciidoc | 2 +- .../tokenizers/ngram-tokenizer.asciidoc | 2 +- .../pathhierarchy-tokenizer.asciidoc | 2 +- .../tokenizers/pattern-tokenizer.asciidoc | 4 +- .../simplepattern-tokenizer.asciidoc | 2 +- .../simplepatternsplit-tokenizer.asciidoc | 2 +- .../tokenizers/standard-tokenizer.asciidoc | 2 +- .../tokenizers/uaxurlemail-tokenizer.asciidoc | 2 +- docs/reference/cat/alias.asciidoc | 2 +- docs/reference/ccr/getting-started.asciidoc | 46 ++++++------- docs/reference/ilm/apis/explain.asciidoc | 2 +- docs/reference/ilm/apis/move-to-step.asciidoc | 2 +- .../apis/remove-policy-from-index.asciidoc | 2 +- docs/reference/ilm/error-handling.asciidoc | 2 +- .../ilm/getting-started-ilm.asciidoc | 2 +- .../reference/ilm/policy-definitions.asciidoc | 2 +- .../ilm/set-up-lifecycle-policy.asciidoc | 4 +- docs/reference/ilm/start-stop-ilm.asciidoc | 2 +- .../ilm/update-lifecycle-policy.asciidoc | 4 +- .../allocation/prioritization.asciidoc | 4 +- docs/reference/index-modules/store.asciidoc | 4 +- docs/reference/indices/analyze.asciidoc | 4 +- docs/reference/indices/recovery.asciidoc | 2 +- .../reference/indices/rollover-index.asciidoc | 6 +- docs/reference/indices/split-index.asciidoc | 2 +- .../mapping/types/geo-shape.asciidoc | 52 +++++++------- .../modules/indices/request_cache.asciidoc | 2 +- docs/reference/query-dsl/mlt-query.asciidoc | 34 +++++----- .../search/request/highlighting.asciidoc | 24 +++---- 71 files changed, 252 insertions(+), 272 deletions(-) diff --git a/docs/painless/painless-contexts/painless-context-examples.asciidoc b/docs/painless/painless-contexts/painless-context-examples.asciidoc index 79fe9056aba95..8a0691459960f 100644 --- a/docs/painless/painless-contexts/painless-context-examples.asciidoc +++ b/docs/painless/painless-contexts/painless-context-examples.asciidoc @@ -43,22 +43,20 @@ the request URL. 
+ [source,js] ---- -PUT /seats?include_type_name=true +PUT /seats { "mappings": { - "seat": { - "properties": { - "theatre": { "type": "keyword" }, - "play": { "type": "text" }, - "actors": { "type": "text" }, - "row": { "type": "integer" }, - "number": { "type": "integer" }, - "cost": { "type": "double" }, - "sold": { "type": "boolean" }, - "datetime": { "type": "date" }, - "date": { "type": "keyword" }, - "time": { "type": "keyword" } - } + "properties": { + "theatre": { "type": "keyword" }, + "play": { "type": "text" }, + "actors": { "type": "text" }, + "row": { "type": "integer" }, + "number": { "type": "integer" }, + "cost": { "type": "double" }, + "sold": { "type": "boolean" }, + "datetime": { "type": "date" }, + "date": { "type": "keyword" }, + "time": { "type": "keyword" } } } } diff --git a/docs/plugins/analysis-kuromoji.asciidoc b/docs/plugins/analysis-kuromoji.asciidoc index fe7ed1cc317b9..383df5afb485b 100644 --- a/docs/plugins/analysis-kuromoji.asciidoc +++ b/docs/plugins/analysis-kuromoji.asciidoc @@ -124,7 +124,7 @@ Then create an analyzer as follows: [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index": { @@ -186,7 +186,7 @@ BaseFormAttribute. This acts as a lemmatizer for verbs and adjectives. 
Example: [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index": { @@ -243,7 +243,7 @@ For example: [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index": { @@ -317,7 +317,7 @@ katakana reading form: [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index":{ @@ -381,7 +381,7 @@ This token filter accepts the following setting: [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index": { @@ -434,7 +434,7 @@ predefined list, then use the [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index": { @@ -493,7 +493,7 @@ to regular Arabic decimal numbers in half-width characters. 
For example: [source,js] -------------------------------------------------- -PUT kuromoji_sample?include_type_name=true +PUT kuromoji_sample { "settings": { "index": { diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc index 1fe21fa13da97..68ec943533aa9 100644 --- a/docs/plugins/analysis-nori.asciidoc +++ b/docs/plugins/analysis-nori.asciidoc @@ -90,7 +90,7 @@ Then create an analyzer as follows: [source,js] -------------------------------------------------- -PUT nori_sample?include_type_name=true +PUT nori_sample { "settings": { "index": { @@ -164,7 +164,7 @@ the `user_dictionary_rules` option: [source,js] -------------------------------------------------- -PUT nori_sample?include_type_name=true +PUT nori_sample { "settings": { "index": { @@ -332,7 +332,7 @@ For example: [source,js] -------------------------------------------------- -PUT nori_sample?include_type_name=true +PUT nori_sample { "settings": { "index": { @@ -398,7 +398,7 @@ The `nori_readingform` token filter rewrites tokens written in Hanja to their Ha [source,js] -------------------------------------------------- -PUT nori_sample?include_type_name=true +PUT nori_sample { "settings": { "index":{ diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc index 7996edb6afba3..e22f819e1eb3e 100644 --- a/docs/plugins/analysis-phonetic.asciidoc +++ b/docs/plugins/analysis-phonetic.asciidoc @@ -29,7 +29,7 @@ The `phonetic` token filter takes the following settings: [source,js] -------------------------------------------------- -PUT phonetic_sample?include_type_name=true +PUT phonetic_sample { "settings": { "index": { diff --git a/docs/plugins/store-smb.asciidoc b/docs/plugins/store-smb.asciidoc index e0649873f8794..4f713568655cb 100644 --- a/docs/plugins/store-smb.asciidoc +++ b/docs/plugins/store-smb.asciidoc @@ -46,7 +46,7 @@ It can also be set on a per-index basis at index creation time: [source,js] ---- -PUT 
my_index?include_type_name=true +PUT my_index { "settings": { "index.store.type": "smb_mmap_fs" diff --git a/docs/reference/aggregations/bucket/nested-aggregation.asciidoc b/docs/reference/aggregations/bucket/nested-aggregation.asciidoc index 2acf760fff3d1..d323eb1c134ee 100644 --- a/docs/reference/aggregations/bucket/nested-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/nested-aggregation.asciidoc @@ -8,10 +8,9 @@ price for the product. The mapping could look like: [source,js] -------------------------------------------------- -PUT /index?include_type_name=true +PUT /index { - "mappings": { - "product" : { + "mappings": { "properties" : { "resellers" : { <1> "type" : "nested", @@ -22,7 +21,6 @@ PUT /index?include_type_name=true } } } - } } -------------------------------------------------- // CONSOLE @@ -52,7 +50,7 @@ GET /_search -------------------------------------------------- // CONSOLE // TEST[s/GET \/_search/GET \/_search\?filter_path=aggregations/] -// TEST[s/^/PUT index\/product\/0\?refresh\n{"name":"led", "resellers": [{"name": "foo", "price": 350.00}, {"name": "bar", "price": 500.00}]}\n/] +// TEST[s/^/PUT index\/_doc\/0\?refresh\n{"name":"led", "resellers": [{"name": "foo", "price": 350.00}, {"name": "bar", "price": 500.00}]}\n/] As you can see above, the nested aggregation requires the `path` of the nested documents within the top level documents. Then one can define any type of aggregation over these nested documents. diff --git a/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc b/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc index 493326651ef99..f922d90331fd0 100644 --- a/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/reverse-nested-aggregation.asciidoc @@ -17,21 +17,19 @@ the issue documents as nested documents. 
The mapping could look like: [source,js] -------------------------------------------------- -PUT /issues?include_type_name=true +PUT /issues { "mappings": { - "issue" : { - "properties" : { - "tags" : { "type" : "keyword" }, - "comments" : { <1> - "type" : "nested", - "properties" : { - "username" : { "type" : "keyword" }, - "comment" : { "type" : "text" } - } - } - } - } + "properties" : { + "tags" : { "type" : "keyword" }, + "comments" : { <1> + "type" : "nested", + "properties" : { + "username" : { "type" : "keyword" }, + "comment" : { "type" : "text" } + } + } + } } } -------------------------------------------------- @@ -45,7 +43,7 @@ tags of the issues the user has commented on: [source,js] -------------------------------------------------- -POST /issues/issue/0?refresh +POST /issues/_doc/0?refresh {"tags": ["tag_1"], "comments": [{"username": "username_1"}]} -------------------------------------------------- // CONSOLE diff --git a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc index 0b2b769adfcbd..1c615e795c6a4 100644 --- a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc @@ -19,23 +19,21 @@ that is significant and probably very relevant to their search. 
5/10,000,000 vs [source,js] -------------------------------------------------- -PUT /reports?include_type_name=true +PUT /reports { "mappings": { - "report": { - "properties": { - "force": { - "type": "keyword" - }, - "crime_type": { - "type": "keyword" - } + "properties": { + "force": { + "type": "keyword" + }, + "crime_type": { + "type": "keyword" } } } } -POST /reports/report/_bulk?refresh +POST /reports/_bulk?refresh {"index":{"_id":0}} {"force": "British Transport Police", "crime_type": "Bicycle theft"} {"index":{"_id":1}} diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 3b104c90332ae..8c0e586d8b2f6 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -7,23 +7,21 @@ A multi-bucket value source based aggregation where buckets are dynamically buil [source,js] -------------------------------------------------- -PUT /products?include_type_name=true +PUT /products { "mappings": { - "product": { - "properties": { - "genre": { - "type": "keyword" - }, - "product": { - "type": "keyword" - } + "properties": { + "genre": { + "type": "keyword" + }, + "product": { + "type": "keyword" } } } } -POST /products/product/_bulk?refresh +POST /products/_bulk?refresh {"index":{"_id":0}} {"genre": "rock", "product": "Product A"} {"index":{"_id":1}} diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index 153f0fe539e1a..92133822fa51f 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -53,7 +53,7 @@ Token Filters:: [source,js] -------------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -157,7 +157,7 @@ Here is an example: [source,js] 
-------------------------------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index 28df6d2d3bcf4..cc82d2eb8179f 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -86,7 +86,7 @@ pre-defined list of English stop words: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -158,7 +158,7 @@ customization: [source,js] ---------------------------------------------------- -PUT /fingerprint_example?include_type_name=true +PUT /fingerprint_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc index 571ff953c95c3..954b514ced605 100644 --- a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc @@ -68,7 +68,7 @@ for further customization: [source,js] ---------------------------------------------------- -PUT /keyword_example?include_type_name=true +PUT /keyword_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 959504dbbd608..9a4dcbe8aaac7 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -78,7 +78,7 @@ The `arabic` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /arabic_example?include_type_name=true +PUT /arabic_example { "settings": { "analysis": { @@ -128,7 +128,7 @@ The `armenian` analyzer could be reimplemented as a `custom` 
analyzer as follows [source,js] ---------------------------------------------------- -PUT /armenian_example?include_type_name=true +PUT /armenian_example { "settings": { "analysis": { @@ -176,7 +176,7 @@ The `basque` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /basque_example?include_type_name=true +PUT /basque_example { "settings": { "analysis": { @@ -224,7 +224,7 @@ The `bengali` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /bengali_example?include_type_name=true +PUT /bengali_example { "settings": { "analysis": { @@ -275,7 +275,7 @@ The `brazilian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- -PUT /brazilian_example?include_type_name=true +PUT /brazilian_example { "settings": { "analysis": { @@ -323,7 +323,7 @@ The `bulgarian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- -PUT /bulgarian_example?include_type_name=true +PUT /bulgarian_example { "settings": { "analysis": { @@ -371,7 +371,7 @@ The `catalan` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /catalan_example?include_type_name=true +PUT /catalan_example { "settings": { "analysis": { @@ -428,7 +428,7 @@ The `cjk` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /cjk_example?include_type_name=true +PUT /cjk_example { "settings": { "analysis": { @@ -474,7 +474,7 @@ The `czech` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /czech_example?include_type_name=true +PUT /czech_example { "settings": { 
"analysis": { @@ -522,7 +522,7 @@ The `danish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /danish_example?include_type_name=true +PUT /danish_example { "settings": { "analysis": { @@ -570,7 +570,7 @@ The `dutch` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /dutch_example?include_type_name=true +PUT /dutch_example { "settings": { "analysis": { @@ -628,7 +628,7 @@ The `english` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /english_example?include_type_name=true +PUT /english_example { "settings": { "analysis": { @@ -681,7 +681,7 @@ The `finnish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /finnish_example?include_type_name=true +PUT /finnish_example { "settings": { "analysis": { @@ -729,7 +729,7 @@ The `french` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /french_example?include_type_name=true +PUT /french_example { "settings": { "analysis": { @@ -787,7 +787,7 @@ The `galician` analyzer could be reimplemented as a `custom` analyzer as follows [source,js] ---------------------------------------------------- -PUT /galician_example?include_type_name=true +PUT /galician_example { "settings": { "analysis": { @@ -835,7 +835,7 @@ The `german` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /german_example?include_type_name=true +PUT /german_example { "settings": { "analysis": { @@ -884,7 +884,7 @@ The `greek` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] 
---------------------------------------------------- -PUT /greek_example?include_type_name=true +PUT /greek_example { "settings": { "analysis": { @@ -936,7 +936,7 @@ The `hindi` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /hindi_example?include_type_name=true +PUT /hindi_example { "settings": { "analysis": { @@ -987,7 +987,7 @@ The `hungarian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- -PUT /hungarian_example?include_type_name=true +PUT /hungarian_example { "settings": { "analysis": { @@ -1036,7 +1036,7 @@ The `indonesian` analyzer could be reimplemented as a `custom` analyzer as follo [source,js] ---------------------------------------------------- -PUT /indonesian_example?include_type_name=true +PUT /indonesian_example { "settings": { "analysis": { @@ -1084,7 +1084,7 @@ The `irish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /irish_example?include_type_name=true +PUT /irish_example { "settings": { "analysis": { @@ -1148,7 +1148,7 @@ The `italian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /italian_example?include_type_name=true +PUT /italian_example { "settings": { "analysis": { @@ -1207,7 +1207,7 @@ The `latvian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /latvian_example?include_type_name=true +PUT /latvian_example { "settings": { "analysis": { @@ -1255,7 +1255,7 @@ The `lithuanian` analyzer could be reimplemented as a `custom` analyzer as follo [source,js] ---------------------------------------------------- -PUT /lithuanian_example?include_type_name=true +PUT /lithuanian_example { "settings": { "analysis": 
{ @@ -1303,7 +1303,7 @@ The `norwegian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- -PUT /norwegian_example?include_type_name=true +PUT /norwegian_example { "settings": { "analysis": { @@ -1351,7 +1351,7 @@ The `persian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /persian_example?include_type_name=true +PUT /persian_example { "settings": { "analysis": { @@ -1397,7 +1397,7 @@ The `portuguese` analyzer could be reimplemented as a `custom` analyzer as follo [source,js] ---------------------------------------------------- -PUT /portuguese_example?include_type_name=true +PUT /portuguese_example { "settings": { "analysis": { @@ -1445,7 +1445,7 @@ The `romanian` analyzer could be reimplemented as a `custom` analyzer as follows [source,js] ---------------------------------------------------- -PUT /romanian_example?include_type_name=true +PUT /romanian_example { "settings": { "analysis": { @@ -1494,7 +1494,7 @@ The `russian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /russian_example?include_type_name=true +PUT /russian_example { "settings": { "analysis": { @@ -1542,7 +1542,7 @@ The `sorani` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /sorani_example?include_type_name=true +PUT /sorani_example { "settings": { "analysis": { @@ -1592,7 +1592,7 @@ The `spanish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /spanish_example?include_type_name=true +PUT /spanish_example { "settings": { "analysis": { @@ -1640,7 +1640,7 @@ The `swedish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] 
---------------------------------------------------- -PUT /swedish_example?include_type_name=true +PUT /swedish_example { "settings": { "analysis": { @@ -1688,7 +1688,7 @@ The `turkish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /turkish_example?include_type_name=true +PUT /turkish_example { "settings": { "analysis": { @@ -1741,7 +1741,7 @@ The `thai` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- -PUT /thai_example?include_type_name=true +PUT /thai_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 759c781616926..027f37280a67d 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -177,7 +177,7 @@ on non-word characters or on underscores (`\W|_`), and to lower-case the result: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -266,7 +266,7 @@ The following more complicated example splits CamelCase text into tokens: [source,js] -------------------------------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -386,7 +386,7 @@ customization: [source,js] ---------------------------------------------------- -PUT /pattern_example?include_type_name=true +PUT /pattern_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc index 23130a4fd58ab..d82655d9bd8e1 100644 --- a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc @@ -135,7 +135,7 @@ a starting point for further 
customization: [source,js] ---------------------------------------------------- -PUT /simple_example?include_type_name=true +PUT /simple_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc index 959e493d9d5b0..3097ece21db23 100644 --- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc @@ -151,7 +151,7 @@ pre-defined list of English stop words: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -283,7 +283,7 @@ it, usually by adding token filters. This would recreate the built-in [source,js] ---------------------------------------------------- -PUT /standard_example?include_type_name=true +PUT /standard_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc index 2586d79c8443b..1b84797d94761 100644 --- a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc @@ -130,7 +130,7 @@ words as stop words: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -248,7 +248,7 @@ customization: [source,js] ---------------------------------------------------- -PUT /stop_example?include_type_name=true +PUT /stop_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc index 3ebc665abdd87..31ba8d9ce8f24 100644 --- a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc @@ -128,7 +128,7 @@ and you can use it as a starting point for further customization: [source,js] 
---------------------------------------------------- -PUT /whitespace_example?include_type_name=true +PUT /whitespace_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc index a9e11cf778d3a..6c1a1875d67ca 100644 --- a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc @@ -73,7 +73,7 @@ tags in place: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc index 0cbc6de782d79..30e565d443a3b 100644 --- a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc @@ -33,7 +33,7 @@ numerals with their Latin equivalents: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -107,7 +107,7 @@ example replaces the `:)` and `:(` emoticons with a text equivalent: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc index 77eb1bb05826d..bd22b013334a9 100644 --- a/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc @@ -8,7 +8,7 @@ equivalents, if one exists. 
Example: [source,js] -------------------------------------------------- -PUT /asciifold_example?include_type_name=true +PUT /asciifold_example { "settings" : { "analysis" : { @@ -30,7 +30,7 @@ example: [source,js] -------------------------------------------------- -PUT /asciifold_example?include_type_name=true +PUT /asciifold_example { "settings" : { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc index a7ec4d9248546..cc26d025f04f9 100644 --- a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc @@ -16,7 +16,7 @@ Bigrams are generated for characters in `han`, `hiragana`, `katakana` and [source,js] -------------------------------------------------- -PUT /cjk_bigram_example?include_type_name=true +PUT /cjk_bigram_example { "settings" : { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc index 538e28a8cf0a1..80ef7c2ce79b7 100644 --- a/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc @@ -41,7 +41,7 @@ Here is an example: [source,js] -------------------------------------------------- -PUT /common_grams_example?include_type_name=true +PUT /common_grams_example { "settings": { "analysis": { @@ -168,4 +168,4 @@ And the response will be: ] } -------------------------------------------------- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE diff --git a/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc index b345e0b7b4c08..d200c0b988bc4 100644 --- a/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc +++ 
b/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc @@ -84,7 +84,7 @@ Here is an example: [source,js] -------------------------------------------------- -PUT /compound_word_example?include_type_name=true +PUT /compound_word_example { "settings": { "index": { diff --git a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc index e241bddb12b1d..0aeadded0d4de 100644 --- a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc @@ -20,7 +20,7 @@ You can set it up like: [source,js] -------------------------------------------------- -PUT /condition_example?include_type_name=true +PUT /condition_example { "settings" : { "analysis" : { @@ -87,4 +87,4 @@ And it'd respond: // TESTRESPONSE <1> The term `What` has been lowercased, because it is only 4 characters long <2> The term `Flapdoodle` has been left in its original case, because it doesn't pass - the predicate \ No newline at end of file + the predicate diff --git a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc index 7a28760194a71..924903b9f65a8 100644 --- a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc @@ -9,7 +9,7 @@ example: [source,js] -------------------------------------------------- -PUT /elision_example?include_type_name=true +PUT /elision_example { "settings" : { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc index e68b6685a78e7..cef687f761905 100644 --- a/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc @@ -42,7 +42,7 @@ settings: 
[source,js] -------------------------------------------------- -PUT /hunspell_example?include_type_name=true +PUT /hunspell_example { "settings": { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc index ac5b3f368b53e..f8dce95f1b0fb 100644 --- a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc @@ -19,7 +19,7 @@ You can set it up like: [source,js] -------------------------------------------------- -PUT /keep_types_example?include_type_name=true +PUT /keep_types_example { "settings" : { "analysis" : { @@ -80,7 +80,7 @@ If the `mode` parameter is set to `exclude` like in the following example: [source,js] -------------------------------------------------- -PUT /keep_types_exclude_example?include_type_name=true +PUT /keep_types_exclude_example { "settings" : { "analysis" : { @@ -139,4 +139,4 @@ The response will be: ] } -------------------------------------------------- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE diff --git a/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc index 1f1d49cfe89dc..b7385379be94b 100644 --- a/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc @@ -20,7 +20,7 @@ keep_words_case:: a boolean indicating whether to lower case the words (defaults [source,js] -------------------------------------------------- -PUT /keep_words_example?include_type_name=true +PUT /keep_words_example { "settings" : { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc index 8a12b0d4757af..1f1e4e655c55e 100644 --- 
a/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc @@ -23,7 +23,7 @@ You can configure it like: [source,js] -------------------------------------------------- -PUT /keyword_marker_example?include_type_name=true +PUT /keyword_marker_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc index e0a2e4c73c96d..044e8c1476951 100644 --- a/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc @@ -14,7 +14,7 @@ preserve both the stemmed and unstemmed version of tokens: [source,js] -------------------------------------------------- -PUT /keyword_repeat_example?include_type_name=true +PUT /keyword_repeat_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc index deb13843a1ced..ba2018c107626 100644 --- a/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc @@ -18,7 +18,7 @@ Here is an example: [source,js] -------------------------------------------------- -PUT /limit_example?include_type_name=true +PUT /limit_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc index c0c35e8a60c9d..519fd77ba2afd 100644 --- a/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc @@ -10,7 +10,7 @@ custom analyzer [source,js] -------------------------------------------------- -PUT 
/lowercase_example?include_type_name=true +PUT /lowercase_example { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc index 8ad3fab8f36e7..a92e2476ad77e 100644 --- a/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc @@ -31,7 +31,7 @@ You can set it up like: [source,js] -------------------------------------------------- -PUT /multiplexer_example?include_type_name=true +PUT /multiplexer_example { "settings" : { "analysis" : { @@ -121,4 +121,4 @@ that produce multiple tokens at the same position. This means that any filters within the multiplexer will be ignored for the purpose of synonyms. If you want to use filters contained within the multiplexer for parsing synonyms (for example, to apply stemming to the synonym lists), then you should append the synonym filter -to the relevant multiplexer filter list. \ No newline at end of file +to the relevant multiplexer filter list. 
diff --git a/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc index a028abef7a1f2..5b935d31f1289 100644 --- a/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/pattern-capture-tokenfilter.asciidoc @@ -46,7 +46,7 @@ This is particularly useful for indexing text like camel-case code, eg [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "settings" : { "analysis" : { @@ -87,7 +87,7 @@ Another example is analyzing email addresses: [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "settings" : { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc index 2e7f7c11631fe..44ead824a84f7 100644 --- a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc @@ -17,7 +17,7 @@ You can set it up like: [source,js] -------------------------------------------------- -PUT /condition_example?include_type_name=true +PUT /condition_example { "settings" : { "analysis" : { @@ -76,4 +76,4 @@ And it'd respond: <1> The token 'What' has been removed from the tokenstream because it does not match the predicate. 
-<2> The position and offset values are unaffected by the removal of earlier tokens \ No newline at end of file +<2> The position and offset values are unaffected by the removal of earlier tokens diff --git a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc index c9f8eff813626..99ed03649ff93 100644 --- a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc @@ -12,7 +12,7 @@ For example: [source,js] -------------------------------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc index b80bd517b1981..e178181d1474a 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc @@ -20,7 +20,7 @@ Here is an example: [source,js] -------------------------------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "analysis" : { @@ -53,7 +53,7 @@ You can also define the overrides rules inline: [source,js] -------------------------------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index f6db7206b04f6..f59e2f3f2cf88 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -6,7 +6,7 @@ filters through a single unified interface. 
For example: [source,js] -------------------------------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "analysis" : { diff --git a/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc index 579d2202c00ed..3167a4342ac2d 100644 --- a/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stop-tokenfilter.asciidoc @@ -33,7 +33,7 @@ The `stopwords` parameter accepts either an array of stopwords: [source,js] ------------------------------------ -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "analysis": { @@ -53,7 +53,7 @@ or a predefined language-specific list: [source,js] ------------------------------------ -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index d614a6bcf4d82..2a555d7d044da 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -23,7 +23,7 @@ Here is an example: [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -59,7 +59,7 @@ to note that only those synonym rules which cannot get parsed are ignored. 
For i [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -118,7 +118,7 @@ configuration file (note use of `synonyms` instead of `synonyms_path`): [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -150,7 +150,7 @@ declared using `format`: [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 6bb42354c3556..715abdde6331d 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -7,7 +7,7 @@ Here is an example: [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -46,7 +46,7 @@ to note that only those synonym rules which cannot get parsed are ignored. For i [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -106,7 +106,7 @@ configuration file (note use of `synonyms` instead of `synonyms_path`): [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -138,7 +138,7 @@ declared using `format`: [source,js] -------------------------------------------------- -PUT /test_index?include_type_name=true +PUT /test_index { "settings": { "index" : { @@ -175,4 +175,4 @@ positions, some token filters may cause issues here. Token filters that produce multiple versions of a token may choose which version of the token to emit when parsing synonyms, e.g. 
`asciifolding` will only produce the folded version of the token. Others, e.g. `multiplexer`, `word_delimiter_graph` or `ngram` will throw an -error. \ No newline at end of file +error. diff --git a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc index ca827e73ec6a8..52bdcbd773221 100644 --- a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc @@ -145,7 +145,7 @@ In this example, we configure the `classic` tokenizer to have a [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc index c558a293927e0..c182ffacd1cfe 100644 --- a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc @@ -209,7 +209,7 @@ digits as tokens, and to produce tri-grams (grams of length `3`): [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index b27c1fb7cefa3..55aa7d66da343 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -93,7 +93,7 @@ characters, and to replace them with `/`. 
The first two tokens are skipped: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc index de52ea31372a4..5c19fcf59cc92 100644 --- a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc @@ -125,7 +125,7 @@ tokens when it encounters commas: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -215,7 +215,7 @@ escaped, so the pattern ends up looking like: [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc index 2f68a0b8937c0..adc5fc05deeb9 100644 --- a/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc @@ -36,7 +36,7 @@ three-digit numbers [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc index d74f8823ff350..fc2e186f97267 100644 --- a/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc @@ -37,7 +37,7 @@ text on underscores. 
[source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc index b19f31188002f..9f77a0e13dc88 100644 --- a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc @@ -136,7 +136,7 @@ In this example, we configure the `standard` tokenizer to have a [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc index 8df2bc507528e..7fea0f1e8d8aa 100644 --- a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc @@ -92,7 +92,7 @@ In this example, we configure the `uax_url_email` tokenizer to have a [source,js] ---------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { diff --git a/docs/reference/cat/alias.asciidoc b/docs/reference/cat/alias.asciidoc index 9cf831249dda9..394231e448dc0 100644 --- a/docs/reference/cat/alias.asciidoc +++ b/docs/reference/cat/alias.asciidoc @@ -8,7 +8,7 @@ including filter and routing infos. 
Hidden setup for example: [source,js] -------------------------------------------------- -PUT test1?include_type_name=true +PUT test1 { "aliases": { "alias1": {}, diff --git a/docs/reference/ccr/getting-started.asciidoc b/docs/reference/ccr/getting-started.asciidoc index 16041ef8332dd..1af236f7d86fb 100644 --- a/docs/reference/ccr/getting-started.asciidoc +++ b/docs/reference/ccr/getting-started.asciidoc @@ -174,7 +174,7 @@ In the following example, we will create a leader index in the remote cluster: [source,js] -------------------------------------------------- -PUT /server-metrics?include_type_name=true +PUT /server-metrics { "settings" : { "index" : { @@ -188,29 +188,27 @@ PUT /server-metrics?include_type_name=true } }, "mappings" : { - "metric" : { - "properties" : { - "@timestamp" : { - "type" : "date" - }, - "accept" : { - "type" : "long" - }, - "deny" : { - "type" : "long" - }, - "host" : { - "type" : "keyword" - }, - "response" : { - "type" : "float" - }, - "service" : { - "type" : "keyword" - }, - "total" : { - "type" : "long" - } + "properties" : { + "@timestamp" : { + "type" : "date" + }, + "accept" : { + "type" : "long" + }, + "deny" : { + "type" : "long" + }, + "host" : { + "type" : "keyword" + }, + "response" : { + "type" : "float" + }, + "service" : { + "type" : "keyword" + }, + "total" : { + "type" : "long" } } } diff --git a/docs/reference/ilm/apis/explain.asciidoc b/docs/reference/ilm/apis/explain.asciidoc index d23faf9f75d54..66762ead9eb32 100644 --- a/docs/reference/ilm/apis/explain.asciidoc +++ b/docs/reference/ilm/apis/explain.asciidoc @@ -64,7 +64,7 @@ PUT _ilm/policy/my_policy } } -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_policy", diff --git a/docs/reference/ilm/apis/move-to-step.asciidoc b/docs/reference/ilm/apis/move-to-step.asciidoc index 207d5139298b7..57ea1a226ea40 100644 --- a/docs/reference/ilm/apis/move-to-step.asciidoc +++ b/docs/reference/ilm/apis/move-to-step.asciidoc @@ -72,7 
+72,7 @@ PUT _ilm/policy/my_policy } } -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_policy" diff --git a/docs/reference/ilm/apis/remove-policy-from-index.asciidoc b/docs/reference/ilm/apis/remove-policy-from-index.asciidoc index 81646cc135000..888d3f17eecac 100644 --- a/docs/reference/ilm/apis/remove-policy-from-index.asciidoc +++ b/docs/reference/ilm/apis/remove-policy-from-index.asciidoc @@ -62,7 +62,7 @@ PUT _ilm/policy/my_policy } } -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_policy" diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index a671e33f2b1db..abe643255bf95 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -41,7 +41,7 @@ telling it to use the policy they have created: [source,js] -------------------------------------------------- -PUT /myindex?include_type_name=true +PUT /myindex { "settings": { "index.number_of_shards": 2, diff --git a/docs/reference/ilm/getting-started-ilm.asciidoc b/docs/reference/ilm/getting-started-ilm.asciidoc index 5c5e188a9d474..f06c95f49c067 100644 --- a/docs/reference/ilm/getting-started-ilm.asciidoc +++ b/docs/reference/ilm/getting-started-ilm.asciidoc @@ -107,7 +107,7 @@ To begin, we will want to bootstrap our first index to write to. [source,js] ----------------------- -PUT datastream-000001?include_type_name=true +PUT datastream-000001 { "aliases": { "datastream": { diff --git a/docs/reference/ilm/policy-definitions.asciidoc b/docs/reference/ilm/policy-definitions.asciidoc index 2f71c20e2c76a..adf78cecd89fe 100644 --- a/docs/reference/ilm/policy-definitions.asciidoc +++ b/docs/reference/ilm/policy-definitions.asciidoc @@ -356,7 +356,7 @@ index "my_index" must be the write index for the alias. 
For more information, re [source,js] -------------------------------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_policy", diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 417b4bf9ef875..7af686238f334 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -73,7 +73,7 @@ initial index which will be managed by our policy: [source,js] ----------------------- -PUT test-000001?include_type_name=true +PUT test-000001 { "aliases": { "test-alias":{ @@ -96,7 +96,7 @@ request so {ilm} immediately starts managing the index: [source,js] ----------------------- -PUT test-index?include_type_name=true +PUT test-index { "settings": { "number_of_shards": 1, diff --git a/docs/reference/ilm/start-stop-ilm.asciidoc b/docs/reference/ilm/start-stop-ilm.asciidoc index 1b5666f141096..e5366f028a9c7 100644 --- a/docs/reference/ilm/start-stop-ilm.asciidoc +++ b/docs/reference/ilm/start-stop-ilm.asciidoc @@ -39,7 +39,7 @@ PUT _ilm/policy/my_policy } } -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_policy" diff --git a/docs/reference/ilm/update-lifecycle-policy.asciidoc b/docs/reference/ilm/update-lifecycle-policy.asciidoc index da3983d053c59..3e6627fdd3a7e 100644 --- a/docs/reference/ilm/update-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/update-lifecycle-policy.asciidoc @@ -168,7 +168,7 @@ PUT _ilm/policy/my_executing_policy //// [source,js] ------------------------ -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_executing_policy" @@ -486,7 +486,7 @@ PUT _ilm/policy/my_other_policy } } -PUT my_index?include_type_name=true +PUT my_index { "settings": { "index.lifecycle.name": "my_policy" diff --git a/docs/reference/index-modules/allocation/prioritization.asciidoc 
b/docs/reference/index-modules/allocation/prioritization.asciidoc index f702a2f20f67c..6693e6adb755e 100644 --- a/docs/reference/index-modules/allocation/prioritization.asciidoc +++ b/docs/reference/index-modules/allocation/prioritization.asciidoc @@ -19,14 +19,14 @@ PUT index_1 PUT index_2 -PUT index_3?include_type_name=true +PUT index_3 { "settings": { "index.priority": 10 } } -PUT index_4?include_type_name=true +PUT index_4 { "settings": { "index.priority": 5 diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc index 1483a04868e81..8c1b99a42f2a6 100644 --- a/docs/reference/index-modules/store.asciidoc +++ b/docs/reference/index-modules/store.asciidoc @@ -24,7 +24,7 @@ creation time: [source,js] --------------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "index.store.type": "niofs" @@ -114,7 +114,7 @@ or in the index settings at index creation time: [source,js] --------------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "index.store.preload": ["nvd", "dvd"] diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc index 8570176282ef6..a0d0f3c5b2449 100644 --- a/docs/reference/indices/analyze.asciidoc +++ b/docs/reference/indices/analyze.asciidoc @@ -224,7 +224,7 @@ The following setting allows to limit the number of tokens that can be produced: [source,js] -------------------------------------------------- -PUT analyze_sample?include_type_name=true +PUT analyze_sample { "settings" : { "index.analyze.max_token_count" : 20000 @@ -242,4 +242,4 @@ GET analyze_sample/_analyze } -------------------------------------------------- // CONSOLE -// TEST[setup:analyze_sample] \ No newline at end of file +// TEST[setup:analyze_sample] diff --git a/docs/reference/indices/recovery.asciidoc b/docs/reference/indices/recovery.asciidoc index 975beacc02cb2..0929b36e7742d 100644 --- 
a/docs/reference/indices/recovery.asciidoc +++ b/docs/reference/indices/recovery.asciidoc @@ -24,7 +24,7 @@ indices recovery result. [source,js] -------------------------------------------------- # create the index -PUT index1?include_type_name=true +PUT index1 {"settings": {"index.number_of_shards": 1}} # create the repository diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index d53fd46551983..1730c95e0dd24 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -187,7 +187,7 @@ override any values set in matching index templates. For example, the following [source,js] -------------------------------------------------- -PUT /logs-000001?include_type_name=true +PUT /logs-000001 { "aliases": { "logs_write": {} @@ -216,7 +216,7 @@ checked without performing the actual rollover: [source,js] -------------------------------------------------- -PUT /logs-000001?include_type_name=true +PUT /logs-000001 { "aliases": { "logs_write": {} @@ -258,7 +258,7 @@ Look at the behavior of the aliases in the following example where `is_write_ind [source,js] -------------------------------------------------- -PUT my_logs_index-000001?include_type_name=true +PUT my_logs_index-000001 { "aliases": { "logs": { "is_write_index": true } <1> diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc index 465d7603a603d..ade0a8075d582 100644 --- a/docs/reference/indices/split-index.asciidoc +++ b/docs/reference/indices/split-index.asciidoc @@ -86,7 +86,7 @@ Create a new index: [source,js] -------------------------------------------------- -PUT my_source_index?include_type_name=true +PUT my_source_index { "settings": { "index.number_of_shards" : 1 diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 11ba1c55cb228..a740b8c3b41a0 100644 --- 
a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -198,14 +198,12 @@ the cell right next to it -- even though the shape is very close to the point. [source,js] -------------------------------------------------- -PUT /example?include_type_name=true +PUT /example { "mappings": { - "doc": { - "properties": { - "location": { - "type": "geo_shape" - } + "properties": { + "location": { + "type": "geo_shape" } } } @@ -289,7 +287,7 @@ API. The following is an example of a point in GeoJSON. [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "point", @@ -303,7 +301,7 @@ The following is an example of a point in WKT: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "POINT (-77.03653 38.897676)" } @@ -320,7 +318,7 @@ following is an example of a LineString in GeoJSON. [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "linestring", @@ -334,7 +332,7 @@ The following is an example of a LineString in WKT: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "LINESTRING (-77.03653 38.897676, -77.009051 38.889939)" } @@ -353,7 +351,7 @@ closed). The following is an example of a Polygon in GeoJSON. 
[source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "polygon", @@ -369,7 +367,7 @@ The following is an example of a Polygon in WKT: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))" } @@ -382,7 +380,7 @@ of a polygon with a hole: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "polygon", @@ -399,7 +397,7 @@ The following is an example of a Polygon with a hole in WKT: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))" } @@ -427,7 +425,7 @@ crosses the dateline. [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "polygon", @@ -447,7 +445,7 @@ overriding the orientation on a document: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "polygon", @@ -467,7 +465,7 @@ The following is an example of a list of geojson points: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "multipoint", @@ -483,7 +481,7 @@ The following is an example of a list of WKT points: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "MULTIPOINT (102.0 2.0, 103.0 2.0)" } @@ -497,7 +495,7 @@ The following is an example of a list of geojson linestrings: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "multilinestring", @@ -515,7 +513,7 @@ The following is 
an example of a list of WKT linestrings: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "MULTILINESTRING ((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0), (100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8))" } @@ -529,7 +527,7 @@ The following is an example of a list of geojson polygons (second polygon contai [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "multipolygon", @@ -547,7 +545,7 @@ The following is an example of a list of WKT polygons (second polygon contains a [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "MULTIPOLYGON (((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0, 102.0 2.0)), ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2)))" } @@ -561,7 +559,7 @@ The following is an example of a collection of geojson geometry objects: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type": "geometrycollection", @@ -584,7 +582,7 @@ The following is an example of a collection of WKT geometry objects: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "GEOMETRYCOLLECTION (POINT (100.0 0.0), LINESTRING (101.0 0.0, 102.0 1.0))" } @@ -601,7 +599,7 @@ bounding rectangle in the format [[minLon, maxLat],[maxLon, minLat]]: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "envelope", @@ -617,7 +615,7 @@ The following is an example of an envelope using the WKT BBOX format: [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : "BBOX (100.0, 102.0, 2.0, 0.0)" } @@ -635,7 +633,7 @@ a 
`POLYGON`. [source,js] -------------------------------------------------- -POST /example/doc +POST /example/_doc { "location" : { "type" : "circle", diff --git a/docs/reference/modules/indices/request_cache.asciidoc b/docs/reference/modules/indices/request_cache.asciidoc index d13c108589836..fc04c5e9c63f6 100644 --- a/docs/reference/modules/indices/request_cache.asciidoc +++ b/docs/reference/modules/indices/request_cache.asciidoc @@ -55,7 +55,7 @@ index as follows: [source,js] ----------------------------- -PUT /my_index?include_type_name=true +PUT /my_index { "settings": { "index.requests.cache.enable": false diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index 3d8b08b8ff44e..509b4a9b44094 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -119,26 +119,24 @@ default, but there will be no speed up on analysis for these fields. [source,js] -------------------------------------------------- -PUT /imdb?include_type_name=true +PUT /imdb { "mappings": { - "movies": { - "properties": { - "title": { - "type": "text", - "term_vector": "yes" - }, - "description": { - "type": "text" - }, - "tags": { - "type": "text", - "fields" : { - "raw": { - "type" : "text", - "analyzer": "keyword", - "term_vector" : "yes" - } + "properties": { + "title": { + "type": "text", + "term_vector": "yes" + }, + "description": { + "type": "text" + }, + "tags": { + "type": "text", + "fields" : { + "raw": { + "type" : "text", + "analyzer": "keyword", + "term_vector" : "yes" } } } diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index 4dd037cf3c001..ad836c7c535e7 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -787,15 +787,13 @@ allow for highlighting using the postings: [source,js] -------------------------------------------------- -PUT 
/example?include_type_name=true +PUT /example { "mappings": { - "doc" : { - "properties": { - "comment" : { - "type": "text", - "index_options" : "offsets" - } + "properties": { + "comment" : { + "type": "text", + "index_options" : "offsets" } } } @@ -808,15 +806,13 @@ highlighting using the `term_vectors` (this will cause the index to be bigger): [source,js] -------------------------------------------------- -PUT /example?include_type_name=true +PUT /example { "mappings": { - "doc" : { - "properties": { - "comment" : { - "type": "text", - "term_vector" : "with_positions_offsets" - } + "properties": { + "comment" : { + "type": "text", + "term_vector" : "with_positions_offsets" } } } From 699d88173998ea13dd8ef88d09eaac6fff953d5b Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 18 Jan 2019 09:15:30 +0000 Subject: [PATCH 42/71] Migrate IndicesExistsIT to Zen2 (#37526) This test was actually passing, for the wrong reason: it asserts a `MasterNotDiscoveredException` is thrown, expecting this to be due to a failure to perform state recovery, but in fact it's thrown because the node is not correctly bootstrapped. 
--- .../admin/indices/exists/IndicesExistsIT.java | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java index 1116c88b6fc87..cd90cda2ba286 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/exists/IndicesExistsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.exists; +import org.elasticsearch.cluster.coordination.ClusterBootstrapService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.MasterNotDiscoveredException; @@ -29,17 +30,29 @@ import org.elasticsearch.test.InternalTestCluster; import java.io.IOException; +import java.util.List; +import java.util.stream.Collectors; +import static org.elasticsearch.node.Node.NODE_MASTER_SETTING; +import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; @ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0, autoMinMasterNodes = false) public class IndicesExistsIT extends ESIntegTestCase { + @Override + protected List addExtraClusterBootstrapSettings(List allNodesSettings) { + final List masterNodeNames + = allNodesSettings.stream().filter(NODE_MASTER_SETTING::get).map(NODE_NAME_SETTING::get).collect(Collectors.toList()); + return allNodesSettings.stream().map(s -> Settings.builder().put(s) + .putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), masterNodeNames).build()).collect(Collectors.toList()); + } + public void testIndexExistsWithBlocksInPlace() throws IOException { Settings settings = Settings.builder() 
.put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 99) - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1).build(); + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.MAX_VALUE).build(); String node = internalCluster().startNode(settings); assertThrows(client(node).admin().indices().prepareExists("test").setMasterNodeTimeout(TimeValue.timeValueSeconds(0)), From 65e76b3f6f5ae73c91f4f66f9cd2458d13d8c994 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 18 Jan 2019 09:15:51 +0000 Subject: [PATCH 43/71] Migrate RecoveryFromGatewayIT to Zen2 (#37520) * Fixes `testTwoNodeFirstNodeCleared` by manipulating voting config exclusions. * Removes `testRecoveryDifferentNodeOrderStartup` since state recovery is now handled entirely on the elected master, so the order in which the data nodes start is irrelevant. --- .../gateway/RecoveryFromGatewayIT.java | 62 +++++-------------- 1 file changed, 16 insertions(+), 46 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 3f337b9afee42..4b0e431c66352 100644 --- a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -20,7 +20,10 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectCursor; - +import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; 
import org.elasticsearch.action.admin.indices.stats.ShardStats; @@ -33,7 +36,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -49,9 +51,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.InternalTestCluster.RestartCallback; -import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.store.MockFSIndexStore; import java.nio.file.DirectoryStream; @@ -66,9 +66,11 @@ import java.util.Set; import java.util.stream.IntStream; +import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.gateway.GatewayService.RECOVER_AFTER_NODES_SETTING; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -88,14 +90,6 @@ protected Collection> nodePlugins() { return Arrays.asList(MockFSIndexStore.TestPlugin.class, InternalSettingsPlugin.class); } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - // testTwoNodeFirstNodeCleared does unsafe things, and testLatestVersionLoaded / 
testRecoveryDifferentNodeOrderStartup also fail - .put(TestZenDiscovery.USE_ZEN2.getKey(), false) - .build(); - } - public void testOneNodeRecoverFromGateway() throws Exception { internalCluster().startNode(); @@ -312,10 +306,15 @@ public void testTwoNodeFirstNodeCleared() throws Exception { Map primaryTerms = assertAndCapturePrimaryTerms(null); + client().execute(AddVotingConfigExclusionsAction.INSTANCE, new AddVotingConfigExclusionsRequest(new String[]{firstNode})).get(); + internalCluster().fullRestart(new RestartCallback() { @Override - public Settings onNodeStopped(String nodeName) throws Exception { - return Settings.builder().put("gateway.recover_after_nodes", 2).build(); + public Settings onNodeStopped(String nodeName) { + return Settings.builder() + .put(RECOVER_AFTER_NODES_SETTING.getKey(), 2) + .putList(INITIAL_MASTER_NODES_SETTING.getKey()) // disable bootstrapping + .build(); } @Override @@ -332,6 +331,8 @@ public boolean clearData(String nodeName) { for (int i = 0; i < 10; i++) { assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2); } + + client().execute(ClearVotingConfigExclusionsAction.INSTANCE, new ClearVotingConfigExclusionsRequest()).get(); } public void testLatestVersionLoaded() throws Exception { @@ -523,37 +524,6 @@ public void assertSyncIdsNotNull() { } } - public void testRecoveryDifferentNodeOrderStartup() throws Exception { - // we need different data paths so we make sure we start the second node fresh - - final Path pathNode1 = createTempDir(); - final String node_1 = - internalCluster().startNode(Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), pathNode1).build()); - - client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet(); - - final Path pathNode2 = createTempDir(); - final String node_2 = - internalCluster().startNode(Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), pathNode2).build()); - - ensureGreen(); - Map 
primaryTerms = assertAndCapturePrimaryTerms(null); - - if (randomBoolean()) { - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_1)); - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2)); - } else { - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2)); - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_1)); - } - // start the second node again - internalCluster().startNode(Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), pathNode2).build()); - ensureYellow(); - primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); - assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(true)); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet(), 1); - } - public void testStartedShardFoundIfStateNotYetProcessed() throws Exception { // nodes may need to report the shards they processed the initial recovered cluster state from the master final String nodeName = internalCluster().startNode(); @@ -569,7 +539,7 @@ public void testStartedShardFoundIfStateNotYetProcessed() throws Exception { @Override public Settings onNodeStopped(String nodeName) throws Exception { // make sure state is not recovered - return Settings.builder().put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 2).build(); + return Settings.builder().put(RECOVER_AFTER_NODES_SETTING.getKey(), 2).build(); } }); From 29d3a708da0623b1ee29c07db99b6db49ba9089b Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 18 Jan 2019 10:54:35 +0100 Subject: [PATCH 44/71] Fix BulkWithUpdatesIT and CloseIndexIT As of today the Close Index API does its best to close indices, but closing an index with ongoing recoveries might or might not be acknowledged depending of the values of the max seq number and global checkpoint at the time the TransportVerifyShardBeforeClose action is executed. 
These tests failed because they always expect that the index is correctly closed on the first try, which is not always the case. Instead we need to retry the closing until it succeed. Closes #37571 --- .../action/bulk/BulkWithUpdatesIT.java | 9 +++++++-- .../indices/state/CloseIndexIT.java | 18 +++++++++--------- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index 8a7c46ebcf60e..277c130cebb1b 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; @@ -57,6 +58,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -569,7 +571,7 @@ public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception{ SearchResponse searchResponse = client().prepareSearch("bulkindex*").get(); assertHitCount(searchResponse, 3); - assertAcked(client().admin().indices().prepareClose("bulkindex2")); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex2"))); BulkResponse bulkResponse = client().bulk(bulkRequest).get(); assertThat(bulkResponse.hasFailures(), is(true)); @@ -581,7 +583,7 @@ public void testFailedRequestsOnClosedIndex() throws Exception { 
createIndex("bulkindex1"); client().prepareIndex("bulkindex1", "index1_type", "1").setSource("text", "test").get(); - assertAcked(client().admin().indices().prepareClose("bulkindex1")); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose("bulkindex1"))); BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(RefreshPolicy.IMMEDIATE); bulkRequest.add(new IndexRequest("bulkindex1", "index1_type", "1").source(Requests.INDEX_CONTENT_TYPE, "text", "hallo1")) @@ -593,8 +595,11 @@ public void testFailedRequestsOnClosedIndex() throws Exception { BulkItemResponse[] responseItems = bulkResponse.getItems(); assertThat(responseItems.length, is(3)); assertThat(responseItems[0].getOpType(), is(OpType.INDEX)); + assertThat(responseItems[0].getFailure().getCause(), instanceOf(IndexClosedException.class)); assertThat(responseItems[1].getOpType(), is(OpType.UPDATE)); + assertThat(responseItems[1].getFailure().getCause(), instanceOf(IndexClosedException.class)); assertThat(responseItems[2].getOpType(), is(OpType.DELETE)); + assertThat(responseItems[2].getFailure().getCause(), instanceOf(IndexClosedException.class)); } // issue 9821 diff --git a/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java b/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java index a0304c96430f0..e00c5038ce273 100644 --- a/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java +++ b/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -41,6 +40,7 @@ import static java.util.Collections.emptySet; import static 
java.util.stream.Collectors.toList; +import static org.elasticsearch.action.support.IndicesOptions.lenientExpandOpen; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; @@ -64,9 +64,9 @@ public void testCloseOneMissingIndex() { assertThat(e.getMessage(), is("no such index [test2]")); } - public void testCloseOneMissingIndexIgnoreMissing() { + public void testCloseOneMissingIndexIgnoreMissing() throws Exception { createIndex("test1"); - assertAcked(client().admin().indices().prepareClose("test1", "test2").setIndicesOptions(IndicesOptions.lenientExpandOpen())); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose("test1", "test2").setIndicesOptions(lenientExpandOpen()))); assertIndexIsClosed("test1"); } @@ -90,7 +90,7 @@ public void testCloseIndex() throws Exception { indexRandom(randomBoolean(), false, randomBoolean(), IntStream.range(0, nbDocs) .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)).collect(toList())); - assertAcked(client().admin().indices().prepareClose(indexName)); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose(indexName))); assertIndexIsClosed(indexName); assertAcked(client().admin().indices().prepareOpen(indexName)); @@ -106,15 +106,15 @@ public void testCloseAlreadyClosedIndex() throws Exception { .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)).collect(toList())); } // First close should be acked - assertAcked(client().admin().indices().prepareClose(indexName)); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose(indexName))); assertIndexIsClosed(indexName); // Second close should be acked too - assertAcked(client().admin().indices().prepareClose(indexName)); + assertBusy(() -> 
assertAcked(client().admin().indices().prepareClose(indexName))); assertIndexIsClosed(indexName); } - public void testCloseUnassignedIndex() { + public void testCloseUnassignedIndex() throws Exception { final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); assertAcked(prepareCreate(indexName) .setWaitForActiveShards(ActiveShardCount.NONE) @@ -124,7 +124,7 @@ public void testCloseUnassignedIndex() { assertThat(clusterState.metaData().indices().get(indexName).getState(), is(IndexMetaData.State.OPEN)); assertThat(clusterState.routingTable().allShards().stream().allMatch(ShardRouting::unassigned), is(true)); - assertAcked(client().admin().indices().prepareClose(indexName)); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose(indexName))); assertIndexIsClosed(indexName); } @@ -172,7 +172,7 @@ public void testCloseWhileIndexingDocuments() throws Exception { indexer.setAssertNoFailuresOnStop(false); waitForDocs(randomIntBetween(10, 50), indexer); - assertAcked(client().admin().indices().prepareClose(indexName)); + assertBusy(() -> assertAcked(client().admin().indices().prepareClose(indexName))); indexer.stop(); nbDocs += indexer.totalIndexedDocs(); From 3a96608b3fea5a855b5ad5f8c6b427f7ade13180 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 18 Jan 2019 14:11:18 +0100 Subject: [PATCH 45/71] Remove more include_type_name and types from docs (#37601) --- .../painless/painless-execute-script.asciidoc | 26 ++++------ docs/plugins/analysis-icu.asciidoc | 38 +++++++------- docs/plugins/mapper-annotated-text.asciidoc | 32 ++++++------ docs/plugins/mapper-murmur3.asciidoc | 16 +++--- docs/plugins/mapper-size.asciidoc | 8 ++- .../bucket/children-aggregation.asciidoc | 14 +++--- .../bucket/composite-aggregation.asciidoc | 36 +++++++------ .../bucket/geodistance-aggregation.asciidoc | 12 ++--- .../bucket/geohashgrid-aggregation.asciidoc | 16 +++--- .../bucket/parent-aggregation.asciidoc | 20 ++++---- 
.../metrics/geobounds-aggregation.asciidoc | 12 ++--- .../metrics/tophits-aggregation.asciidoc | 18 +++---- docs/reference/analysis.asciidoc | 12 ++--- docs/reference/analysis/normalizers.asciidoc | 12 ++--- docs/reference/analysis/testing.asciidoc | 12 ++--- docs/reference/cat/fielddata.asciidoc | 20 ++++---- docs/reference/docs/get.asciidoc | 24 ++++----- docs/reference/docs/update-by-query.asciidoc | 10 ++-- docs/reference/indices/aliases.asciidoc | 26 ++++------ .../indices/get-field-mapping.asciidoc | 20 ++++---- docs/reference/indices/put-mapping.asciidoc | 20 ++++---- docs/reference/mapping.asciidoc | 23 ++++----- .../modules/scripting/fields.asciidoc | 26 +++++----- .../search/request/inner-hits.asciidoc | 50 ++++++++----------- 24 files changed, 220 insertions(+), 283 deletions(-) diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index 3b1b03ca3b698..fc5a6bf71d14a 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -71,14 +71,12 @@ index:: The name of an index containing a mapping that is compatible with the do [source,js] ---------------------------------------------------------------- -PUT /my-index?include_type_name=true +PUT /my-index { "mappings": { - "_doc": { - "properties": { - "field": { - "type": "keyword" - } + "properties": { + "field": { + "type": "keyword" } } } @@ -129,17 +127,15 @@ query:: If `_score` is used in the script then a query can specified that will b [source,js] ---------------------------------------------------------------- -PUT /my-index?include_type_name=true +PUT /my-index { "mappings": { - "_doc": { - "properties": { - "field": { - "type": "keyword" - }, - "rank": { - "type": "long" - } + "properties": { + "field": { + "type": "keyword" + }, + "rank": { + "type": "long" } } } diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index a29acf7f2b0ee..51be1907c9869 100644 
--- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -64,7 +64,7 @@ Here are two examples, the default usage and a customised character filter: [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index": { @@ -112,7 +112,7 @@ using custom rules to break Myanmar and Khmer text into syllables. [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index": { @@ -153,7 +153,7 @@ Then create an analyzer to use this rule file as follows: [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index":{ @@ -221,7 +221,7 @@ Here are two examples, the default usage and a customised token filter: [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index": { @@ -267,7 +267,7 @@ available to all indices: [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index": { @@ -301,7 +301,7 @@ these filtered character are not lowercased which is why we add the [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index": { @@ -354,21 +354,19 @@ Below is an example of how to set up a field for sorting German names in [source,js] -------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "properties": { - "name": { <1> - "type": "text", - "fields": { - "sort": { <2> - "type": "icu_collation_keyword", - "index": false, - "language": "de", - "country": "DE", - "variant": "@collation=phonebook" - } + "properties": { + "name": { <1> + "type": "text", + "fields": { + "sort": { <2> + "type": "icu_collation_keyword", + "index": false, + 
"language": "de", + "country": "DE", + "variant": "@collation=phonebook" } } } @@ -503,7 +501,7 @@ For example: [source,js] -------------------------------------------------- -PUT icu_sample?include_type_name=true +PUT icu_sample { "settings": { "index": { diff --git a/docs/plugins/mapper-annotated-text.asciidoc b/docs/plugins/mapper-annotated-text.asciidoc index d026b8a98b2f9..a148cec76bac0 100644 --- a/docs/plugins/mapper-annotated-text.asciidoc +++ b/docs/plugins/mapper-annotated-text.asciidoc @@ -24,14 +24,12 @@ the search index: [source,js] -------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "properties": { - "my_field": { - "type": "annotated_text" - } + "properties": { + "my_field": { + "type": "annotated_text" } } } @@ -168,20 +166,18 @@ sense to include them in dedicated structured fields to support discovery via ag [source,js] -------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "properties": { - "my_unstructured_text_field": { - "type": "annotated_text" - }, - "my_structured_people_field": { - "type": "text", - "fields": { - "keyword" :{ - "type": "keyword" - } + "properties": { + "my_unstructured_text_field": { + "type": "annotated_text" + }, + "my_structured_people_field": { + "type": "text", + "fields": { + "keyword" : { + "type": "keyword" } } } diff --git a/docs/plugins/mapper-murmur3.asciidoc b/docs/plugins/mapper-murmur3.asciidoc index dc6d055d40096..7d8a5c7410254 100644 --- a/docs/plugins/mapper-murmur3.asciidoc +++ b/docs/plugins/mapper-murmur3.asciidoc @@ -16,17 +16,15 @@ value and its hash are stored in the index: [source,js] -------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "properties": { - "my_field": { - "type": "keyword", - "fields": { - "hash": { - "type": "murmur3" - } + "properties": { + "my_field": { + "type": "keyword", + "fields": { + "hash": { + "type": "murmur3" } 
} } diff --git a/docs/plugins/mapper-size.asciidoc b/docs/plugins/mapper-size.asciidoc index 141cf382568bf..d4bb3c144a79c 100644 --- a/docs/plugins/mapper-size.asciidoc +++ b/docs/plugins/mapper-size.asciidoc @@ -15,13 +15,11 @@ In order to enable the `_size` field, set the mapping as follows: [source,js] -------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "_size": { - "enabled": true - } + "_size": { + "enabled": true } } } diff --git a/docs/reference/aggregations/bucket/children-aggregation.asciidoc b/docs/reference/aggregations/bucket/children-aggregation.asciidoc index 7a06d218b357f..d5ac6b5e09d14 100644 --- a/docs/reference/aggregations/bucket/children-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/children-aggregation.asciidoc @@ -11,16 +11,14 @@ For example, let's say we have an index of questions and answers. The answer typ [source,js] -------------------------------------------------- -PUT child_example?include_type_name=true +PUT child_example { "mappings": { - "_doc": { - "properties": { - "join": { - "type": "join", - "relations": { - "question": "answer" - } + "properties": { + "join": { + "type": "join", + "relations": { + "question": "answer" } } } diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc index cc4d778bff032..eb56fa6f8500c 100644 --- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc @@ -16,29 +16,27 @@ a composite bucket. 
[source,js] -------------------------------------------------- -PUT /sales?include_type_name=true +PUT /sales { - "mappings": { - "_doc": { - "properties": { - "product": { - "type": "keyword" - }, - "timestamp": { - "type": "date" - }, - "price": { - "type": "long" - }, - "shop": { - "type": "keyword" - } - } - } + "mappings": { + "properties": { + "product": { + "type": "keyword" + }, + "timestamp": { + "type": "date" + }, + "price": { + "type": "long" + }, + "shop": { + "type": "keyword" + } } + } } -POST /sales/_doc/_bulk?refresh +POST /sales/_bulk?refresh {"index":{"_id":0}} {"product": "mad max", "price": "20", "timestamp": "2017-05-09T14:35"} {"index":{"_id":1}} diff --git a/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc b/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc index aafcc808530a3..21886686b67ac 100644 --- a/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc @@ -5,20 +5,18 @@ A multi-bucket aggregation that works on `geo_point` fields and conceptually wor [source,js] -------------------------------------------------- -PUT /museums?include_type_name=true +PUT /museums { "mappings": { - "_doc": { - "properties": { - "location": { - "type": "geo_point" - } + "properties": { + "location": { + "type": "geo_point" } } } } -POST /museums/_doc/_bulk?refresh +POST /museums/_bulk?refresh {"index":{"_id":1}} {"location": "52.374081,4.912350", "name": "NEMO Science Museum"} {"index":{"_id":2}} diff --git a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc index bfaec5ee8254f..d956ef4bfdc9a 100644 --- a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc @@ -19,20 +19,18 @@ The specified field must be of type `geo_point` (which can only be set explicitl 
[source,js] -------------------------------------------------- -PUT /museums?include_type_name=true +PUT /museums { "mappings": { - "_doc": { - "properties": { - "location": { - "type": "geo_point" - } - } - } + "properties": { + "location": { + "type": "geo_point" + } + } } } -POST /museums/_doc/_bulk?refresh +POST /museums/_bulk?refresh {"index":{"_id":1}} {"location": "52.374081,4.912350", "name": "NEMO Science Museum"} {"index":{"_id":2}} diff --git a/docs/reference/aggregations/bucket/parent-aggregation.asciidoc b/docs/reference/aggregations/bucket/parent-aggregation.asciidoc index 3e0a1606ce277..37d98f8b9be3d 100644 --- a/docs/reference/aggregations/bucket/parent-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/parent-aggregation.asciidoc @@ -11,19 +11,17 @@ For example, let's say we have an index of questions and answers. The answer typ [source,js] -------------------------------------------------- -PUT parent_example?include_type_name=true +PUT parent_example { "mappings": { - "_doc": { - "properties": { - "join": { - "type": "join", - "relations": { - "question": "answer" - } - } - } - } + "properties": { + "join": { + "type": "join", + "relations": { + "question": "answer" + } + } + } } } -------------------------------------------------- diff --git a/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc b/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc index fc9765a330711..3859f1977ecbb 100644 --- a/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc @@ -8,20 +8,18 @@ Example: [source,js] -------------------------------------------------- -PUT /museums?include_type_name=true +PUT /museums { "mappings": { - "_doc": { - "properties": { - "location": { - "type": "geo_point" - } + "properties": { + "location": { + "type": "geo_point" } } } } -POST /museums/_doc/_bulk?refresh +POST /museums/_bulk?refresh {"index":{"_id":1}} 
{"location": "52.374081,4.912350", "name": "NEMO Science Museum"} {"index":{"_id":2}} diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index 485e900d9628c..c3b6a9bad4cfc 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -240,18 +240,16 @@ Let's see how it works with a real sample. Considering the following mapping: [source,js] -------------------------------------------------- -PUT /sales?include_type_name=true +PUT /sales { "mappings": { - "_doc" : { - "properties" : { - "tags" : { "type" : "keyword" }, - "comments" : { <1> - "type" : "nested", - "properties" : { - "username" : { "type" : "keyword" }, - "comment" : { "type" : "text" } - } + "properties" : { + "tags" : { "type" : "keyword" }, + "comments" : { <1> + "type" : "nested", + "properties" : { + "username" : { "type" : "keyword" }, + "comment" : { "type" : "text" } } } } diff --git a/docs/reference/analysis.asciidoc b/docs/reference/analysis.asciidoc index 11a8527bbc61a..0a3240df091da 100644 --- a/docs/reference/analysis.asciidoc +++ b/docs/reference/analysis.asciidoc @@ -39,15 +39,13 @@ Each <> field in a mapping can specify its own [source,js] ------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "properties": { - "title": { - "type": "text", - "analyzer": "standard" - } + "properties": { + "title": { + "type": "text", + "analyzer": "standard" } } } diff --git a/docs/reference/analysis/normalizers.asciidoc b/docs/reference/analysis/normalizers.asciidoc index 64e69c3f56bf4..ddf37acd67b12 100644 --- a/docs/reference/analysis/normalizers.asciidoc +++ b/docs/reference/analysis/normalizers.asciidoc @@ -23,7 +23,7 @@ to get one is by building a custom one. 
Custom normalizers take a list of char [source,js] -------------------------------- -PUT index?include_type_name=true +PUT index { "settings": { "analysis": { @@ -46,12 +46,10 @@ PUT index?include_type_name=true } }, "mappings": { - "_doc": { - "properties": { - "foo": { - "type": "keyword", - "normalizer": "my_normalizer" - } + "properties": { + "foo": { + "type": "keyword", + "normalizer": "my_normalizer" } } } diff --git a/docs/reference/analysis/testing.asciidoc b/docs/reference/analysis/testing.asciidoc index 8102978635865..aa8fa4f9ec0de 100644 --- a/docs/reference/analysis/testing.asciidoc +++ b/docs/reference/analysis/testing.asciidoc @@ -41,7 +41,7 @@ referred to when running the `analyze` API on a specific index: [source,js] ------------------------------------- -PUT my_index?include_type_name=true +PUT my_index { "settings": { "analysis": { @@ -58,12 +58,10 @@ PUT my_index?include_type_name=true } }, "mappings": { - "_doc": { - "properties": { - "my_text": { - "type": "text", - "analyzer": "std_folded" <2> - } + "properties": { + "my_text": { + "type": "text", + "analyzer": "std_folded" <2> } } } diff --git a/docs/reference/cat/fielddata.asciidoc b/docs/reference/cat/fielddata.asciidoc index 45615090dd074..ff71728a4f50b 100644 --- a/docs/reference/cat/fielddata.asciidoc +++ b/docs/reference/cat/fielddata.asciidoc @@ -9,19 +9,17 @@ on every data node in the cluster. 
Hidden setup snippet to build an index with fielddata so our results are real: [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "mappings": { - "_doc": { - "properties": { - "body": { - "type": "text", - "fielddata":true - }, - "soul": { - "type": "text", - "fielddata":true - } + "properties": { + "body": { + "type": "text", + "fielddata":true + }, + "soul": { + "type": "text", + "fielddata":true } } } diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index 47fee04609501..742e258ac65c4 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -107,21 +107,19 @@ Consider for instance the following mapping: [source,js] -------------------------------------------------- -PUT twitter?include_type_name=true +PUT twitter { "mappings": { - "_doc": { - "properties": { - "counter": { - "type": "integer", - "store": false - }, - "tags": { - "type": "keyword", - "store": true - } - } - } + "properties": { + "counter": { + "type": "integer", + "store": false + }, + "tags": { + "type": "keyword", + "store": true + } + } } } -------------------------------------------------- diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index deef09dbbd792..096e4371be99a 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -637,14 +637,12 @@ added a mapping value to pick up more fields from the data: [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "mappings": { - "_doc": { - "dynamic": false, <1> - "properties": { - "text": {"type": "text"} - } + "dynamic": false, <1> + "properties": { + "text": {"type": "text"} } } } diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 41cd8ce325e0d..5044e19278da9 100644 --- a/docs/reference/indices/aliases.asciidoc +++ 
b/docs/reference/indices/aliases.asciidoc @@ -142,14 +142,12 @@ exist in the mapping: [source,js] -------------------------------------------------- -PUT /test1?include_type_name=true +PUT /test1 { "mappings": { - "_doc": { - "properties": { - "user" : { - "type": "keyword" - } + "properties": { + "user" : { + "type": "keyword" } } } @@ -376,13 +374,11 @@ First create the index and add a mapping for the `user_id` field: [source,js] -------------------------------------------------- -PUT /users?include_type_name=true +PUT /users { "mappings" : { - "_doc" : { - "properties" : { - "user_id" : {"type" : "integer"} - } + "properties" : { + "user_id" : {"type" : "integer"} } } } @@ -416,13 +412,11 @@ Aliases can also be specified during <>: [source,js] -------------------------------------------------- -PUT /logs_20162801?include_type_name=true +PUT /logs_20162801 { "mappings" : { - "_doc" : { - "properties" : { - "year" : {"type" : "integer"} - } + "properties" : { + "year" : {"type" : "integer"} } }, "aliases" : { diff --git a/docs/reference/indices/get-field-mapping.asciidoc b/docs/reference/indices/get-field-mapping.asciidoc index e857c6994a529..13e80d5cc860e 100644 --- a/docs/reference/indices/get-field-mapping.asciidoc +++ b/docs/reference/indices/get-field-mapping.asciidoc @@ -9,19 +9,17 @@ For example, consider the following mapping: [source,js] -------------------------------------------------- -PUT publications?include_type_name=true +PUT publications { "mappings": { - "_doc": { - "properties": { - "id": { "type": "text" }, - "title": { "type": "text"}, - "abstract": { "type": "text"}, - "author": { - "properties": { - "id": { "type": "text" }, - "name": { "type": "text" } - } + "properties": { + "id": { "type": "text" }, + "title": { "type": "text"}, + "abstract": { "type": "text"}, + "author": { + "properties": { + "id": { "type": "text" }, + "name": { "type": "text" } } } } diff --git a/docs/reference/indices/put-mapping.asciidoc 
b/docs/reference/indices/put-mapping.asciidoc index 543497b8191dc..bdf899c6c8180 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -68,21 +68,19 @@ For example: [source,js] ----------------------------------- -PUT my_index?include_type_name=true <1> +PUT my_index <1> { "mappings": { - "_doc": { - "properties": { - "name": { - "properties": { - "first": { - "type": "text" - } + "properties": { + "name": { + "properties": { + "first": { + "type": "text" } - }, - "user_id": { - "type": "keyword" } + }, + "user_id": { + "type": "keyword" } } } diff --git a/docs/reference/mapping.asciidoc b/docs/reference/mapping.asciidoc index d7a8864431710..5469a063a0303 100644 --- a/docs/reference/mapping.asciidoc +++ b/docs/reference/mapping.asciidoc @@ -137,18 +137,16 @@ A mapping could be specified when creating an index, as follows: [source,js] --------------------------------------- -PUT my_index?include_type_name=true <1> +PUT my_index <1> { "mappings": { - "_doc": { <2> - "properties": { <3> - "title": { "type": "text" }, <4> - "name": { "type": "text" }, <4> - "age": { "type": "integer" }, <4> - "created": { - "type": "date", <4> - "format": "strict_date_optional_time||epoch_millis" - } + "properties": { <2> + "title": { "type": "text" }, <3> + "name": { "type": "text" }, <3> + "age": { "type": "integer" }, <3> + "created": { + "type": "date", <3> + "format": "strict_date_optional_time||epoch_millis" } } } @@ -156,9 +154,8 @@ PUT my_index?include_type_name=true <1> --------------------------------------- // CONSOLE <1> Create an index called `my_index`. -<2> Add a mapping type called `doc`. -<3> Specify fields or _properties_. -<4> Specify the data `type` and mapping for each field. +<2> Specify fields or _properties_. +<3> Specify the data `type` and mapping for each field. 
-- diff --git a/docs/reference/modules/scripting/fields.asciidoc b/docs/reference/modules/scripting/fields.asciidoc index d615038baa25f..681e8d6e54067 100644 --- a/docs/reference/modules/scripting/fields.asciidoc +++ b/docs/reference/modules/scripting/fields.asciidoc @@ -172,22 +172,20 @@ For instance: [source,js] ------------------------------- -PUT my_index?include_type_name=true +PUT my_index { "mappings": { - "_doc": { - "properties": { - "title": { <1> - "type": "text" - }, - "first_name": { - "type": "text", - "store": true - }, - "last_name": { - "type": "text", - "store": true - } + "properties": { + "title": { <1> + "type": "text" + }, + "first_name": { + "type": "text", + "store": true + }, + "last_name": { + "type": "text", + "store": true } } } diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index b67377edfe901..bcd2c297e5da3 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -84,14 +84,12 @@ The nested `inner_hits` can be used to include nested inner objects as inner hit [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "mappings": { - "_doc": { - "properties": { - "comments": { - "type": "nested" - } + "properties": { + "comments": { + "type": "nested" } } } @@ -210,14 +208,12 @@ including the source and solely rely on doc values fields. 
Like this: [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "mappings": { - "_doc": { - "properties": { - "comments": { - "type": "nested" - } + "properties": { + "comments": { + "type": "nested" } } } @@ -329,17 +325,15 @@ with the root hits then the following path can be defined: [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "mappings": { - "_doc": { - "properties": { - "comments": { - "type": "nested", - "properties": { - "votes": { - "type": "nested" - } + "properties": { + "comments": { + "type": "nested", + "properties": { + "votes": { + "type": "nested" } } } @@ -451,16 +445,14 @@ The parent/child `inner_hits` can be used to include parent or child: [source,js] -------------------------------------------------- -PUT test?include_type_name=true +PUT test { "mappings": { - "_doc": { - "properties": { - "my_join_field": { - "type": "join", - "relations": { - "my_parent": "my_child" - } + "properties": { + "my_join_field": { + "type": "join", + "relations": { + "my_parent": "my_child" } } } From ed297b7369e80a79ac5fe43164139ec271163933 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Jan 2019 08:20:05 -0500 Subject: [PATCH 46/71] Only update response headers if we have a new one (#37590) Currently when adding a response header, we do some de-duplication, and maybe drop the header on the floor if we have reached capacity. Yet, we still update the thread local tracking the response headers. This is really expensive because under the hood there is a shared reference that we synchronize on. In the case of a request processed across many shards in a tight loop, this contention can be detrimental to performance. We can avoid updating the thread local in these cases though, when the response header is duplicate of one that we have already seen, or when it's dropped on the floor. 
This commit addresses these performance issues by avoiding the unnecessary set. --- .../common/util/concurrent/ThreadContext.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 2c1011d1d9e53..bd3507ef7764a 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -323,7 +323,17 @@ public void addResponseHeader(final String key, final String value) { * @param uniqueValue the function that produces de-duplication values */ public void addResponseHeader(final String key, final String value, final Function uniqueValue) { - threadLocal.set(threadLocal.get().putResponse(key, value, uniqueValue, maxWarningHeaderCount, maxWarningHeaderSize)); + /* + * Updating the thread local is expensive due to a shared reference that we synchronize on, so we should only do it if the thread + * context struct changed. It will not change if we de-duplicate this value to an existing one, or if we don't add a new one because + * we have reached capacity. 
+ */ + final ThreadContextStruct current = threadLocal.get(); + final ThreadContextStruct maybeNext = + current.putResponse(key, value, uniqueValue, maxWarningHeaderCount, maxWarningHeaderSize); + if (current != maybeNext) { + threadLocal.set(maybeNext); + } } /** From 7597b7ce2bd280401fcbfbeb281dfbb205830d75 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 18 Jan 2019 17:06:40 +0200 Subject: [PATCH 47/71] Add validation for empty PutPrivilegeRequest (#37569) Return an error to the user if the put privilege api is called with an empty body (no privileges) Resolves: #37561 --- .../privilege/PutPrivilegesRequest.java | 50 ++++++++++--------- .../privilege/PutPrivilegesRequestTests.java | 3 ++ 2 files changed, 30 insertions(+), 23 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java index beba805f6df2f..651c695db6a41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java @@ -39,34 +39,38 @@ public PutPrivilegesRequest() { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - for (ApplicationPrivilegeDescriptor privilege : privileges) { - try { - ApplicationPrivilege.validateApplicationName(privilege.getApplication()); - } catch (IllegalArgumentException e) { - validationException = addValidationError(e.getMessage(), validationException); - } - try { - ApplicationPrivilege.validatePrivilegeName(privilege.getName()); - } catch (IllegalArgumentException e) { - validationException = addValidationError(e.getMessage(), validationException); - } - if (privilege.getActions().isEmpty()) { - validationException 
= addValidationError("Application privileges must have at least one action", validationException); - } - for (String action : privilege.getActions()) { - if (action.indexOf('/') == -1 && action.indexOf('*') == -1 && action.indexOf(':') == -1) { - validationException = addValidationError("action [" + action + "] must contain one of [ '/' , '*' , ':' ]", - validationException); + if (privileges.isEmpty()) { + validationException = addValidationError("At least one application privilege must be provided", validationException); + } else { + for (ApplicationPrivilegeDescriptor privilege : privileges) { + try { + ApplicationPrivilege.validateApplicationName(privilege.getApplication()); + } catch (IllegalArgumentException e) { + validationException = addValidationError(e.getMessage(), validationException); } try { - ApplicationPrivilege.validatePrivilegeOrActionName(action); + ApplicationPrivilege.validatePrivilegeName(privilege.getName()); } catch (IllegalArgumentException e) { validationException = addValidationError(e.getMessage(), validationException); } - } - if (MetadataUtils.containsReservedMetadata(privilege.getMetadata())) { - validationException = addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX - + "] (in privilege " + privilege.getApplication() + ' ' + privilege.getName() + ")", validationException); + if (privilege.getActions().isEmpty()) { + validationException = addValidationError("Application privileges must have at least one action", validationException); + } + for (String action : privilege.getActions()) { + if (action.indexOf('/') == -1 && action.indexOf('*') == -1 && action.indexOf(':') == -1) { + validationException = addValidationError("action [" + action + "] must contain one of [ '/' , '*' , ':' ]", + validationException); + } + try { + ApplicationPrivilege.validatePrivilegeOrActionName(action); + } catch (IllegalArgumentException e) { + validationException = addValidationError(e.getMessage(), 
validationException); + } + } + if (MetadataUtils.containsReservedMetadata(privilege.getMetadata())) { + validationException = addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX + + "] (in privilege " + privilege.getApplication() + ' ' + privilege.getName() + ")", validationException); + } } } return validationException; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java index e258efd04c5ec..e1bdc7687e3e2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java @@ -74,6 +74,9 @@ public void testValidation() { assertValidationFailure(request(wildcardApp, numericName, reservedMetadata, badAction), "Application names may not contain", "Application privilege names must match", "metadata keys may not start", "must contain one of"); + + // Empty request + assertValidationFailure(new PutPrivilegesRequest(), "At least one application privilege must be provided"); } private ApplicationPrivilegeDescriptor descriptor(String application, String name, String... actions) { From 604422c6c51b5095e6b1d463d31916441661fb87 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 18 Jan 2019 09:20:32 -0600 Subject: [PATCH 48/71] Update Execute Watch to allow unknown fields (#37498) ExecuteWatchResponse did not allow unknown fields. This commit fixes the test and ConstructingObjectParser such that it does now allow unknown fields. It also creates a new client side test for the response. 
Relates #36938 --- .../client/watcher/ExecuteWatchResponse.java | 3 +- .../watcher/ExecuteWatchResponseTests.java | 115 ++++++++++++++++++ 2 files changed, 116 insertions(+), 2 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ExecuteWatchResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ExecuteWatchResponse.java index cf5313d56ae57..7f0a7a26182e9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ExecuteWatchResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/ExecuteWatchResponse.java @@ -86,7 +86,7 @@ public Map getRecordAsMap() { } private static final ConstructingObjectParser PARSER - = new ConstructingObjectParser<>("x_pack_execute_watch_response", false, + = new ConstructingObjectParser<>("x_pack_execute_watch_response", true, (fields) -> new ExecuteWatchResponse((String)fields[0], (BytesReference) fields[1])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), ID_FIELD); @@ -103,5 +103,4 @@ private static BytesReference readBytesReference(XContentParser parser) throws I return BytesReference.bytes(builder); } } - } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java new file mode 100644 index 0000000000000..3e0ef4c8a5e5f --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/ExecuteWatchResponseTests.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.watcher; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ObjectPath; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; +import static org.hamcrest.Matchers.is; + +public class ExecuteWatchResponseTests extends ESTestCase { + + public static final String WATCH_ID_VALUE = "my_watch"; + public static final String NODE_VALUE = "my_node"; + public static final String TRIGGER_TYPE_VALUE = "manual"; + public static final String STATE_VALUE = "executed"; + public static final String STATE_KEY = "state"; + public static final String TRIGGER_EVENT_KEY = "trigger_event"; + public static final String TRIGGER_EVENT_TYPE_KEY = "type"; + public static final String MESSAGES_KEY = "messages"; + public static final String NODE_KEY = "node"; + public static final String WATCH_ID_KEY = "watch_id"; + + public void testFromXContent() throws IOException { + xContentTester(this::createParser, + 
ExecuteWatchResponseTests::createTestInstance, + this::toXContent, + ExecuteWatchResponse::fromXContent) + .supportsUnknownFields(true) + .assertEqualsConsumer(this::assertEqualInstances) + .assertToXContentEquivalence(false) + .test(); + } + + private void assertEqualInstances(ExecuteWatchResponse expected, ExecuteWatchResponse actual) { + assertThat(expected.getRecordId(), is(actual.getRecordId())); + + // This may have extra json, so lets just assume that if all of the original fields from the creation are there, then its equal + // This is the same code that is in createTestInstance in this class. + Map actualMap = actual.getRecordAsMap(); + assertThat(ObjectPath.eval(WATCH_ID_KEY, actualMap), is(WATCH_ID_VALUE)); + assertThat(ObjectPath.eval(NODE_KEY, actualMap), is(NODE_VALUE)); + List messages = ObjectPath.eval(MESSAGES_KEY, actualMap); + assertThat(messages.size(), is(0)); + assertThat(ObjectPath.eval(TRIGGER_EVENT_KEY + "." + TRIGGER_EVENT_TYPE_KEY, actualMap), is(TRIGGER_TYPE_VALUE)); + assertThat(ObjectPath.eval(STATE_KEY, actualMap), is(STATE_VALUE)); + } + + private XContentBuilder toXContent(BytesReference bytes, XContentBuilder builder) throws IOException { + // EMPTY is safe here because we never use namedObject + try (InputStream stream = bytes.streamInput(); + XContentParser parser = createParser(JsonXContent.jsonXContent, stream)) { + parser.nextToken(); + builder.generator().copyCurrentStructure(parser); + return builder; + } + } + + private XContentBuilder toXContent(ExecuteWatchResponse response, XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field("_id", response.getRecordId()); + builder.field("watch_record"); + toXContent(response.getRecord(), builder); + return builder.endObject(); + } + + private static ExecuteWatchResponse createTestInstance() { + String id = "my_watch_0-2015-06-02T23:17:55.124Z"; + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + 
builder.field(WATCH_ID_KEY, WATCH_ID_VALUE); + builder.field(NODE_KEY, NODE_VALUE); + builder.startArray(MESSAGES_KEY); + builder.endArray(); + builder.startObject(TRIGGER_EVENT_KEY); + builder.field(TRIGGER_EVENT_TYPE_KEY, TRIGGER_TYPE_VALUE); + builder.endObject(); + builder.field(STATE_KEY, STATE_VALUE); + builder.endObject(); + BytesReference bytes = BytesReference.bytes(builder); + return new ExecuteWatchResponse(id, bytes); + } + catch (IOException e) { + throw new AssertionError(e); + } + } +} From 377d96e376a9527c70804a4e7fbf5918b97d41ac Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 18 Jan 2019 16:36:42 +0100 Subject: [PATCH 49/71] Remove initial_master_nodes on node restart (#37580) Some tests (e.g. testRestoreIndexWithShardsMissingInLocalGateway) were split-braining since being switched to Zen2 because the bootstrap setting was left around when nodes got restarted with data folders wiped. The test in question here was starting one node (which autobootstrapped to that single node), then another node. The first node was then shut down (after excluding it from the voting configuration), its data folder wiped, and restarted. After restart, the node had an empty data folder yet initial_master_nodes set to itself (i.e. same name). This made the node sometimes form a cluster of its own, and not rejoin the existing cluster with the other node. 
--- .../elasticsearch/gateway/QuorumGatewayIT.java | 6 ------ .../DedicatedClusterSnapshotRestoreIT.java | 3 +-- .../elasticsearch/test/InternalTestCluster.java | 17 +++++++---------- 3 files changed, 8 insertions(+), 18 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java index 6483c040a713a..fba9a005c539d 100644 --- a/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -66,11 +65,6 @@ public void testQuorumRecovery() throws Exception { } logger.info("--> restart all nodes"); internalCluster().fullRestart(new RestartCallback() { - @Override - public Settings onNodeStopped(String nodeName) throws Exception { - return null; - } - @Override public void doAfterNodes(int numNodes, final Client activeClient) throws Exception { if (numNodes == 1) { diff --git a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 554231982dac1..d429145e32948 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -661,8 +661,7 @@ public void testRestoreIndexWithShardsMissingInLocalGateway() throws Exception { .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), 
EnableAllocationDecider.Rebalance.NONE) .build(); - internalCluster().startNode(nodeSettings); - internalCluster().startNode(nodeSettings); + internalCluster().startNodes(2, nodeSettings); cluster().wipeIndices("_all"); logger.info("--> create repository"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 70d3c95eb7f7d..012c574f9e6aa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -929,13 +929,15 @@ Settings closeForRestart(RestartCallback callback, int minMasterNodes) throws Ex assert callback != null; close(); Settings callbackSettings = callback.onNodeStopped(name); + assert callbackSettings != null; Settings.Builder newSettings = Settings.builder(); - if (callbackSettings != null) { - newSettings.put(callbackSettings); - } + newSettings.put(callbackSettings); if (minMasterNodes >= 0) { assert DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(newSettings.build()) == false : "min master nodes is auto managed"; - newSettings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes).build(); + newSettings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes); + if (INITIAL_MASTER_NODES_SETTING.exists(callbackSettings) == false) { + newSettings.putList(INITIAL_MASTER_NODES_SETTING.getKey()); + } } // delete data folders now, before we start other nodes that may claim it clearDataIfNeeded(callback); @@ -1691,12 +1693,7 @@ public synchronized void restartNode(String nodeName, RestartCallback callback) } } - public static final RestartCallback EMPTY_CALLBACK = new RestartCallback() { - @Override - public Settings onNodeStopped(String node) { - return null; - } - }; + public static final RestartCallback EMPTY_CALLBACK = new RestartCallback(); /** * Restarts all nodes in the cluster. 
It first stops all nodes and then restarts all the nodes again. From 6846666b6bb4a10db7c6f653ba6f45ce5b0c283c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 18 Jan 2019 16:37:21 +0100 Subject: [PATCH 50/71] Add ccr follow info api (#37408) * Add ccr follow info api This api returns all follower indices and per follower index the provided parameters at put follow / resume follow time and whether index following is paused or active. Closes #37127 * iter * [DOCS] Edits the get follower info API * [DOCS] Fixes link to remote cluster * [DOCS] Clarifies descriptions for configured parameters --- docs/reference/ccr/apis/ccr-apis.asciidoc | 2 + .../ccr/apis/follow/get-follow-info.asciidoc | 169 +++++++ .../rest-api-spec/test/ccr/follow_info.yml | 75 ++++ .../java/org/elasticsearch/xpack/ccr/Ccr.java | 5 + .../ccr/action/TransportFollowInfoAction.java | 117 +++++ .../xpack/ccr/rest/RestFollowInfoAction.java | 39 ++ .../elasticsearch/xpack/ccr/AutoFollowIT.java | 50 ++- .../ccr/action/FollowInfoRequestTests.java | 25 ++ .../ccr/action/FollowInfoResponseTests.java | 147 ++++++ .../core/ccr/action/FollowInfoAction.java | 422 ++++++++++++++++++ .../rest-api-spec/api/ccr.follow_info.json | 16 + 11 files changed, 1048 insertions(+), 19 deletions(-) create mode 100644 docs/reference/ccr/apis/follow/get-follow-info.asciidoc create mode 100644 x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_info.yml create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoRequestTests.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoResponseTests.java create mode 100644 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_info.json diff --git a/docs/reference/ccr/apis/ccr-apis.asciidoc b/docs/reference/ccr/apis/ccr-apis.asciidoc index f0b23c410eae3..2009742c8322b 100644 --- a/docs/reference/ccr/apis/ccr-apis.asciidoc +++ b/docs/reference/ccr/apis/ccr-apis.asciidoc @@ -22,6 +22,7 @@ You can use the following APIs to perform {ccr} operations. * <> * <> * <> +* <> [float] [[ccr-api-auto-follow]] @@ -40,6 +41,7 @@ include::follow/post-pause-follow.asciidoc[] include::follow/post-resume-follow.asciidoc[] include::follow/post-unfollow.asciidoc[] include::follow/get-follow-stats.asciidoc[] +include::follow/get-follow-info.asciidoc[] // auto-follow include::auto-follow/put-auto-follow-pattern.asciidoc[] diff --git a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc new file mode 100644 index 0000000000000..22418db10887c --- /dev/null +++ b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc @@ -0,0 +1,169 @@ +[role="xpack"] +[testenv="platinum"] +[[ccr-get-follow-info]] +=== Get follower info API +++++ +Get follower info +++++ + +beta[] + +Retrieves information about all follower indices. + +==== Description + +This API lists the parameters and the status for each follower index. +For example, the results include follower index names, leader index names, +replication options and whether the follower indices are active or paused. 
+ +==== Request + +////////////////////////// + +[source,js] +-------------------------------------------------- +PUT /follower_index/_ccr/follow +{ + "remote_cluster" : "remote_cluster", + "leader_index" : "leader_index" +} +-------------------------------------------------- +// CONSOLE +// TESTSETUP +// TEST[setup:remote_cluster_and_leader_index] + +[source,js] +-------------------------------------------------- +POST /follower_index/_ccr/pause_follow +-------------------------------------------------- +// CONSOLE +// TEARDOWN + +////////////////////////// + +[source,js] +-------------------------------------------------- +GET //_ccr/info +-------------------------------------------------- +// CONSOLE +// TEST[s//follower_index/] + +==== Path Parameters +`index` :: + (string) A comma-delimited list of follower index patterns + +==== Results + +This API returns the following information: + +`follower_indices`:: + (array) An array of follower index statistics + +The `indices` array consists of objects containing several fields: + +`indices[].follower_index`:: + (string) The name of the follower index + +`indices[].remote_cluster`:: + (string) The <> that contains the + leader index + +`indices[].leader_index`:: + (string) The name of the index in the leader cluster that is followed + +`indices[].status`:: + (string) Whether index following is `active` or `paused` + +`indices[].parameters`:: + (object) An object that encapsulates {ccr} parameters + +The `parameters` contains the following fields: + +`indices[].parameters.max_read_request_operation_count`:: + (integer) The maximum number of operations to pull per read from the remote + cluster + +`indices[].parameters.max_outstanding_read_requests`:: + (long) The maximum number of outstanding read requests from the remote cluster + +`indices[].parameters.max_read_request_size`:: + (<>) The maximum size in bytes of per read of a batch + of operations pulled from the remote cluster + 
+`indices[].parameters.max_write_request_operation_count`:: + (integer) The maximum number of operations per bulk write request executed on + the follower + +`indices[].parameters.max_write_request_size`:: + (<>) The maximum total bytes of operations per bulk + write request executed on the follower + +`indices[].parameters.max_outstanding_write_requests`:: + (integer) The maximum number of outstanding write requests on the follower + +`indices[].parameters.max_write_buffer_count`:: + (integer) The maximum number of operations that can be queued for writing. + When this limit is reached, reads from the remote cluster are deferred until + the number of queued operations goes below the limit + +`indices[].parameters.max_write_buffer_size`:: + (<>) The maximum total bytes of operations that can be + queued for writing. When this limit is reached, reads from the remote cluster + are deferred until the total bytes of queued operations goes below the limit + +`indices[].parameters.max_retry_delay`:: + (<>) The maximum time to wait before retrying an + operation that failed exceptionally. An exponential backoff strategy is + employed when retrying + +`indices[].parameters.read_poll_timeout`:: + (<>) The maximum time to wait for new operations on the + remote cluster when the follower index is synchronized with the leader index. + When the timeout has elapsed, the poll for operations returns to the follower + so that it can update some statistics, then the follower immediately attempts + to read from the leader again + +==== Authorization + +If the {es} {security-features} are enabled, you must have `monitor` cluster +privileges. For more information, see +{stack-ov}/security-privileges.html[Security privileges]. 
+ +==== Example + +This example retrieves follower info: + +[source,js] +-------------------------------------------------- +GET /follower_index/_ccr/info +-------------------------------------------------- +// CONSOLE + +The API returns the following results: + +[source,js] +-------------------------------------------------- +{ + "follower_indices" : [ + { + "follower_index" : "follower_index", + "remote_cluster" : "remote_cluster", + "leader_index" : "leader_index", + "status" : "active", + "parameters" : { + "max_read_request_operation_count" : 5120, + "max_read_request_size" : "32mb", + "max_outstanding_read_requests" : 12, + "max_write_request_operation_count" : 5120, + "max_write_request_size" : "9223372036854775807b", + "max_outstanding_write_requests" : 9, + "max_write_buffer_count" : 2147483647, + "max_write_buffer_size" : "512mb", + "max_retry_delay" : "500ms", + "read_poll_timeout" : "1m" + } + } + ] +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_info.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_info.yml new file mode 100644 index 0000000000000..f1e47d830cf97 --- /dev/null +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_info.yml @@ -0,0 +1,75 @@ +--- +"Test info": + - do: + cluster.state: {} + + - set: {master_node: master} + + - do: + nodes.info: {} + + - set: {nodes.$master.transport_address: local_ip} + + - do: + cluster.put_settings: + body: + transient: + cluster.remote.local.seeds: $local_ip + flat_settings: true + + - match: {transient: {cluster.remote.local.seeds: $local_ip}} + + - do: + indices.create: + index: foo + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 + soft_deletes: + enabled: true + + - do: + ccr.follow: + index: bar + body: + remote_cluster: local + leader_index: foo + - is_true: follow_index_created + - is_true: 
follow_index_shards_acked + - is_true: index_following_started + + - do: + ccr.follow_info: + index: bar + - length: {follower_indices: 1} + - match: {follower_indices.0.follower_index: "bar"} + - match: {follower_indices.0.remote_cluster: "local"} + - match: {follower_indices.0.leader_index: "foo"} + - match: {follower_indices.0.status: "active"} + - match: {follower_indices.0.parameters.max_read_request_operation_count: 5120} + - match: {follower_indices.0.parameters.max_read_request_size: "32mb"} + - match: {follower_indices.0.parameters.max_outstanding_read_requests: 12} + - match: {follower_indices.0.parameters.max_write_request_operation_count: 5120} + - match: {follower_indices.0.parameters.max_write_request_size: "9223372036854775807b"} + - match: {follower_indices.0.parameters.max_outstanding_write_requests: 9} + - match: {follower_indices.0.parameters.max_write_buffer_count: 2147483647,} + - match: {follower_indices.0.parameters.max_write_buffer_size: "512mb"} + - match: {follower_indices.0.parameters.max_retry_delay: "500ms"} + - match: {follower_indices.0.parameters.read_poll_timeout: "1m"} + + - do: + ccr.pause_follow: + index: bar + - is_true: acknowledged + + - do: + ccr.follow_info: + index: bar + - length: {follower_indices: 1} + - match: {follower_indices.0.follower_index: "bar"} + - match: {follower_indices.0.remote_cluster: "local"} + - match: {follower_indices.0.leader_index: "foo"} + - match: {follower_indices.0.status: "paused"} + - is_false: follower_indices.0.parameters diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 4a7f9600ffa42..6ff0460d51bc2 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.ccr.action.ShardFollowTasksExecutor; import 
org.elasticsearch.xpack.ccr.action.TransportCcrStatsAction; import org.elasticsearch.xpack.ccr.action.TransportDeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.ccr.action.TransportFollowInfoAction; import org.elasticsearch.xpack.ccr.action.TransportFollowStatsAction; import org.elasticsearch.xpack.ccr.action.TransportGetAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.TransportPauseFollowAction; @@ -69,6 +70,7 @@ import org.elasticsearch.xpack.ccr.repository.CcrRestoreSourceService; import org.elasticsearch.xpack.ccr.rest.RestCcrStatsAction; import org.elasticsearch.xpack.ccr.rest.RestDeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.ccr.rest.RestFollowInfoAction; import org.elasticsearch.xpack.ccr.rest.RestFollowStatsAction; import org.elasticsearch.xpack.ccr.rest.RestGetAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestPauseFollowAction; @@ -80,6 +82,7 @@ import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; @@ -202,6 +205,7 @@ public List> getPersistentTasksExecutor(ClusterServic // stats action new ActionHandler<>(FollowStatsAction.INSTANCE, TransportFollowStatsAction.class), new ActionHandler<>(CcrStatsAction.INSTANCE, TransportCcrStatsAction.class), + new ActionHandler<>(FollowInfoAction.INSTANCE, TransportFollowInfoAction.class), // follow actions new ActionHandler<>(PutFollowAction.INSTANCE, TransportPutFollowAction.class), new ActionHandler<>(ResumeFollowAction.INSTANCE, TransportResumeFollowAction.class), @@ -225,6 +229,7 @@ public List getRestHandlers(Settings settings, 
RestController restC // stats API new RestFollowStatsAction(settings, restController), new RestCcrStatsAction(settings, restController), + new RestFollowInfoAction(settings, restController), // follow APIs new RestPutFollowAction(settings, restController), new RestResumeFollowAction(settings, restController), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java new file mode 100644 index 0000000000000..3e9c0ecbef881 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportFollowInfoAction.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ccr.Ccr; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; +import 
org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FollowParameters; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FollowerInfo; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.Status; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +public class TransportFollowInfoAction extends TransportMasterNodeReadAction { + + @Inject + public TransportFollowInfoAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + super(FollowInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, FollowInfoAction.Request::new, + indexNameExpressionResolver); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected FollowInfoAction.Response newResponse() { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + protected FollowInfoAction.Response read(StreamInput in) throws IOException { + return new FollowInfoAction.Response(in); + } + + @Override + protected void masterOperation(FollowInfoAction.Request request, + ClusterState state, + ActionListener listener) throws Exception { + + List concreteFollowerIndices = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(state, + IndicesOptions.STRICT_EXPAND_OPEN_CLOSED, request.getFollowerIndices())); + + + List followerInfos = new ArrayList<>(); + PersistentTasksCustomMetaData persistentTasks = state.metaData().custom(PersistentTasksCustomMetaData.TYPE); + + for (IndexMetaData indexMetaData : state.metaData()) { + Map ccrCustomData = indexMetaData.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY); + if (ccrCustomData != null) { + Optional result; + if (persistentTasks != null) { + result = 
persistentTasks.taskMap().values().stream() + .map(persistentTask -> (ShardFollowTask) persistentTask.getParams()) + .filter(shardFollowTask -> concreteFollowerIndices.isEmpty() || + concreteFollowerIndices.contains(shardFollowTask.getFollowShardId().getIndexName())) + .findAny(); + } else { + result = Optional.empty(); + } + + String followerIndex = indexMetaData.getIndex().getName(); + String remoteCluster = ccrCustomData.get(Ccr.CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY); + String leaderIndex = ccrCustomData.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY); + if (result.isPresent()) { + ShardFollowTask params = result.get(); + FollowParameters followParameters = new FollowParameters( + params.getMaxReadRequestOperationCount(), + params.getMaxReadRequestSize(), + params.getMaxOutstandingReadRequests(), + params.getMaxWriteRequestOperationCount(), + params.getMaxWriteRequestSize(), + params.getMaxOutstandingWriteRequests(), + params.getMaxWriteBufferCount(), + params.getMaxWriteBufferSize(), + params.getMaxRetryDelay(), + params.getReadPollTimeout() + ); + followerInfos.add(new FollowerInfo(followerIndex, remoteCluster, leaderIndex, Status.ACTIVE, followParameters)); + } else { + followerInfos.add(new FollowerInfo(followerIndex, remoteCluster, leaderIndex, Status.PAUSED, null)); + } + } + } + + listener.onResponse(new FollowInfoAction.Response(followerInfos)); + } + + @Override + protected ClusterBlockException checkBlock(FollowInfoAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java new file mode 100644 index 0000000000000..f2e256bf5f88c --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java @@ -0,0 +1,39 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ccr.rest; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; + +import java.io.IOException; + +public class RestFollowInfoAction extends BaseRestHandler { + + public RestFollowInfoAction(final Settings settings, final RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, "/{index}/_ccr/info", this); + } + + @Override + public String getName() { + return "ccr_follower_info"; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { + final FollowInfoAction.Request request = new FollowInfoAction.Request(); + request.setFollowerIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); + return channel -> client.execute(FollowInfoAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index 70f624392367a..4025f647cb2a6 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -16,14 +16,16 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import 
org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.CcrIntegTestCase; -import org.elasticsearch.xpack.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FollowParameters; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FollowerInfo; import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; import java.util.Arrays; @@ -223,42 +225,52 @@ public void testAutoFollowParameterAreDelegated() throws Exception { createLeaderIndex("logs-201901", leaderIndexSettings); assertBusy(() -> { - PersistentTasksCustomMetaData persistentTasksMetaData = - followerClient().admin().cluster().prepareState().get().getState().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - assertThat(persistentTasksMetaData, notNullValue()); - assertThat(persistentTasksMetaData.tasks().size(), equalTo(1)); - ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTasksMetaData.tasks().iterator().next().getParams(); - assertThat(shardFollowTask.getLeaderShardId().getIndexName(), equalTo("logs-201901")); - assertThat(shardFollowTask.getFollowShardId().getIndexName(), equalTo("copy-logs-201901")); + FollowInfoAction.Request followInfoRequest = new FollowInfoAction.Request(); + followInfoRequest.setFollowerIndices("copy-logs-201901"); + FollowInfoAction.Response followInfoResponse; + try { + followInfoResponse = followerClient().execute(FollowInfoAction.INSTANCE, followInfoRequest).actionGet(); + 
} catch (IndexNotFoundException e) { + throw new AssertionError(e); + } + + assertThat(followInfoResponse.getFollowInfos().size(), equalTo(1)); + FollowerInfo followerInfo = followInfoResponse.getFollowInfos().get(0); + assertThat(followerInfo.getFollowerIndex(), equalTo("copy-logs-201901")); + assertThat(followerInfo.getRemoteCluster(), equalTo("leader_cluster")); + assertThat(followerInfo.getLeaderIndex(), equalTo("logs-201901")); + + FollowParameters followParameters = followerInfo.getParameters(); + assertThat(followParameters, notNullValue()); if (request.getMaxWriteBufferCount() != null) { - assertThat(shardFollowTask.getMaxWriteBufferCount(), equalTo(request.getMaxWriteBufferCount())); + assertThat(followParameters.getMaxWriteBufferCount(), equalTo(request.getMaxWriteBufferCount())); } if (request.getMaxWriteBufferSize() != null) { - assertThat(shardFollowTask.getMaxWriteBufferSize(), equalTo(request.getMaxWriteBufferSize())); + assertThat(followParameters.getMaxWriteBufferSize(), equalTo(request.getMaxWriteBufferSize())); } if (request.getMaxConcurrentReadBatches() != null) { - assertThat(shardFollowTask.getMaxOutstandingReadRequests(), equalTo(request.getMaxConcurrentReadBatches())); + assertThat(followParameters.getMaxOutstandingReadRequests(), equalTo(request.getMaxConcurrentReadBatches())); } if (request.getMaxConcurrentWriteBatches() != null) { - assertThat(shardFollowTask.getMaxOutstandingWriteRequests(), equalTo(request.getMaxConcurrentWriteBatches())); + assertThat(followParameters.getMaxOutstandingWriteRequests(), equalTo(request.getMaxConcurrentWriteBatches())); } if (request.getMaxReadRequestOperationCount() != null) { - assertThat(shardFollowTask.getMaxReadRequestOperationCount(), equalTo(request.getMaxReadRequestOperationCount())); + assertThat(followParameters.getMaxReadRequestOperationCount(), equalTo(request.getMaxReadRequestOperationCount())); } if (request.getMaxReadRequestSize() != null) { - 
assertThat(shardFollowTask.getMaxReadRequestSize(), equalTo(request.getMaxReadRequestSize())); + assertThat(followParameters.getMaxReadRequestSize(), equalTo(request.getMaxReadRequestSize())); } if (request.getMaxRetryDelay() != null) { - assertThat(shardFollowTask.getMaxRetryDelay(), equalTo(request.getMaxRetryDelay())); + assertThat(followParameters.getMaxRetryDelay(), equalTo(request.getMaxRetryDelay())); } if (request.getReadPollTimeout() != null) { - assertThat(shardFollowTask.getReadPollTimeout(), equalTo(request.getReadPollTimeout())); + assertThat(followParameters.getReadPollTimeout(), equalTo(request.getReadPollTimeout())); } if (request.getMaxWriteRequestOperationCount() != null) { - assertThat(shardFollowTask.getMaxWriteRequestOperationCount(), equalTo(request.getMaxWriteRequestOperationCount())); + assertThat(followParameters.getMaxWriteRequestOperationCount(), equalTo(request.getMaxWriteRequestOperationCount())); } if (request.getMaxWriteRequestSize() != null) { - assertThat(shardFollowTask.getMaxWriteRequestSize(), equalTo(request.getMaxWriteRequestSize())); + assertThat(followParameters.getMaxWriteRequestSize(), equalTo(request.getMaxWriteRequestSize())); } }); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoRequestTests.java new file mode 100644 index 0000000000000..d235d956e01d5 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoRequestTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; + +public class FollowInfoRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return FollowInfoAction.Request::new; + } + + @Override + protected FollowInfoAction.Request createTestInstance() { + FollowInfoAction.Request request = new FollowInfoAction.Request(); + request.setFollowerIndices(generateRandomStringArray(4, 4, true, false)); + return request; + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoResponseTests.java new file mode 100644 index 0000000000000..d21098506a121 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowInfoResponseTests.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; +import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FollowerInfo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FOLLOWER_INDICES_FIELD; +import static org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.FollowParameters; +import static org.elasticsearch.xpack.core.ccr.action.FollowInfoAction.Response.Status; + +public class FollowInfoResponseTests extends AbstractSerializingTestCase { + + static final ConstructingObjectParser PARAMETERS_PARSER = new ConstructingObjectParser<>( + "parameters_parser", + args -> { + return new FollowParameters( + (Integer) args[0], + (ByteSizeValue) args[1], + (Integer) args[2], + (Integer) args[3], + (ByteSizeValue) args[4], + (Integer) args[5], + (Integer) args[6], + (ByteSizeValue) args[7], + (TimeValue) args[8], + (TimeValue) args[9] + ); + }); + + static { + PARAMETERS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), ShardFollowTask.MAX_READ_REQUEST_OPERATION_COUNT); + PARAMETERS_PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), ShardFollowTask.MAX_READ_REQUEST_SIZE.getPreferredName()), + ShardFollowTask.MAX_READ_REQUEST_SIZE, + ObjectParser.ValueType.STRING); + PARAMETERS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), ShardFollowTask.MAX_OUTSTANDING_READ_REQUESTS); + 
PARAMETERS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), ShardFollowTask.MAX_WRITE_REQUEST_OPERATION_COUNT); + PARAMETERS_PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), ShardFollowTask.MAX_WRITE_REQUEST_SIZE.getPreferredName()), + ShardFollowTask.MAX_WRITE_REQUEST_SIZE, + ObjectParser.ValueType.STRING); + PARAMETERS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), ShardFollowTask.MAX_OUTSTANDING_WRITE_REQUESTS); + PARAMETERS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), ShardFollowTask.MAX_WRITE_BUFFER_COUNT); + PARAMETERS_PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), ShardFollowTask.MAX_WRITE_BUFFER_SIZE.getPreferredName()), + ShardFollowTask.MAX_WRITE_BUFFER_SIZE, + ObjectParser.ValueType.STRING); + PARAMETERS_PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> TimeValue.parseTimeValue(p.text(), ShardFollowTask.MAX_RETRY_DELAY.getPreferredName()), + ShardFollowTask.MAX_RETRY_DELAY, + ObjectParser.ValueType.STRING); + PARAMETERS_PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> TimeValue.parseTimeValue(p.text(), ShardFollowTask.READ_POLL_TIMEOUT.getPreferredName()), + ShardFollowTask.READ_POLL_TIMEOUT, + ObjectParser.ValueType.STRING); + } + + static final ConstructingObjectParser INFO_PARSER = new ConstructingObjectParser<>( + "info_parser", + args -> { + return new FollowerInfo( + (String) args[0], + (String) args[1], + (String) args[2], + Status.fromString((String) args[3]), + (FollowParameters) args[4] + ); + }); + + static { + INFO_PARSER.declareString(ConstructingObjectParser.constructorArg(), FollowerInfo.FOLLOWER_INDEX_FIELD); + INFO_PARSER.declareString(ConstructingObjectParser.constructorArg(), FollowerInfo.REMOTE_CLUSTER_FIELD); + INFO_PARSER.declareString(ConstructingObjectParser.constructorArg(), 
FollowerInfo.LEADER_INDEX_FIELD); + INFO_PARSER.declareString(ConstructingObjectParser.constructorArg(), FollowerInfo.STATUS_FIELD); + INFO_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), PARAMETERS_PARSER, FollowerInfo.PARAMETERS_FIELD); + } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "response", + args -> { + return new FollowInfoAction.Response( + (List) args[0] + ); + }); + + static { + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), INFO_PARSER, FOLLOWER_INDICES_FIELD); + } + + @Override + protected FollowInfoAction.Response doParseInstance(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + + @Override + protected Writeable.Reader instanceReader() { + return FollowInfoAction.Response::new; + } + + @Override + protected FollowInfoAction.Response createTestInstance() { + int numInfos = randomIntBetween(0, 32); + List infos = new ArrayList<>(numInfos); + for (int i = 0; i < numInfos; i++) { + FollowParameters followParameters = null; + if (randomBoolean()) { + followParameters = new FollowParameters( + randomIntBetween(0, Integer.MAX_VALUE), + new ByteSizeValue(randomNonNegativeLong()), + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + new ByteSizeValue(randomNonNegativeLong()), + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + new ByteSizeValue(randomNonNegativeLong()), + new TimeValue(randomNonNegativeLong()), + new TimeValue(randomNonNegativeLong()) + ); + } + + infos.add(new FollowerInfo(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4), + randomFrom(Status.values()), followParameters)); + } + return new FollowInfoAction.Response(infos); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java new file mode 100644 index 0000000000000..11d4f22e1b7a8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java @@ -0,0 +1,422 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ccr.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_OUTSTANDING_READ_REQUESTS; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_OUTSTANDING_WRITE_REQUESTS; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_READ_REQUEST_OPERATION_COUNT; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_READ_REQUEST_SIZE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_RETRY_DELAY_FIELD; +import static 
org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_COUNT; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_SIZE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_REQUEST_OPERATION_COUNT; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_REQUEST_SIZE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.READ_POLL_TIMEOUT; + +public class FollowInfoAction extends Action { + + public static final String NAME = "cluster:monitor/ccr/follow_info"; + + public static final FollowInfoAction INSTANCE = new FollowInfoAction(); + + private FollowInfoAction() { + super(NAME); + } + + @Override + public Response newResponse() { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return Response::new; + } + + public static class Request extends MasterNodeReadRequest { + + private String[] followerIndices; + + public Request() { + } + + public String[] getFollowerIndices() { + return followerIndices; + } + + public void setFollowerIndices(String... 
followerIndices) { + this.followerIndices = followerIndices; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public Request(StreamInput in) throws IOException { + super(in); + followerIndices = in.readOptionalStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalStringArray(followerIndices); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(followerIndices, request.followerIndices); + } + + @Override + public int hashCode() { + return Arrays.hashCode(followerIndices); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + public static final ParseField FOLLOWER_INDICES_FIELD = new ParseField("follower_indices"); + + private final List followInfos; + + public Response(List followInfos) { + this.followInfos = followInfos; + } + + public List getFollowInfos() { + return followInfos; + } + + public Response(StreamInput in) throws IOException { + super(in); + followInfos = in.readList(FollowerInfo::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeList(followInfos); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(FOLLOWER_INDICES_FIELD.getPreferredName()); + for (FollowerInfo followInfo : followInfos) { + followInfo.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(followInfos, response.followInfos); + } + + @Override + public 
int hashCode() { + return Objects.hash(followInfos); + } + + public String toString() { + return Strings.toString(this); + } + + public static class FollowerInfo implements Writeable, ToXContentObject { + + public static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index"); + public static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster"); + public static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index"); + public static final ParseField STATUS_FIELD = new ParseField("status"); + public static final ParseField PARAMETERS_FIELD = new ParseField("parameters"); + + private final String followerIndex; + private final String remoteCluster; + private final String leaderIndex; + private final Status status; + private final FollowParameters parameters; + + public FollowerInfo(String followerIndex, String remoteCluster, String leaderIndex, Status status, + FollowParameters parameters) { + this.followerIndex = followerIndex; + this.remoteCluster = remoteCluster; + this.leaderIndex = leaderIndex; + this.status = status; + this.parameters = parameters; + } + + public String getFollowerIndex() { + return followerIndex; + } + + public String getRemoteCluster() { + return remoteCluster; + } + + public String getLeaderIndex() { + return leaderIndex; + } + + public Status getStatus() { + return status; + } + + public FollowParameters getParameters() { + return parameters; + } + + FollowerInfo(StreamInput in) throws IOException { + followerIndex = in.readString(); + remoteCluster = in.readString(); + leaderIndex = in.readString(); + status = Status.fromString(in.readString()); + parameters = in.readOptionalWriteable(FollowParameters::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(followerIndex); + out.writeString(remoteCluster); + out.writeString(leaderIndex); + out.writeString(status.name); + out.writeOptionalWriteable(parameters); + } + + @Override + public 
XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex); + builder.field(REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster); + builder.field(LEADER_INDEX_FIELD.getPreferredName(), leaderIndex); + builder.field(STATUS_FIELD.getPreferredName(), status.name); + if (parameters != null) { + builder.startObject(PARAMETERS_FIELD.getPreferredName()); + { + builder.field(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), parameters.maxReadRequestOperationCount); + builder.field(MAX_READ_REQUEST_SIZE.getPreferredName(), parameters.maxReadRequestSize.getStringRep()); + builder.field(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), parameters.maxOutstandingReadRequests); + builder.field(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), parameters.maxWriteRequestOperationCount); + builder.field(MAX_WRITE_REQUEST_SIZE.getPreferredName(), parameters.maxWriteRequestSize.getStringRep()); + builder.field(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), parameters.maxOutstandingWriteRequests); + builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), parameters.maxWriteBufferCount); + builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), parameters.maxWriteBufferSize.getStringRep()); + builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), parameters.maxRetryDelay.getStringRep()); + builder.field(READ_POLL_TIMEOUT.getPreferredName(), parameters.readPollTimeout.getStringRep()); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FollowerInfo that = (FollowerInfo) o; + return Objects.equals(followerIndex, that.followerIndex) && + Objects.equals(remoteCluster, that.remoteCluster) && + Objects.equals(leaderIndex, that.leaderIndex) && + status == that.status && + 
Objects.equals(parameters, that.parameters); + } + + @Override + public int hashCode() { + return Objects.hash(followerIndex, remoteCluster, leaderIndex, status, parameters); + } + + public String toString() { + return Strings.toString(this); + } + } + + public static class FollowParameters implements Writeable { + + private final int maxReadRequestOperationCount; + private final ByteSizeValue maxReadRequestSize; + private final int maxOutstandingReadRequests; + private final int maxWriteRequestOperationCount; + private final ByteSizeValue maxWriteRequestSize; + private final int maxOutstandingWriteRequests; + private final int maxWriteBufferCount; + private final ByteSizeValue maxWriteBufferSize; + private final TimeValue maxRetryDelay; + private final TimeValue readPollTimeout; + + public FollowParameters(int maxReadRequestOperationCount, + ByteSizeValue maxReadRequestSize, int maxOutstandingReadRequests, + int maxWriteRequestOperationCount, ByteSizeValue maxWriteRequestSize, + int maxOutstandingWriteRequests, int maxWriteBufferCount, + ByteSizeValue maxWriteBufferSize, TimeValue maxRetryDelay, TimeValue readPollTimeout) { + this.maxReadRequestOperationCount = maxReadRequestOperationCount; + this.maxReadRequestSize = maxReadRequestSize; + this.maxOutstandingReadRequests = maxOutstandingReadRequests; + this.maxWriteRequestOperationCount = maxWriteRequestOperationCount; + this.maxWriteRequestSize = maxWriteRequestSize; + this.maxOutstandingWriteRequests = maxOutstandingWriteRequests; + this.maxWriteBufferCount = maxWriteBufferCount; + this.maxWriteBufferSize = maxWriteBufferSize; + this.maxRetryDelay = maxRetryDelay; + this.readPollTimeout = readPollTimeout; + } + + public int getMaxReadRequestOperationCount() { + return maxReadRequestOperationCount; + } + + public ByteSizeValue getMaxReadRequestSize() { + return maxReadRequestSize; + } + + public int getMaxOutstandingReadRequests() { + return maxOutstandingReadRequests; + } + + public int 
getMaxWriteRequestOperationCount() { + return maxWriteRequestOperationCount; + } + + public ByteSizeValue getMaxWriteRequestSize() { + return maxWriteRequestSize; + } + + public int getMaxOutstandingWriteRequests() { + return maxOutstandingWriteRequests; + } + + public int getMaxWriteBufferCount() { + return maxWriteBufferCount; + } + + public ByteSizeValue getMaxWriteBufferSize() { + return maxWriteBufferSize; + } + + public TimeValue getMaxRetryDelay() { + return maxRetryDelay; + } + + public TimeValue getReadPollTimeout() { + return readPollTimeout; + } + + FollowParameters(StreamInput in) throws IOException { + this.maxReadRequestOperationCount = in.readVInt(); + this.maxReadRequestSize = new ByteSizeValue(in); + this.maxOutstandingReadRequests = in.readVInt(); + this.maxWriteRequestOperationCount = in.readVInt(); + this.maxWriteRequestSize = new ByteSizeValue(in); + this.maxOutstandingWriteRequests = in.readVInt(); + this.maxWriteBufferCount = in.readVInt(); + this.maxWriteBufferSize = new ByteSizeValue(in); + this.maxRetryDelay = in.readTimeValue(); + this.readPollTimeout = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(maxReadRequestOperationCount); + maxReadRequestSize.writeTo(out); + out.writeVInt(maxOutstandingReadRequests); + out.writeVLong(maxWriteRequestOperationCount); + maxWriteRequestSize.writeTo(out); + out.writeVInt(maxOutstandingWriteRequests); + out.writeVInt(maxWriteBufferCount); + maxWriteBufferSize.writeTo(out); + out.writeTimeValue(maxRetryDelay); + out.writeTimeValue(readPollTimeout); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FollowParameters that = (FollowParameters) o; + return maxReadRequestOperationCount == that.maxReadRequestOperationCount && + maxOutstandingReadRequests == that.maxOutstandingReadRequests && + maxWriteRequestOperationCount == 
that.maxWriteRequestOperationCount && + maxOutstandingWriteRequests == that.maxOutstandingWriteRequests && + maxWriteBufferCount == that.maxWriteBufferCount && + Objects.equals(maxReadRequestSize, that.maxReadRequestSize) && + Objects.equals(maxWriteRequestSize, that.maxWriteRequestSize) && + Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) && + Objects.equals(maxRetryDelay, that.maxRetryDelay) && + Objects.equals(readPollTimeout, that.readPollTimeout); + } + + @Override + public int hashCode() { + return Objects.hash( + maxReadRequestOperationCount, + maxReadRequestSize, + maxOutstandingReadRequests, + maxWriteRequestOperationCount, + maxWriteRequestSize, + maxOutstandingWriteRequests, + maxWriteBufferCount, + maxWriteBufferSize, + maxRetryDelay, + readPollTimeout + ); + } + + } + + public enum Status { + + ACTIVE("active"), + PAUSED("paused"); + + private final String name; + + Status(String name) { + this.name = name; + } + + public static Status fromString(String value) { + switch (value) { + case "active": + return Status.ACTIVE; + case "paused": + return Status.PAUSED; + default: + throw new IllegalArgumentException("unexpected status value [" + value + "]"); + } + } + } + } + +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_info.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_info.json new file mode 100644 index 0000000000000..87fd387edc13a --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.follow_info.json @@ -0,0 +1,16 @@ +{ + "ccr.follow_info": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-get-follow-info.html", + "methods": [ "GET" ], + "url": { + "path": "/{index}/_ccr/info", + "paths": [ "/{index}/_ccr/info" ], + "parts": { + "index": { + "type": "list", + "description": "A comma-separated list of index patterns; use `_all` to perform the operation on all indices" + } + } + } + } +} From 
de55b4dfd1fe07d5b027a3a61e88a26796862153 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Fri, 18 Jan 2019 09:13:49 -0800 Subject: [PATCH 51/71] Add types deprecation to script contexts (#37554) This adds deprecation to _type in the script contexts for ingest and update. This adds a DeprecationMap that wraps the ctx Map containing _type for these specific contexts. --- .../ingest/common/ScriptProcessor.java | 16 ++++++- .../ingest/common/ScriptProcessorTests.java | 24 ++++++++++ ...AsyncBulkByScrollActionScriptTestCase.java | 7 ++- .../ingest/ConditionalProcessor.java | 16 ++++++- .../script/AbstractSortScript.java | 2 +- .../script/AggregationScript.java | 2 +- .../elasticsearch/script/DeprecationMap.java | 7 ++- .../org/elasticsearch/script/FieldScript.java | 2 +- .../org/elasticsearch/script/ScoreScript.java | 2 +- .../script/ScriptedMetricAggContexts.java | 2 +- .../script/TermsSetQueryScript.java | 2 +- .../elasticsearch/script/UpdateScript.java | 14 +++++- .../search/lookup/LeafDocLookup.java | 2 +- .../ingest/ConditionalProcessorTests.java | 48 +++++++++++++++++++ 14 files changed, 133 insertions(+), 13 deletions(-) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index e6ac9e71839ec..fcc6c68014207 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -30,6 +30,7 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.script.DeprecationMap; import org.elasticsearch.script.IngestScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptException; @@ -37,6 +38,8 @@ import java.io.InputStream; import java.util.Arrays; +import 
java.util.Collections; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; @@ -46,6 +49,16 @@ */ public final class ScriptProcessor extends AbstractProcessor { + private static final Map DEPRECATIONS; + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "_type", + "[types removal] Looking up doc types [_type] in scripts is deprecated." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + public static final String TYPE = "script"; private final Script script; @@ -72,7 +85,8 @@ public final class ScriptProcessor extends AbstractProcessor { @Override public IngestDocument execute(IngestDocument document) { IngestScript.Factory factory = scriptService.compile(script, IngestScript.CONTEXT); - factory.newInstance(script.getParams()).execute(document.getSourceAndMetadata()); + factory.newInstance(script.getParams()).execute( + new DeprecationMap(document.getSourceAndMetadata(), DEPRECATIONS, "script_processor")); CollectionUtils.ensureNoSelfReferences(document.getSourceAndMetadata(), "ingest script"); return document; } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 10fcf5fe602ad..2378827c9aac7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -74,4 +74,28 @@ Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine( assertThat(ingestDocument.getSourceAndMetadata(), hasKey("bytes_total")); assertThat(ingestDocument.getSourceAndMetadata().get("bytes_total"), is(randomBytesTotal)); } + + public void testTypeDeprecation() throws Exception { + String scriptName = "script"; + ScriptService scriptService = new ScriptService(Settings.builder().build(), + 
Collections.singletonMap( + Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine( + Script.DEFAULT_SCRIPT_LANG, + Collections.singletonMap( + scriptName, ctx -> { + ctx.get("_type"); + return null; + } + ), + Collections.emptyMap() + ) + ), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); + Script script = new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), script, scriptService); + processor.execute(ingestDocument); + assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated."); + } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index e838b89eb3848..d452ea23bc155 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -58,7 +58,7 @@ protected T applyScript(Consumer> UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) { @Override public void execute() { - scriptBody.accept(ctx); + scriptBody.accept(getCtx()); } };; when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory); @@ -67,6 +67,11 @@ public void execute() { return (result != null) ? 
(T) result.self() : null; } + public void testTypeDeprecation() { + applyScript((Map ctx) -> ctx.get("_type")); + assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated."); + } + public void testScriptAddingJunkToCtxIsError() { try { applyScript((Map ctx) -> ctx.put("junk", "junk")); diff --git a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java index 2493f291bcddf..d5d489ec0e626 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java @@ -23,6 +23,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; @@ -31,12 +32,24 @@ import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; import java.util.stream.Collectors; + +import org.elasticsearch.script.DeprecationMap; import org.elasticsearch.script.IngestConditionalScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; public class ConditionalProcessor extends AbstractProcessor { + private static final Map DEPRECATIONS; + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "_type", + "[types removal] Looking up doc types [_type] in scripts is deprecated." 
+ ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + static final String TYPE = "conditional"; private final Script condition; @@ -81,7 +94,8 @@ public IngestDocument execute(IngestDocument ingestDocument) throws Exception { boolean evaluate(IngestDocument ingestDocument) { IngestConditionalScript script = scriptService.compile(condition, IngestConditionalScript.CONTEXT).newInstance(condition.getParams()); - return script.execute(new UnmodifiableIngestData(ingestDocument.getSourceAndMetadata())); + return script.execute(new UnmodifiableIngestData( + new DeprecationMap(ingestDocument.getSourceAndMetadata(), DEPRECATIONS, "conditional-processor"))); } Processor getProcessor() { diff --git a/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java index 949ca5bdb7f0f..13b109766af83 100644 --- a/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java +++ b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java @@ -66,7 +66,7 @@ abstract class AbstractSortScript implements ScorerAware { this.leafLookup = lookup.getLeafSearchLookup(leafContext); Map parameters = new HashMap<>(params); parameters.putAll(leafLookup.asMap()); - this.params = new DeprecationMap(parameters, DEPRECATIONS); + this.params = new DeprecationMap(parameters, DEPRECATIONS, "sort-script"); } protected AbstractSortScript() { diff --git a/server/src/main/java/org/elasticsearch/script/AggregationScript.java b/server/src/main/java/org/elasticsearch/script/AggregationScript.java index 5d36b91785f8a..f48d097355110 100644 --- a/server/src/main/java/org/elasticsearch/script/AggregationScript.java +++ b/server/src/main/java/org/elasticsearch/script/AggregationScript.java @@ -71,7 +71,7 @@ public abstract class AggregationScript implements ScorerAware { private Object value; public AggregationScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { - this.params = 
new DeprecationMap(new HashMap<>(params), DEPRECATIONS); + this.params = new DeprecationMap(new HashMap<>(params), DEPRECATIONS, "aggregation-script"); this.leafLookup = lookup.getLeafSearchLookup(leafContext); this.params.putAll(leafLookup.asMap()); } diff --git a/server/src/main/java/org/elasticsearch/script/DeprecationMap.java b/server/src/main/java/org/elasticsearch/script/DeprecationMap.java index 5b14e2e3b1168..094baa9e0bdf1 100644 --- a/server/src/main/java/org/elasticsearch/script/DeprecationMap.java +++ b/server/src/main/java/org/elasticsearch/script/DeprecationMap.java @@ -35,9 +35,12 @@ public final class DeprecationMap implements Map { private final Map deprecations; - public DeprecationMap(Map delegate, Map deprecations) { + private final String logKeyPrefix; + + public DeprecationMap(Map delegate, Map deprecations, String logKeyPrefix) { this.delegate = delegate; this.deprecations = deprecations; + this.logKeyPrefix = logKeyPrefix; } @Override @@ -64,7 +67,7 @@ public boolean containsValue(final Object value) { public Object get(final Object key) { String deprecationMessage = deprecations.get(key); if (deprecationMessage != null) { - deprecationLogger.deprecated(deprecationMessage); + deprecationLogger.deprecatedAndMaybeLog(logKeyPrefix + "_" + key, deprecationMessage); } return delegate.get(key); } diff --git a/server/src/main/java/org/elasticsearch/script/FieldScript.java b/server/src/main/java/org/elasticsearch/script/FieldScript.java index 4f0e3c7229037..7707301ab4c1d 100644 --- a/server/src/main/java/org/elasticsearch/script/FieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/FieldScript.java @@ -63,7 +63,7 @@ public FieldScript(Map params, SearchLookup lookup, LeafReaderCo this.leafLookup = lookup.getLeafSearchLookup(leafContext); params = new HashMap<>(params); params.putAll(leafLookup.asMap()); - this.params = new DeprecationMap(params, DEPRECATIONS); + this.params = new DeprecationMap(params, DEPRECATIONS, "field-script"); } 
// for expression engine diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index 78d206888e998..6ac5935826bf7 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -73,7 +73,7 @@ public ScoreScript(Map params, SearchLookup lookup, LeafReaderCo this.leafLookup = lookup.getLeafSearchLookup(leafContext); params = new HashMap<>(params); params.putAll(leafLookup.asMap()); - this.params = new DeprecationMap(params, DEPRECATIONS); + this.params = new DeprecationMap(params, DEPRECATIONS, "score-script"); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index a17503a026f24..f1c3e996ec8be 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -95,7 +95,7 @@ public MapScript(Map params, Map state, SearchLo if (leafLookup != null) { params = new HashMap<>(params); // copy params so we aren't modifying input params.putAll(leafLookup.asMap()); // add lookup vars - params = new DeprecationMap(params, DEPRECATIONS); // wrap with deprecations + params = new DeprecationMap(params, DEPRECATIONS, "map-script"); // wrap with deprecations } this.params = params; } diff --git a/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java b/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java index 9462b6a8e811a..c651c3ce69041 100644 --- a/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java +++ b/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java @@ -64,7 +64,7 @@ public TermsSetQueryScript(Map params, SearchLookup lookup, Leaf Map parameters = new HashMap<>(params); this.leafLookup = 
lookup.getLeafSearchLookup(leafContext); parameters.putAll(leafLookup.asMap()); - this.params = new DeprecationMap(parameters, DEPRECATIONS); + this.params = new DeprecationMap(parameters, DEPRECATIONS, "term-set-query-script"); } protected TermsSetQueryScript() { diff --git a/server/src/main/java/org/elasticsearch/script/UpdateScript.java b/server/src/main/java/org/elasticsearch/script/UpdateScript.java index 9b9e79c7b74ba..765489b7e4449 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateScript.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java @@ -20,6 +20,8 @@ package org.elasticsearch.script; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; /** @@ -27,6 +29,16 @@ */ public abstract class UpdateScript { + private static final Map DEPRECATIONS; + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "_type", + "[types removal] Looking up doc types [_type] in scripts is deprecated." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + public static final String[] PARAMETERS = { }; /** The context used to compile {@link UpdateScript} factories. */ @@ -40,7 +52,7 @@ public abstract class UpdateScript { public UpdateScript(Map params, Map ctx) { this.params = params; - this.ctx = ctx; + this.ctx = new DeprecationMap(ctx, DEPRECATIONS, "update-script"); } /** Return the parameters for this script. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java index 17518b2f1f60f..04522834579e4 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java @@ -44,7 +44,7 @@ public class LeafDocLookup implements Map> { = new DeprecationLogger(LogManager.getLogger(LeafDocLookup.class)); static final String TYPES_DEPRECATION_KEY = "type-field-doc-lookup"; static final String TYPES_DEPRECATION_MESSAGE = - "[types removal] Looking up doc types in scripts is deprecated."; + "[types removal] Looking up doc types [_type] in scripts is deprecated."; private final Map> localCacheFieldData = new HashMap<>(4); diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index c5548ae559400..f484957d897f1 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -133,6 +133,54 @@ public void testActsOnImmutableData() throws Exception { assertMutatingCtxThrows(ctx -> ((List)ctx.get("listField")).remove("bar")); } + public void testTypeDeprecation() throws Exception { + String scriptName = "conditionalScript"; + ScriptService scriptService = new ScriptService(Settings.builder().build(), + Collections.singletonMap( + Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine( + Script.DEFAULT_SCRIPT_LANG, + Collections.singletonMap( + scriptName, ctx -> { + ctx.get("_type"); + return true; + } + ), + Collections.emptyMap() + ) + ), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); + + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1), 0L, 
TimeUnit.MILLISECONDS.toNanos(2)); + ConditionalProcessor processor = new ConditionalProcessor( + randomAlphaOfLength(10), + new Script( + ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, + scriptName, Collections.emptyMap()), scriptService, + new Processor() { + @Override + public IngestDocument execute(final IngestDocument ingestDocument){ + return ingestDocument; + } + + @Override + public String getType() { + return null; + } + + @Override + public String getTag() { + return null; + } + }, relativeTimeProvider); + + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + processor.execute(ingestDocument); + assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated."); + } + private static void assertMutatingCtxThrows(Consumer> mutation) throws Exception { String scriptName = "conditionalScript"; CompletableFuture expectedException = new CompletableFuture<>(); From 633bd09be055f89a42a89c132c2c4480978a035a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 18 Jan 2019 18:53:58 +0200 Subject: [PATCH 52/71] Nit in settings.gradle for Eclipse Fixes ``./gradlew eclipse` failure introduced in 6d99e790b3ee --- settings.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.gradle b/settings.gradle index 95285e20da6ce..d5eebd6f66fe2 100644 --- a/settings.gradle +++ b/settings.gradle @@ -96,7 +96,7 @@ if (isEclipse) { projects << 'libs:secure-sm-tests' projects << 'libs:grok-tests' projects << 'libs:geo-tests' - projects << 'libs:ssl-config' + projects << 'libs:ssl-config-tests' } include projects.toArray(new String[0]) From 54af8a4e7a51d5a078e07a5401a8bab4fa040687 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Fri, 18 Jan 2019 14:03:48 -0500 Subject: [PATCH 53/71] SQL: fix object extraction from sources (#37502) Throws an exception if hit extractor tries to retrieve unsupported object. 
For example, selecting "a" from `{"a": {"b": "c"}}` now throws an exception instead of returning null. Relates to #37364 --- .../search/extractor/FieldHitExtractor.java | 10 ++++++++-- .../extractor/FieldHitExtractorTests.java | 18 ++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index 3ecfbbadedc14..ecb61e686a109 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -166,8 +166,14 @@ Object extractFromSource(Map map) { sj.add(path[i]); Object node = subMap.get(sj.toString()); if (node instanceof Map) { - // Add the sub-map to the queue along with the current path index - queue.add(new Tuple<>(i, (Map) node)); + if (i < path.length - 1) { + // Add the sub-map to the queue along with the current path index + queue.add(new Tuple<>(i, (Map) node)); + } else { + // We exhausted the path and got a map + // If it is an object - it will be handled in the value extractor + value = node; + } } else if (node != null) { if (i < path.length - 1) { // If we reach a concrete value without exhausting the full path, something is wrong with the mapping diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 7677878ddac4f..395f3bf270aa6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -336,6 +336,24 @@ public void testFieldWithDotsAndSamePathButDifferentHierarchy() { assertThat(ex.getMessage(), is("Multiple values (returned by [a.b.c.d.e.f.g]) are not supported")); } + public void testObjectsForSourceValue() throws IOException { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, false); + SearchHit hit = new SearchHit(1); + XContentBuilder source = JsonXContent.contentBuilder(); + source.startObject(); { + source.startObject(fieldName); { + source.field("b", "c"); + } + source.endObject(); + } + source.endObject(); + BytesReference sourceRef = BytesReference.bytes(source); + hit.sourceRef(sourceRef); + SqlException ex = expectThrows(SqlException.class, () -> fe.extract(hit)); + assertThat(ex.getMessage(), is("Objects (returned by [" + fieldName + "]) are not supported")); + } + private Object randomValue() { Supplier value = randomFrom(Arrays.asList( () -> randomAlphaOfLength(10), From fc1c47649ad000b8ceb02a5c7d9d85b7590df8f0 Mon Sep 17 00:00:00 2001 From: Heather McCartney Date: Fri, 18 Jan 2019 19:16:50 +0000 Subject: [PATCH 54/71] Document the need for JAVA11_HOME (#37589) This commit updates the contribution docs to include java11 as a requirement for building and testing Elasticsearch. --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2e5f6685ecf9f..23ca7a299e588 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -100,8 +100,8 @@ JDK 11 and testing on a JDK 8 runtime; to do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of a JDK 8 installation. Note that this mechanism can be used to test against other JDKs as well, this is not only limited to JDK 8. -> Note: It is also required to have `JAVA8_HOME`, `JAVA9_HOME`, and -`JAVA10_HOME` are available so that the tests can pass. 
+> Note: It is also required to have `JAVA8_HOME`, `JAVA9_HOME`, `JAVA10_HOME` +and `JAVA11_HOME` available so that the tests can pass. > Warning: do not use `sdkman` for Java installations which do not have proper `jrunscript` for jdk distributions. From 642e45e9e670b654042088ca4ed80a287cc57c8f Mon Sep 17 00:00:00 2001 From: jaymode Date: Fri, 18 Jan 2019 12:24:11 -0700 Subject: [PATCH 55/71] Fix setting openldap realm ssl config This change fixes the setup of the SSL configuration for the test openldap realm. The configuration was missing the realm identifier so the SSL settings being used were just the default JDK ones that do not trust the certificate of the idp fixture. See #37591 --- .../authc/ldap/OpenLdapUserSearchSessionFactoryTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java index ae73c140d9eef..a4351b696784a 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java @@ -51,7 +51,7 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { private static final String LDAPCACERT_PATH = "/ca.crt"; @Before - public void init() throws Exception { + public void init() { Path caPath = getDataPath(LDAPCACERT_PATH); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. 
@@ -60,7 +60,7 @@ public void init() throws Exception { */ globalSettings = Settings.builder() .put("path.home", createTempDir()) - .put("xpack.security.authc.realms.ldap.ssl.certificate_authorities", caPath) + .put("xpack.security.authc.realms.ldap.oldap-test.ssl.certificate_authorities", caPath) .build(); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } From a2bdfb9041528e52ccc21369886f811802d12ce6 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 Jan 2019 11:49:40 -0800 Subject: [PATCH 56/71] Packaging: Update marker used to allow ELASTIC_PASSWORD (#37243) This commit updates the file docker's entrypoint script looks for when deciding to process the ELASTIC_PASSWORD env var. The x-pack subdir of bin no longer exists in 7.0, where the backcompat layer for x-pack script locations was removed. closes #37240 --- distribution/docker/src/docker/bin/docker-entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/docker/src/docker/bin/docker-entrypoint.sh b/distribution/docker/src/docker/bin/docker-entrypoint.sh index 3158aaedae14b..b9aab95bc2e96 100644 --- a/distribution/docker/src/docker/bin/docker-entrypoint.sh +++ b/distribution/docker/src/docker/bin/docker-entrypoint.sh @@ -75,7 +75,7 @@ done < <(env) # will run in. export ES_JAVA_OPTS="-Des.cgroups.hierarchy.override=/ $ES_JAVA_OPTS" -if [[ -d bin/x-pack ]]; then +if [[ -f bin/elasticsearch-users ]]; then # Check for the ELASTIC_PASSWORD environment variable to set the # bootstrap password for Security. # From a3030c51e272bff2e62244c1bdce5f93afd1932f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 18 Jan 2019 21:05:03 +0100 Subject: [PATCH 57/71] [ILM] Add unfollow action (#36970) This change adds the unfollow action for CCR follower indices. This is needed for the shrink action in case an index is a follower index. 
This will give the follower index the opportunity to fully catch up with the leader index, pause index following and unfollow the leader index. After this the shrink action can safely perform the ilm shrink. The unfollow action needs to be added to the hot phase and acts as barrier for going to the next phase (warm or delete phases), so that follower indices are being unfollowed properly before indices are expected to go in read-only mode. This allows the force merge action to execute its steps safely. The unfollow action has three steps: * `wait-for-indexing-complete` step: waits for the index in question to get the `index.lifecycle.indexing_complete` setting be set to `true` * `wait-for-follow-shard-tasks` step: waits for all the shard follow tasks for the index being handled to report that the leader shard global checkpoint is equal to the follower shard global checkpoint. * `pause-follower-index` step: Pauses index following, necessary to unfollow * `close-follower-index` step: Closes the index, necessary to unfollow * `unfollow-follower-index` step: Actually unfollows the index using the CCR Unfollow API * `open-follower-index` step: Reopens the index now that it is a normal index * `wait-for-yellow` step: Waits for primary shards to be allocated after reopening the index to ensure the index is ready for the next step In the case of the last two steps, if the index in being handled is a regular index then the steps acts as a no-op. 
Relates to #34648 Co-authored-by: Martijn van Groningen Co-authored-by: Gordon Brown --- .../IndexLifecycleNamedXContentProvider.java | 5 +- .../indexlifecycle/LifecyclePolicy.java | 6 +- .../client/indexlifecycle/UnfollowAction.java | 74 +++++ .../client/IndexLifecycleIT.java | 8 +- .../client/RestHighLevelClientTests.java | 6 +- .../GetLifecyclePolicyResponseTests.java | 3 +- .../LifecyclePolicyMetadataTests.java | 3 +- .../indexlifecycle/LifecyclePolicyTests.java | 14 +- .../indexlifecycle/UnfollowActionTests.java | 43 +++ .../reference/ilm/policy-definitions.asciidoc | 40 +++ .../xpack/core/XPackClientPlugin.java | 4 +- .../AbstractUnfollowIndexStep.java | 35 +++ .../CloseFollowerIndexStep.java | 31 ++ .../indexlifecycle/OpenFollowerIndexStep.java | 33 ++ .../PauseFollowerIndexStep.java | 31 ++ .../TimeseriesLifecycleType.java | 9 +- .../core/indexlifecycle/UnfollowAction.java | 119 ++++++++ .../UnfollowFollowIndexStep.java | 32 ++ .../WaitForFollowShardTasksStep.java | 181 +++++++++++ .../WaitForIndexingCompleteStep.java | 91 ++++++ .../indexlifecycle/WaitForYellowStep.java | 78 +++++ .../AbstractUnfollowIndexStepTestCase.java | 73 +++++ .../CloseFollowerIndexStepTests.java | 117 +++++++ .../LifecyclePolicyMetadataTests.java | 6 +- .../indexlifecycle/LifecyclePolicyTests.java | 10 +- .../OpenFollowerIndexStepTests.java | 137 +++++++++ .../PauseFollowerIndexStepTests.java | 112 +++++++ .../TimeseriesLifecycleTypeTests.java | 41 ++- .../indexlifecycle/UnfollowActionTests.java | 80 +++++ .../UnfollowFollowIndexStepTests.java | 115 +++++++ .../WaitForFollowShardTasksStepInfoTests.java | 70 +++++ .../WaitForFollowShardTasksStepTests.java | 210 +++++++++++++ .../WaitForIndexingCompleteStepTests.java | 124 ++++++++ .../WaitForYellowStepTests.java | 120 ++++++++ .../action/PutLifecycleRequestTests.java | 7 +- .../plugin/ilm/qa/multi-cluster/build.gradle | 54 ++++ .../indexlifecycle/CCRIndexLifecycleIT.java | 285 ++++++++++++++++++ 
.../xpack/indexlifecycle/IndexLifecycle.java | 4 +- .../IndexLifecycleMetadataTests.java | 7 +- 39 files changed, 2387 insertions(+), 31 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/UnfollowAction.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/UnfollowActionTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractUnfollowIndexStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/OpenFollowerIndexStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/PauseFollowerIndexStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForIndexingCompleteStep.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForYellowStep.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractUnfollowIndexStepTestCase.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStepTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/OpenFollowerIndexStepTests.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/PauseFollowerIndexStepTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowActionTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStepTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepInfoTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForIndexingCompleteStepTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForYellowStepTests.java create mode 100644 x-pack/plugin/ilm/qa/multi-cluster/build.gradle create mode 100644 x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/indexlifecycle/CCRIndexLifecycleIT.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java index a4e5f034b5154..90ef9d808997e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java @@ -56,7 +56,10 @@ public List getNamedXContentParsers() { FreezeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), - SetPriorityAction::parse) + SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, + new ParseField(UnfollowAction.NAME), + UnfollowAction::parse) ); } } diff 
--git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java index 1a0f80b740ee7..5e4ae1f36bcbc 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java @@ -57,10 +57,10 @@ public class LifecyclePolicy implements ToXContentObject { throw new IllegalArgumentException("ordered " + PHASES_FIELD.getPreferredName() + " are not supported"); }, PHASES_FIELD); - ALLOWED_ACTIONS.put("hot", Sets.newHashSet(SetPriorityAction.NAME, RolloverAction.NAME)); - ALLOWED_ACTIONS.put("warm", Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, ForceMergeAction.NAME, + ALLOWED_ACTIONS.put("hot", Sets.newHashSet(UnfollowAction.NAME, SetPriorityAction.NAME, RolloverAction.NAME)); + ALLOWED_ACTIONS.put("warm", Sets.newHashSet(UnfollowAction.NAME, SetPriorityAction.NAME, AllocateAction.NAME, ForceMergeAction.NAME, ReadOnlyAction.NAME, ShrinkAction.NAME)); - ALLOWED_ACTIONS.put("cold", Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME)); + ALLOWED_ACTIONS.put("cold", Sets.newHashSet(UnfollowAction.NAME, SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME)); ALLOWED_ACTIONS.put("delete", Sets.newHashSet(DeleteAction.NAME)); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/UnfollowAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/UnfollowAction.java new file mode 100644 index 0000000000000..ba25cf937ec8f --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/UnfollowAction.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indexlifecycle; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public class UnfollowAction implements LifecycleAction, ToXContentObject { + public static final String NAME = "unfollow"; + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, UnfollowAction::new); + + public UnfollowAction() {} + + @Override + public String getName() { + return NAME; + } + + public static UnfollowAction parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return 36970; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + return true; + } + + @Override + public String 
toString() { + return Strings.toString(this); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndexLifecycleIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndexLifecycleIT.java index 08ec5a5b3fe09..4ad6d2e6ce604 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndexLifecycleIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndexLifecycleIT.java @@ -48,6 +48,7 @@ import org.elasticsearch.client.indexlifecycle.ShrinkAction; import org.elasticsearch.client.indexlifecycle.StartILMRequest; import org.elasticsearch.client.indexlifecycle.StopILMRequest; +import org.elasticsearch.client.indexlifecycle.UnfollowAction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.hamcrest.Matchers; @@ -144,19 +145,20 @@ public void testStartStopILM() throws Exception { public void testExplainLifecycle() throws Exception { Map lifecyclePhases = new HashMap<>(); - Map hotActions = Collections.singletonMap( - RolloverAction.NAME, - new RolloverAction(null, TimeValue.timeValueHours(50 * 24), null)); + Map hotActions = new HashMap<>(); + hotActions.put(RolloverAction.NAME, new RolloverAction(null, TimeValue.timeValueHours(50 * 24), null)); Phase hotPhase = new Phase("hot", randomFrom(TimeValue.ZERO, null), hotActions); lifecyclePhases.put("hot", hotPhase); Map warmActions = new HashMap<>(); + warmActions.put(UnfollowAction.NAME, new UnfollowAction()); warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, null, Collections.singletonMap("_name", "node-1"))); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1000)); lifecyclePhases.put("warm", new Phase("warm", TimeValue.timeValueSeconds(1000), warmActions)); Map coldActions = new HashMap<>(); + coldActions.put(UnfollowAction.NAME, new UnfollowAction()); coldActions.put(AllocateAction.NAME, new 
AllocateAction(0, null, null, null)); lifecyclePhases.put("cold", new Phase("cold", TimeValue.timeValueSeconds(2000), coldActions)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 6995fcf099ad2..1ea6056368051 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -56,6 +56,7 @@ import org.elasticsearch.client.indexlifecycle.RolloverAction; import org.elasticsearch.client.indexlifecycle.SetPriorityAction; import org.elasticsearch.client.indexlifecycle.ShrinkAction; +import org.elasticsearch.client.indexlifecycle.UnfollowAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.bytes.BytesReference; @@ -645,7 +646,7 @@ public void testDefaultNamedXContents() { public void testProvidedNamedXContents() { List namedXContents = RestHighLevelClient.getProvidedNamedXContents(); - assertEquals(19, namedXContents.size()); + assertEquals(20, namedXContents.size()); Map, Integer> categories = new HashMap<>(); List names = new ArrayList<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { @@ -669,7 +670,8 @@ public void testProvidedNamedXContents() { assertTrue(names.contains(MeanReciprocalRank.NAME)); assertTrue(names.contains(DiscountedCumulativeGain.NAME)); assertTrue(names.contains(ExpectedReciprocalRank.NAME)); - assertEquals(Integer.valueOf(8), categories.get(LifecycleAction.class)); + assertEquals(Integer.valueOf(9), categories.get(LifecycleAction.class)); + assertTrue(names.contains(UnfollowAction.NAME)); assertTrue(names.contains(AllocateAction.NAME)); assertTrue(names.contains(DeleteAction.NAME)); assertTrue(names.contains(ForceMergeAction.NAME)); diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java index 0fb7b29067f22..c16c270512ca6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java @@ -68,7 +68,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java index 25bfa5a4c43d2..6d8014c432c28 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java @@ -64,7 +64,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new 
ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java index 4f04f814471c1..1690f66572142 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java @@ -39,10 +39,11 @@ import static org.hamcrest.Matchers.equalTo; public class LifecyclePolicyTests extends AbstractXContentTestCase { - private static final Set VALID_HOT_ACTIONS = Sets.newHashSet(SetPriorityAction.NAME, RolloverAction.NAME); - private static final Set VALID_WARM_ACTIONS = Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, + private static final Set VALID_HOT_ACTIONS = Sets.newHashSet(UnfollowAction.NAME, SetPriorityAction.NAME, RolloverAction.NAME); + private static final Set VALID_WARM_ACTIONS = Sets.newHashSet(UnfollowAction.NAME, SetPriorityAction.NAME, AllocateAction.NAME, ForceMergeAction.NAME, ReadOnlyAction.NAME, ShrinkAction.NAME); - private static final Set VALID_COLD_ACTIONS = Sets.newHashSet(SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME); + private static 
final Set VALID_COLD_ACTIONS = Sets.newHashSet(UnfollowAction.NAME, SetPriorityAction.NAME, AllocateAction.NAME, + FreezeAction.NAME); private static final Set VALID_DELETE_ACTIONS = Sets.newHashSet(DeleteAction.NAME); private String lifecycleName; @@ -68,7 +69,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } @@ -213,6 +215,8 @@ public static LifecyclePolicy createRandomPolicy(String lifecycleName) { return new FreezeAction(); case SetPriorityAction.NAME: return SetPriorityActionTests.randomInstance(); + case UnfollowAction.NAME: + return new UnfollowAction(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; @@ -246,6 +250,8 @@ private LifecycleAction getTestAction(String actionName) { return new FreezeAction(); case SetPriorityAction.NAME: return SetPriorityActionTests.randomInstance(); + case UnfollowAction.NAME: + return new UnfollowAction(); default: throw new IllegalArgumentException("unsupported phase action [" + actionName + "]"); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/UnfollowActionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/UnfollowActionTests.java new file mode 100644 index 
0000000000000..4dd73c5a08ec2 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/UnfollowActionTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indexlifecycle; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class UnfollowActionTests extends AbstractXContentTestCase { + + @Override + protected UnfollowAction createTestInstance() { + return new UnfollowAction(); + } + + @Override + protected UnfollowAction doParseInstance(XContentParser parser) throws IOException { + return UnfollowAction.parse(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/docs/reference/ilm/policy-definitions.asciidoc b/docs/reference/ilm/policy-definitions.asciidoc index adf78cecd89fe..881b58826b031 100644 --- a/docs/reference/ilm/policy-definitions.asciidoc +++ b/docs/reference/ilm/policy-definitions.asciidoc @@ -87,16 +87,19 @@ The below list shows the actions which are available in each phase. 
* Hot - <> - <> + - <> * Warm - <> - <> - <> - <> - <> + - <> * Cold - <> - <> - <> + - <> * Delete - <> @@ -616,6 +619,43 @@ PUT _ilm/policy/my_policy -------------------------------------------------- // CONSOLE +[[ilm-unfollow-action]] +==== Unfollow + +This action turns a {ref}/ccr-apis.html[ccr] follower index +into a regular index. This can be desirable when moving follower +indices into the next phase. Also certain actions like shrink +and rollover can then be performed safely on follower indices. + +If the unfollow action encounters a follower index then +the following operations will be performed on it: + +* Pauses index following for the follower index. +* Closes the follower index. +* Unfollows the follower index. +* Opens the follower index (which at this point is a regular index). + +The unfollow action does not have any options and +if it encounters a non-follower index, then the +unfollow action leaves that index untouched and +lets the next action operate on this index.
+ +[source,js] +-------------------------------------------------- +PUT _ilm/policy/my_policy +{ + "policy": { + "phases": { + "hot": { + "actions": { + "unfollow" : {} + } + } + } + } +} +-------------------------------------------------- +// CONSOLE === Full Policy diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 6865cd58c0dac..a121217d4cdaa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -57,6 +57,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; +import org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction; import org.elasticsearch.xpack.core.indexlifecycle.action.DeleteLifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.action.ExplainLifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.action.GetLifecycleAction; @@ -429,7 +430,8 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new) ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractUnfollowIndexStep.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractUnfollowIndexStep.java new file mode 100644 index 0000000000000..8e0626425b490 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractUnfollowIndexStep.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; + +import java.util.Map; + +import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY; + +abstract class AbstractUnfollowIndexStep extends AsyncActionStep { + + AbstractUnfollowIndexStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + public final void performAction(IndexMetaData indexMetaData, ClusterState currentClusterState, Listener listener) { + String followerIndex = indexMetaData.getIndex().getName(); + Map customIndexMetadata = indexMetaData.getCustomData(CCR_METADATA_KEY); + if (customIndexMetadata == null) { + listener.onResponse(true); + return; + } + + innerPerformAction(followerIndex, listener); + } + + abstract void innerPerformAction(String followerIndex, Listener listener); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStep.java new file mode 100644 index 0000000000000..3fb6e145236bc --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStep.java @@ -0,0 +1,31 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.client.Client; + +final class CloseFollowerIndexStep extends AbstractUnfollowIndexStep { + + static final String NAME = "close-follower-index"; + + CloseFollowerIndexStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + void innerPerformAction(String followerIndex, Listener listener) { + CloseIndexRequest closeIndexRequest = new CloseIndexRequest(followerIndex); + getClient().admin().indices().close(closeIndexRequest, ActionListener.wrap( + r -> { + assert r.isAcknowledged() : "close index response is not acknowledged"; + listener.onResponse(true); + }, + listener::onFailure) + ); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/OpenFollowerIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/OpenFollowerIndexStep.java new file mode 100644 index 0000000000000..7ba2c4633ab99 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/OpenFollowerIndexStep.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; + +final class OpenFollowerIndexStep extends AsyncActionStep { + + static final String NAME = "open-follower-index"; + + OpenFollowerIndexStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + public void performAction(IndexMetaData indexMetaData, ClusterState currentClusterState, Listener listener) { + OpenIndexRequest request = new OpenIndexRequest(indexMetaData.getIndex().getName()); + getClient().admin().indices().open(request, ActionListener.wrap( + r -> { + assert r.isAcknowledged() : "open index response is not acknowledged"; + listener.onResponse(true); + }, + listener::onFailure + )); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/PauseFollowerIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/PauseFollowerIndexStep.java new file mode 100644 index 0000000000000..72b38c7b72797 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/PauseFollowerIndexStep.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; + +final class PauseFollowerIndexStep extends AbstractUnfollowIndexStep { + + static final String NAME = "pause-follower-index"; + + PauseFollowerIndexStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + void innerPerformAction(String followerIndex, Listener listener) { + PauseFollowAction.Request request = new PauseFollowAction.Request(followerIndex); + getClient().execute(PauseFollowAction.INSTANCE, request, ActionListener.wrap( + r -> { + assert r.isAcknowledged() : "pause follow response is not acknowledged"; + listener.onResponse(true); + }, + listener::onFailure + )); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java index 5dad5725ba9ba..4d1c770cea4bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java @@ -34,10 +34,11 @@ public class TimeseriesLifecycleType implements LifecycleType { public static final String TYPE = "timeseries"; static final List VALID_PHASES = Arrays.asList("hot", "warm", "cold", "delete"); - static final List ORDERED_VALID_HOT_ACTIONS = Arrays.asList(SetPriorityAction.NAME, RolloverAction.NAME); - static final List ORDERED_VALID_WARM_ACTIONS = Arrays.asList(SetPriorityAction.NAME, ReadOnlyAction.NAME, AllocateAction.NAME, - ShrinkAction.NAME, ForceMergeAction.NAME); - static final List ORDERED_VALID_COLD_ACTIONS = Arrays.asList(SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME); + static final List 
ORDERED_VALID_HOT_ACTIONS = Arrays.asList(SetPriorityAction.NAME, UnfollowAction.NAME, RolloverAction.NAME); + static final List ORDERED_VALID_WARM_ACTIONS = Arrays.asList(SetPriorityAction.NAME, UnfollowAction.NAME, ReadOnlyAction.NAME, + AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME); + static final List ORDERED_VALID_COLD_ACTIONS = Arrays.asList(SetPriorityAction.NAME, UnfollowAction.NAME, AllocateAction.NAME, + FreezeAction.NAME); static final List ORDERED_VALID_DELETE_ACTIONS = Arrays.asList(DeleteAction.NAME); static final Set VALID_HOT_ACTIONS = Sets.newHashSet(ORDERED_VALID_HOT_ACTIONS); static final Set VALID_WARM_ACTIONS = Sets.newHashSet(ORDERED_VALID_WARM_ACTIONS); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java new file mode 100644 index 0000000000000..20a0fb75b9daa --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +/** + * Converts a CCR following index into a normal, standalone index, once the index is ready to be safely separated. + * + * "Readiness" is composed of two conditions: + * 1) The index must have {@link LifecycleSettings#LIFECYCLE_INDEXING_COMPLETE} set to {@code true}, which is + * done automatically by {@link RolloverAction} (or manually). + * 2) The index must be up to date with the leader, defined as the follower checkpoint being + * equal to the global checkpoint for all shards. 
+ */ +public final class UnfollowAction implements LifecycleAction { + + public static final String NAME = "unfollow"; + public static final String CCR_METADATA_KEY = "ccr"; + + public UnfollowAction() {} + + @Override + public List toSteps(Client client, String phase, StepKey nextStepKey) { + StepKey indexingComplete = new StepKey(phase, NAME, WaitForIndexingCompleteStep.NAME); + StepKey waitForFollowShardTasks = new StepKey(phase, NAME, WaitForFollowShardTasksStep.NAME); + StepKey pauseFollowerIndex = new StepKey(phase, NAME, PauseFollowerIndexStep.NAME); + StepKey closeFollowerIndex = new StepKey(phase, NAME, CloseFollowerIndexStep.NAME); + StepKey unfollowFollowerIndex = new StepKey(phase, NAME, UnfollowFollowIndexStep.NAME); + StepKey openFollowerIndex = new StepKey(phase, NAME, OpenFollowerIndexStep.NAME); + StepKey waitForYellowStep = new StepKey(phase, NAME, WaitForYellowStep.NAME); + + WaitForIndexingCompleteStep step1 = new WaitForIndexingCompleteStep(indexingComplete, waitForFollowShardTasks); + WaitForFollowShardTasksStep step2 = new WaitForFollowShardTasksStep(waitForFollowShardTasks, pauseFollowerIndex, client); + PauseFollowerIndexStep step3 = new PauseFollowerIndexStep(pauseFollowerIndex, closeFollowerIndex, client); + CloseFollowerIndexStep step4 = new CloseFollowerIndexStep(closeFollowerIndex, unfollowFollowerIndex, client); + UnfollowFollowIndexStep step5 = new UnfollowFollowIndexStep(unfollowFollowerIndex, openFollowerIndex, client); + OpenFollowerIndexStep step6 = new OpenFollowerIndexStep(openFollowerIndex, waitForYellowStep, client); + WaitForYellowStep step7 = new WaitForYellowStep(waitForYellowStep, nextStepKey); + return Arrays.asList(step1, step2, step3, step4, step5, step6, step7); + } + + @Override + public List toStepKeys(String phase) { + StepKey indexingCompleteStep = new StepKey(phase, NAME, WaitForIndexingCompleteStep.NAME); + StepKey waitForFollowShardTasksStep = new StepKey(phase, NAME, WaitForFollowShardTasksStep.NAME); + 
StepKey pauseFollowerIndexStep = new StepKey(phase, NAME, PauseFollowerIndexStep.NAME); + StepKey closeFollowerIndexStep = new StepKey(phase, NAME, CloseFollowerIndexStep.NAME); + StepKey unfollowIndexStep = new StepKey(phase, NAME, UnfollowFollowIndexStep.NAME); + StepKey openFollowerIndexStep = new StepKey(phase, NAME, OpenFollowerIndexStep.NAME); + StepKey waitForYellowStep = new StepKey(phase, NAME, WaitForYellowStep.NAME); + return Arrays.asList(indexingCompleteStep, waitForFollowShardTasksStep, pauseFollowerIndexStep, + closeFollowerIndexStep, unfollowIndexStep, openFollowerIndexStep, waitForYellowStep); + } + + @Override + public boolean isSafeAction() { + // There are no settings to change, so therefor this action should be safe: + return true; + } + + @Override + public String getWriteableName() { + return NAME; + } + + public UnfollowAction(StreamInput in) throws IOException {} + + @Override + public void writeTo(StreamOutput out) throws IOException {} + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, UnfollowAction::new); + + public static UnfollowAction parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return 36970; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + return true; + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStep.java new file mode 100644 index 0000000000000..953450bbc763b --- /dev/null +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStep.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; + +final class UnfollowFollowIndexStep extends AbstractUnfollowIndexStep { + + static final String NAME = "unfollow-follower-index"; + + UnfollowFollowIndexStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + void innerPerformAction(String followerIndex, Listener listener) { + UnfollowAction.Request request = new UnfollowAction.Request(followerIndex); + getClient().execute(UnfollowAction.INSTANCE, request, ActionListener.wrap( + r -> { + assert r.isAcknowledged() : "unfollow response is not acknowledged"; + listener.onResponse(true); + }, + listener::onFailure + )); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStep.java new file mode 100644 index 0000000000000..f3938a1d3da2b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStep.java @@ -0,0 +1,181 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY; + +final class WaitForFollowShardTasksStep extends AsyncWaitStep { + + static final String NAME = "wait-for-follow-shard-tasks"; + + WaitForFollowShardTasksStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + public void evaluateCondition(IndexMetaData indexMetaData, Listener listener) { + Map customIndexMetadata = indexMetaData.getCustomData(CCR_METADATA_KEY); + if (customIndexMetadata == null) { + listener.onResponse(true, null); + return; + } + + FollowStatsAction.StatsRequest request = new FollowStatsAction.StatsRequest(); + request.setIndices(new String[]{indexMetaData.getIndex().getName()}); + getClient().execute(FollowStatsAction.INSTANCE, request, + ActionListener.wrap(r -> handleResponse(r, listener), listener::onFailure)); + } + + void handleResponse(FollowStatsAction.StatsResponses responses, Listener listener) { + List unSyncedShardFollowStatuses = responses.getStatsResponses() + .stream() + .map(FollowStatsAction.StatsResponse::status) + .filter(shardFollowStatus -> shardFollowStatus.leaderGlobalCheckpoint() != shardFollowStatus.followerGlobalCheckpoint()) + .collect(Collectors.toList()); + + // Follow stats api needs to 
return stats for follower index and all shard follow tasks should be synced: + boolean conditionMet = responses.getStatsResponses().size() > 0 && unSyncedShardFollowStatuses.isEmpty(); + if (conditionMet) { + listener.onResponse(true, null); + } else { + List shardFollowTaskInfos = unSyncedShardFollowStatuses + .stream() + .map(status -> new Info.ShardFollowTaskInfo(status.followerIndex(), status.getShardId(), + status.leaderGlobalCheckpoint(), status.followerGlobalCheckpoint())) + .collect(Collectors.toList()); + listener.onResponse(false, new Info(shardFollowTaskInfos)); + } + } + + static final class Info implements ToXContentObject { + + static final ParseField SHARD_FOLLOW_TASKS = new ParseField("shard_follow_tasks"); + static final ParseField MESSAGE = new ParseField("message"); + + private final List shardFollowTaskInfos; + + Info(List shardFollowTaskInfos) { + this.shardFollowTaskInfos = shardFollowTaskInfos; + } + + List getShardFollowTaskInfos() { + return shardFollowTaskInfos; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(SHARD_FOLLOW_TASKS.getPreferredName(), shardFollowTaskInfos); + String message; + if (shardFollowTaskInfos.size() > 0) { + message = "Waiting for [" + shardFollowTaskInfos.size() + "] shard follow tasks to be in sync"; + } else { + message = "Waiting for following to be unpaused and all shard follow tasks to be up to date"; + } + builder.field(MESSAGE.getPreferredName(), message); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Info info = (Info) o; + return Objects.equals(shardFollowTaskInfos, info.shardFollowTaskInfos); + } + + @Override + public int hashCode() { + return Objects.hash(shardFollowTaskInfos); + } + + @Override + public String toString() { + return Strings.toString(this); + } 
+ + static final class ShardFollowTaskInfo implements ToXContentObject { + + static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index"); + static final ParseField SHARD_ID_FIELD = new ParseField("shard_id"); + static final ParseField LEADER_GLOBAL_CHECKPOINT_FIELD = new ParseField("leader_global_checkpoint"); + static final ParseField FOLLOWER_GLOBAL_CHECKPOINT_FIELD = new ParseField("follower_global_checkpoint"); + + private final String followerIndex; + private final int shardId; + private final long leaderGlobalCheckpoint; + private final long followerGlobalCheckpoint; + + ShardFollowTaskInfo(String followerIndex, int shardId, long leaderGlobalCheckpoint, long followerGlobalCheckpoint) { + this.followerIndex = followerIndex; + this.shardId = shardId; + this.leaderGlobalCheckpoint = leaderGlobalCheckpoint; + this.followerGlobalCheckpoint = followerGlobalCheckpoint; + } + + String getFollowerIndex() { + return followerIndex; + } + + + int getShardId() { + return shardId; + } + + long getLeaderGlobalCheckpoint() { + return leaderGlobalCheckpoint; + } + + long getFollowerGlobalCheckpoint() { + return followerGlobalCheckpoint; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex); + builder.field(SHARD_ID_FIELD.getPreferredName(), shardId); + builder.field(LEADER_GLOBAL_CHECKPOINT_FIELD.getPreferredName(), leaderGlobalCheckpoint); + builder.field(FOLLOWER_GLOBAL_CHECKPOINT_FIELD.getPreferredName(), followerGlobalCheckpoint); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ShardFollowTaskInfo that = (ShardFollowTaskInfo) o; + return shardId == that.shardId && + leaderGlobalCheckpoint == that.leaderGlobalCheckpoint && + followerGlobalCheckpoint == 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.indexlifecycle;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY;

/**
 * A {@link ClusterStateWaitStep} that, for a CCR follower index, waits until the
 * {@link LifecycleSettings#LIFECYCLE_INDEXING_COMPLETE} index setting is {@code true}.
 * An index without CCR custom metadata is not a follower, so the condition is
 * immediately met for it.
 */
final class WaitForIndexingCompleteStep extends ClusterStateWaitStep {
    private static final Logger logger = LogManager.getLogger(WaitForIndexingCompleteStep.class);

    static final String NAME = "wait-for-indexing-complete";

    WaitForIndexingCompleteStep(StepKey key, StepKey nextStepKey) {
        super(key, nextStepKey);
    }

    @Override
    public Result isConditionMet(Index index, ClusterState clusterState) {
        IndexMetaData followerIndex = clusterState.metaData().index(index);
        if (followerIndex == null) {
            // Index must have been since deleted, ignore it
            logger.debug("[{}] lifecycle action for index [{}] executed but index no longer exists",
                getKey().getAction(), index.getName());
            return new Result(false, null);
        }
        // Fix: declare the parameterized type instead of a raw Map (the type arguments were
        // evidently stripped from the patch; IndexMetaData custom data is a string-to-string map).
        Map<String, String> customIndexMetadata = followerIndex.getCustomData(CCR_METADATA_KEY);
        if (customIndexMetadata == null) {
            // No CCR metadata => not a follower index, nothing to wait for.
            return new Result(true, null);
        }

        boolean indexingComplete = LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE_SETTING.get(followerIndex.getSettings());
        if (indexingComplete) {
            return new Result(true, null);
        } else {
            return new Result(false, new IndexingNotCompleteInfo());
        }
    }

    /**
     * Step info rendered while waiting: explains which setting is outstanding.
     * NOTE(review): the message says the setting must be set "on the leader index" while the
     * code reads it from the follower's settings — in CCR the setting is replicated from the
     * leader, so this is likely intentional; confirm before changing the wording.
     */
    static final class IndexingNotCompleteInfo implements ToXContentObject {

        static final ParseField MESSAGE_FIELD = new ParseField("message");
        static final ParseField INDEXING_COMPLETE = new ParseField(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE);

        private final String message;

        IndexingNotCompleteInfo() {
            this.message = "waiting for the [" + LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE +
                "] setting to be set to true on the leader index, it is currently [false]";
        }

        String getMessage() {
            return message;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(MESSAGE_FIELD.getPreferredName(), message);
            builder.field(INDEXING_COMPLETE.getPreferredName(), false);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            IndexingNotCompleteInfo info = (IndexingNotCompleteInfo) o;
            return Objects.equals(getMessage(), info.getMessage());
        }

        @Override
        public int hashCode() {
            return Objects.hash(getMessage());
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.indexlifecycle;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;

import java.io.IOException;
import java.util.Objects;

/**
 * A {@link ClusterStateWaitStep} that waits until the index is at least yellow,
 * i.e. every primary shard of the index is active in the routing table.
 */
class WaitForYellowStep extends ClusterStateWaitStep {

    static final String NAME = "wait-for-yellow-step";

    WaitForYellowStep(StepKey key, StepKey nextStepKey) {
        super(key, nextStepKey);
    }

    @Override
    public Result isConditionMet(Index index, ClusterState clusterState) {
        RoutingTable routingTable = clusterState.routingTable();
        IndexRoutingTable indexShardRoutingTable = routingTable.index(index);
        // No routing table entry at all: treat as red.
        if (indexShardRoutingTable == null) {
            return new Result(false, new Info("index is red; no IndexRoutingTable"));
        }
        // Yellow (or better) only requires all primaries to be active; replicas may lag.
        if (indexShardRoutingTable.allPrimaryShardsActive() == false) {
            return new Result(false, new Info("index is red; not all primary shards are active"));
        }
        return new Result(true, null);
    }

    /** Step info carrying a human-readable explanation of why the index is not yet yellow. */
    static final class Info implements ToXContentObject {

        static final ParseField MESSAGE_FIELD = new ParseField("message");

        private final String message;

        Info(String message) {
            this.message = message;
        }

        String getMessage() {
            return message;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(MESSAGE_FIELD.getPreferredName(), message);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            Info other = (Info) obj;
            return Objects.equals(getMessage(), other.getMessage());
        }

        @Override
        public int hashCode() {
            return Objects.hash(getMessage());
        }
    }
}
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.mockito.Mockito; + +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public abstract class AbstractUnfollowIndexStepTestCase extends AbstractStepTestCase { + + @Override + protected final T createRandomInstance() { + Step.StepKey stepKey = randomStepKey(); + Step.StepKey nextStepKey = randomStepKey(); + return newInstance(stepKey, nextStepKey, Mockito.mock(Client.class)); + } + + @Override + protected final T mutateInstance(T instance) { + Step.StepKey key = instance.getKey(); + Step.StepKey nextKey = instance.getNextStepKey(); + + if (randomBoolean()) { + key = new Step.StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } else { + nextKey = new Step.StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } + + return newInstance(key, nextKey, instance.getClient()); + } + + @Override + protected final T copyInstance(T instance) { + return newInstance(instance.getKey(), instance.getNextStepKey(), instance.getClient()); + } + + public final void testNotAFollowerIndex() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + Client client = Mockito.mock(Client.class); + T step = newInstance(randomStepKey(), randomStepKey(), client); + + Boolean[] completed = new Boolean[1]; + Exception[] failure = new Exception[1]; + step.performAction(indexMetadata, null, new AsyncActionStep.Listener() { + @Override + public void onResponse(boolean complete) { + completed[0] = complete; + } + + @Override + public void onFailure(Exception e) { + failure[0] = e; + } + }); + assertThat(completed[0], is(true)); 
+ assertThat(failure[0], nullValue()); + Mockito.verifyZeroInteractions(client); + } + + protected abstract T newInstance(Step.StepKey key, Step.StepKey nextKey, Client client); +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStepTests.java new file mode 100644 index 0000000000000..528021189e107 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/CloseFollowerIndexStepTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.AdminClient; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.IndicesAdminClient; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.mockito.Mockito; + +import java.util.Collections; + +import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +public class CloseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCase { + + @Override + protected CloseFollowerIndexStep newInstance(Step.StepKey key, Step.StepKey nextKey, Client client) { + return new CloseFollowerIndexStep(key, nextKey, client); + } + + public void testCloseFollowingIndex() 
    {
        // Happy path: the step should issue a close-index request for the follower
        // index and report completion when the request is acknowledged.
        IndexMetaData indexMetadata = IndexMetaData.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();

        Client client = Mockito.mock(Client.class);
        AdminClient adminClient = Mockito.mock(AdminClient.class);
        Mockito.when(client.admin()).thenReturn(adminClient);
        IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class);
        Mockito.when(adminClient.indices()).thenReturn(indicesClient);

        // Stub the close-index call: verify it targets the follower index, then ack it.
        Mockito.doAnswer(invocation -> {
            CloseIndexRequest closeIndexRequest = (CloseIndexRequest) invocation.getArguments()[0];
            assertThat(closeIndexRequest.indices()[0], equalTo("follower-index"));
            @SuppressWarnings("unchecked")
            ActionListener listener = (ActionListener) invocation.getArguments()[1];
            listener.onResponse(new AcknowledgedResponse(true));
            return null;
        }).when(indicesClient).close(Mockito.any(), Mockito.any());

        // Single-element arrays capture the async callback results for assertion below.
        Boolean[] completed = new Boolean[1];
        Exception[] failure = new Exception[1];
        CloseFollowerIndexStep step = new CloseFollowerIndexStep(randomStepKey(), randomStepKey(), client);
        step.performAction(indexMetadata, null, new AsyncActionStep.Listener() {
            @Override
            public void onResponse(boolean complete) {
                completed[0] = complete;
            }

            @Override
            public void onFailure(Exception e) {
                failure[0] = e;
            }
        });
        assertThat(completed[0], is(true));
        assertThat(failure[0], nullValue());
    }

    /**
     * Failure path: when the close-index request fails, the step must propagate the
     * exact exception to {@code onFailure} and must not report completion.
     */
    public void testCloseFollowingIndexFailed() {
        IndexMetaData indexMetadata = IndexMetaData.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();

        // Mock pause follow api call:
        Client client = Mockito.mock(Client.class);
        AdminClient adminClient = Mockito.mock(AdminClient.class);
        Mockito.when(client.admin()).thenReturn(adminClient);
        IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class);
        Mockito.when(adminClient.indices()).thenReturn(indicesClient);

        // Stub the close-index call to fail with a known exception instance.
        Exception error = new RuntimeException();
        Mockito.doAnswer(invocation -> {
            CloseIndexRequest closeIndexRequest = (CloseIndexRequest) invocation.getArguments()[0];
            assertThat(closeIndexRequest.indices()[0], equalTo("follower-index"));
            ActionListener listener = (ActionListener) invocation.getArguments()[1];
            listener.onFailure(error);
            return null;
        }).when(indicesClient).close(Mockito.any(), Mockito.any());

        // Single-element arrays capture the async callback results for assertion below.
        Boolean[] completed = new Boolean[1];
        Exception[] failure = new Exception[1];
        CloseFollowerIndexStep step = new CloseFollowerIndexStep(randomStepKey(), randomStepKey(), client);
        step.performAction(indexMetadata, null, new AsyncActionStep.Listener() {
            @Override
            public void onResponse(boolean complete) {
                completed[0] = complete;
            }

            @Override
            public void onFailure(Exception e) {
                failure[0] = e;
            }
        });
        // The same exception instance must surface; completion callback must not fire.
        assertThat(completed[0], nullValue());
        assertThat(failure[0], sameInstance(error));
        Mockito.verify(indicesClient).close(Mockito.any(), Mockito.any());
        Mockito.verifyNoMoreInteractions(indicesClient);
    }
}
NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new) )); } @@ -62,7 +63,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java index 34e09824ed4b4..1730213e68363 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java @@ -54,7 +54,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new 
NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new) )); } @@ -71,7 +72,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } @@ -120,6 +122,8 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicyWithAllPhases(@Null return new FreezeAction(); case SetPriorityAction.NAME: return SetPriorityActionTests.randomInstance(); + case UnfollowAction.NAME: + return new UnfollowAction(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; @@ -170,6 +174,8 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l return new FreezeAction(); case SetPriorityAction.NAME: return SetPriorityActionTests.randomInstance(); + case UnfollowAction.NAME: + return new UnfollowAction(); default: throw new IllegalArgumentException("invalid 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.indexlifecycle;

import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.mockito.Mockito;

import java.util.Collections;

import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Tests for {@link OpenFollowerIndexStep}: verifies the step issues an open-index
 * request for the follower index and correctly relays success and failure.
 *
 * Fixes in this revision: the locals holding the captured {@link OpenIndexRequest}
 * were misleadingly named {@code closeIndexRequest} (copy-paste from the close-step
 * test) and have been renamed; the generic type parameters stripped by patch mangling
 * ({@code AbstractStepTestCase<OpenFollowerIndexStep>}, the listener casts matching
 * the existing {@code @SuppressWarnings("unchecked")}) have been restored.
 */
public class OpenFollowerIndexStepTests extends AbstractStepTestCase<OpenFollowerIndexStep> {

    @Override
    protected OpenFollowerIndexStep createRandomInstance() {
        Step.StepKey stepKey = randomStepKey();
        Step.StepKey nextStepKey = randomStepKey();
        return new OpenFollowerIndexStep(stepKey, nextStepKey, Mockito.mock(Client.class));
    }

    @Override
    protected OpenFollowerIndexStep mutateInstance(OpenFollowerIndexStep instance) {
        Step.StepKey key = instance.getKey();
        Step.StepKey nextKey = instance.getNextStepKey();

        if (randomBoolean()) {
            key = new Step.StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5));
        } else {
            // NOTE(review): derives the mutated nextKey from key's fields; the random suffix
            // still guarantees a different instance — confirm intent.
            nextKey = new Step.StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5));
        }

        return new OpenFollowerIndexStep(key, nextKey, instance.getClient());
    }

    @Override
    protected OpenFollowerIndexStep copyInstance(OpenFollowerIndexStep instance) {
        return new OpenFollowerIndexStep(instance.getKey(), instance.getNextStepKey(), instance.getClient());
    }

    /** Happy path: the open request targets the follower index and an ack completes the step. */
    public void testOpenFollowingIndex() {
        IndexMetaData indexMetadata = IndexMetaData.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();

        Client client = Mockito.mock(Client.class);
        AdminClient adminClient = Mockito.mock(AdminClient.class);
        Mockito.when(client.admin()).thenReturn(adminClient);
        IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class);
        Mockito.when(adminClient.indices()).thenReturn(indicesClient);

        Mockito.doAnswer(invocation -> {
            // Renamed from "closeIndexRequest": this test opens the index.
            OpenIndexRequest openIndexRequest = (OpenIndexRequest) invocation.getArguments()[0];
            assertThat(openIndexRequest.indices()[0], equalTo("follower-index"));
            @SuppressWarnings("unchecked")
            ActionListener<OpenIndexResponse> listener = (ActionListener<OpenIndexResponse>) invocation.getArguments()[1];
            listener.onResponse(new OpenIndexResponse(true, true));
            return null;
        }).when(indicesClient).open(Mockito.any(), Mockito.any());

        // Single-element arrays capture the async callback results for assertion below.
        Boolean[] completed = new Boolean[1];
        Exception[] failure = new Exception[1];
        OpenFollowerIndexStep step = new OpenFollowerIndexStep(randomStepKey(), randomStepKey(), client);
        step.performAction(indexMetadata, null, new AsyncActionStep.Listener() {
            @Override
            public void onResponse(boolean complete) {
                completed[0] = complete;
            }

            @Override
            public void onFailure(Exception e) {
                failure[0] = e;
            }
        });
        assertThat(completed[0], is(true));
        assertThat(failure[0], nullValue());
    }

    /** Failure path: the exact exception must surface via onFailure and the step must not complete. */
    public void testOpenFollowingIndexFailed() {
        IndexMetaData indexMetadata = IndexMetaData.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();

        Client client = Mockito.mock(Client.class);
        AdminClient adminClient = Mockito.mock(AdminClient.class);
        Mockito.when(client.admin()).thenReturn(adminClient);
        IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class);
        Mockito.when(adminClient.indices()).thenReturn(indicesClient);

        Exception error = new RuntimeException();
        Mockito.doAnswer(invocation -> {
            // Renamed from "closeIndexRequest": this test opens the index.
            OpenIndexRequest openIndexRequest = (OpenIndexRequest) invocation.getArguments()[0];
            assertThat(openIndexRequest.indices()[0], equalTo("follower-index"));
            @SuppressWarnings("unchecked")
            ActionListener<OpenIndexResponse> listener = (ActionListener<OpenIndexResponse>) invocation.getArguments()[1];
            listener.onFailure(error);
            return null;
        }).when(indicesClient).open(Mockito.any(), Mockito.any());

        // Single-element arrays capture the async callback results for assertion below.
        Boolean[] completed = new Boolean[1];
        Exception[] failure = new Exception[1];
        OpenFollowerIndexStep step = new OpenFollowerIndexStep(randomStepKey(), randomStepKey(), client);
        step.performAction(indexMetadata, null, new AsyncActionStep.Listener() {
            @Override
            public void onResponse(boolean complete) {
                completed[0] = complete;
            }

            @Override
            public void onFailure(Exception e) {
                failure[0] = e;
            }
        });
        assertThat(completed[0], nullValue());
        assertThat(failure[0], sameInstance(error));
        Mockito.verify(indicesClient).open(Mockito.any(), Mockito.any());
        Mockito.verifyNoMoreInteractions(indicesClient);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.indexlifecycle;

import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
import org.mockito.Mockito;

import java.util.Collections;

import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Tests for {@link PauseFollowerIndexStep}: verifies the step executes the CCR
 * pause-follow action for the follower index and correctly relays success and failure.
 *
 * Fixes in this revision: removed the unused AdminClient/IndicesAdminClient stubbing
 * from the happy-path test — unlike the close/open step tests, pause-follow is stubbed
 * via {@code client.execute(...)} and the indices admin client is never touched — and
 * restored the generic type parameters stripped by patch mangling
 * ({@code AbstractUnfollowIndexStepTestCase<PauseFollowerIndexStep>}, the listener casts).
 */
public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCase<PauseFollowerIndexStep> {

    @Override
    protected PauseFollowerIndexStep newInstance(Step.StepKey key, Step.StepKey nextKey, Client client) {
        return new PauseFollowerIndexStep(key, nextKey, client);
    }

    /** Happy path: the pause-follow request targets the follower index and an ack completes the step. */
    public void testPauseFollowingIndex() {
        IndexMetaData indexMetadata = IndexMetaData.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();

        // Mock pause follow api call (goes through client.execute, no indices admin needed):
        Client client = Mockito.mock(Client.class);
        Mockito.doAnswer(invocation -> {
            PauseFollowAction.Request request = (PauseFollowAction.Request) invocation.getArguments()[1];
            assertThat(request.getFollowIndex(), equalTo("follower-index"));
            @SuppressWarnings("unchecked")
            ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[2];
            listener.onResponse(new AcknowledgedResponse(true));
            return null;
        }).when(client).execute(Mockito.same(PauseFollowAction.INSTANCE), Mockito.any(), Mockito.any());

        // Single-element arrays capture the async callback results for assertion below.
        Boolean[] completed = new Boolean[1];
        Exception[] failure = new Exception[1];
        PauseFollowerIndexStep step = new PauseFollowerIndexStep(randomStepKey(), randomStepKey(), client);
        step.performAction(indexMetadata, null, new AsyncActionStep.Listener() {
            @Override
            public void onResponse(boolean complete) {
                completed[0] = complete;
            }

            @Override
            public void onFailure(Exception e) {
                failure[0] = e;
            }
        });
        assertThat(completed[0], is(true));
        assertThat(failure[0], nullValue());
    }

    /** Failure path: the exact exception must surface via onFailure and the step must not complete. */
    public void testPauseFollowingIndexFailed() {
        IndexMetaData indexMetadata = IndexMetaData.builder("follower-index")
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true"))
            .putCustom(CCR_METADATA_KEY, Collections.emptyMap())
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();

        // Mock pause follow api call:
        Client client = Mockito.mock(Client.class);
        Exception error = new RuntimeException();
        Mockito.doAnswer(invocation -> {
            PauseFollowAction.Request request = (PauseFollowAction.Request) invocation.getArguments()[1];
            assertThat(request.getFollowIndex(), equalTo("follower-index"));
            @SuppressWarnings("unchecked")
            ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[2];
            listener.onFailure(error);
            return null;
        }).when(client).execute(Mockito.same(PauseFollowAction.INSTANCE), Mockito.any(), Mockito.any());

        // Single-element arrays capture the async callback results for assertion below.
        Boolean[] completed = new Boolean[1];
        Exception[] failure = new Exception[1];
        PauseFollowerIndexStep step = new PauseFollowerIndexStep(randomStepKey(), randomStepKey(), client);
        step.performAction(indexMetadata, null, new AsyncActionStep.Listener() {
            @Override
            public void onResponse(boolean complete) {
                completed[0] = complete;
            }

            @Override
            public void onFailure(Exception e) {
                failure[0] = e;
            }
        });
        assertThat(completed[0], nullValue());
        assertThat(failure[0], sameInstance(error));
        Mockito.verify(client).execute(Mockito.same(PauseFollowAction.INSTANCE), Mockito.any(), Mockito.any());
        Mockito.verifyNoMoreInteractions(client);
    }
}
SetPriorityAction.NAME, UnfollowAction.NAME, + new String[] {UnfollowAction.NAME, RolloverAction.NAME}); + assertNextActionName("hot", SetPriorityAction.NAME, RolloverAction.NAME, new String[]{RolloverAction.NAME}); assertNextActionName("hot", SetPriorityAction.NAME, null, new String[] {}); - assertNextActionName("hot", SetPriorityAction.NAME, RolloverAction.NAME, new String[]{SetPriorityAction.NAME, RolloverAction.NAME}); + assertNextActionName("hot", RolloverAction.NAME, null, new String[] {}); assertNextActionName("hot", RolloverAction.NAME, null, new String[] { RolloverAction.NAME }); + assertInvalidAction("hot", "foo", new String[] { RolloverAction.NAME }); assertInvalidAction("hot", AllocateAction.NAME, new String[] { RolloverAction.NAME }); assertInvalidAction("hot", DeleteAction.NAME, new String[] { RolloverAction.NAME }); @@ -317,6 +322,9 @@ public void testGetNextActionName() { assertInvalidAction("hot", ShrinkAction.NAME, new String[] { RolloverAction.NAME }); // Warm Phase + assertNextActionName("warm", SetPriorityAction.NAME, UnfollowAction.NAME, + new String[]{SetPriorityAction.NAME, UnfollowAction.NAME, ReadOnlyAction.NAME, AllocateAction.NAME, + ShrinkAction.NAME, ForceMergeAction.NAME}); assertNextActionName("warm", SetPriorityAction.NAME, ReadOnlyAction.NAME, new String[]{SetPriorityAction.NAME, ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME}); assertNextActionName("warm", SetPriorityAction.NAME, AllocateAction.NAME, @@ -327,6 +335,17 @@ public void testGetNextActionName() { new String[]{SetPriorityAction.NAME, ForceMergeAction.NAME}); assertNextActionName("warm", SetPriorityAction.NAME, null, new String[]{SetPriorityAction.NAME}); + assertNextActionName("warm", UnfollowAction.NAME, ReadOnlyAction.NAME, + new String[] { SetPriorityAction.NAME, ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); + assertNextActionName("warm", UnfollowAction.NAME, ReadOnlyAction.NAME, + new 
String[] { ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); + assertNextActionName("warm", UnfollowAction.NAME, AllocateAction.NAME, + new String[] { AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); + assertNextActionName("warm", UnfollowAction.NAME, ShrinkAction.NAME, + new String[] { ShrinkAction.NAME, ForceMergeAction.NAME }); + assertNextActionName("warm", UnfollowAction.NAME, ForceMergeAction.NAME, new String[] { ForceMergeAction.NAME }); + assertNextActionName("warm", UnfollowAction.NAME, null, new String[] {}); + assertNextActionName("warm", ReadOnlyAction.NAME, AllocateAction.NAME, new String[] { ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); assertNextActionName("warm", ReadOnlyAction.NAME, ShrinkAction.NAME, @@ -371,15 +390,27 @@ public void testGetNextActionName() { new String[] { ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME }); // Cold Phase - assertNextActionName("cold", SetPriorityAction.NAME, FreezeAction.NAME, new String[]{SetPriorityAction.NAME, FreezeAction.NAME}); + assertNextActionName("cold", SetPriorityAction.NAME, UnfollowAction.NAME, + new String[]{UnfollowAction.NAME, SetPriorityAction.NAME, FreezeAction.NAME}); + assertNextActionName("cold", SetPriorityAction.NAME, FreezeAction.NAME, + new String[]{SetPriorityAction.NAME, FreezeAction.NAME}); assertNextActionName("cold", SetPriorityAction.NAME, AllocateAction.NAME, new String[]{SetPriorityAction.NAME, AllocateAction.NAME}); assertNextActionName("cold", SetPriorityAction.NAME, null, new String[] { SetPriorityAction.NAME }); assertNextActionName("cold", SetPriorityAction.NAME, null, new String[] {}); + + assertNextActionName("cold", UnfollowAction.NAME, AllocateAction.NAME, + new String[] {SetPriorityAction.NAME, AllocateAction.NAME, FreezeAction.NAME}); + assertNextActionName("cold", UnfollowAction.NAME, AllocateAction.NAME, + new String[] 
{AllocateAction.NAME, FreezeAction.NAME}); + assertNextActionName("cold", UnfollowAction.NAME, FreezeAction.NAME, new String[] {FreezeAction.NAME}); + assertNextActionName("cold", UnfollowAction.NAME, null, new String[] {}); + assertNextActionName("cold", AllocateAction.NAME, null, new String[] { AllocateAction.NAME }); assertNextActionName("cold", AllocateAction.NAME, null, new String[] {}); assertNextActionName("cold", AllocateAction.NAME, null, new String[] {}); assertNextActionName("cold", AllocateAction.NAME, FreezeAction.NAME, FreezeAction.NAME); + assertNextActionName("cold", FreezeAction.NAME, null); assertNextActionName("cold", FreezeAction.NAME, null, AllocateAction.NAME); @@ -393,6 +424,7 @@ public void testGetNextActionName() { // Delete Phase assertNextActionName("delete", DeleteAction.NAME, null, new String[] {}); assertNextActionName("delete", DeleteAction.NAME, null, new String[] { DeleteAction.NAME }); + assertInvalidAction("delete", "foo", new String[] { DeleteAction.NAME }); assertInvalidAction("delete", AllocateAction.NAME, new String[] { DeleteAction.NAME }); assertInvalidAction("delete", ForceMergeAction.NAME, new String[] { DeleteAction.NAME }); @@ -401,6 +433,7 @@ public void testGetNextActionName() { assertInvalidAction("delete", ShrinkAction.NAME, new String[] { DeleteAction.NAME }); assertInvalidAction("delete", FreezeAction.NAME, new String[] { DeleteAction.NAME }); assertInvalidAction("delete", SetPriorityAction.NAME, new String[] { DeleteAction.NAME }); + assertInvalidAction("delete", UnfollowAction.NAME, new String[] { DeleteAction.NAME }); Phase phase = new Phase("foo", TimeValue.ZERO, Collections.emptyMap()); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, @@ -444,6 +477,8 @@ private ConcurrentMap convertActionNamesToActions(Strin return new FreezeAction(); case SetPriorityAction.NAME: return new SetPriorityAction(0); + case UnfollowAction.NAME: + return new UnfollowAction(); } return new 
DeleteAction(); }).collect(Collectors.toConcurrentMap(LifecycleAction::getWriteableName, Function.identity())); @@ -509,6 +544,8 @@ private LifecycleAction getTestAction(String actionName) { return TEST_FREEZE_ACTION; case SetPriorityAction.NAME: return TEST_PRIORITY_ACTION; + case UnfollowAction.NAME: + return TEST_UNFOLLOW_ACTION; default: throw new IllegalArgumentException("unsupported timeseries phase action [" + actionName + "]"); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowActionTests.java new file mode 100644 index 0000000000000..42f299a8aeafd --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowActionTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class UnfollowActionTests extends AbstractActionTestCase { + + @Override + protected UnfollowAction doParseInstance(XContentParser parser) throws IOException { + return UnfollowAction.parse(parser); + } + + @Override + protected UnfollowAction createTestInstance() { + return new UnfollowAction(); + } + + @Override + protected Reader instanceReader() { + return UnfollowAction::new; + } + + public void testToSteps() { + UnfollowAction action = createTestInstance(); + String phase = randomAlphaOfLengthBetween(1, 10); + StepKey nextStepKey = new StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); + List steps = action.toSteps(null, phase, nextStepKey); + assertThat(steps, notNullValue()); + assertThat(steps.size(), equalTo(7)); + + StepKey expectedFirstStepKey = new StepKey(phase, UnfollowAction.NAME, WaitForIndexingCompleteStep.NAME); + StepKey expectedSecondStepKey = new StepKey(phase, UnfollowAction.NAME, WaitForFollowShardTasksStep.NAME); + StepKey expectedThirdStepKey = new StepKey(phase, UnfollowAction.NAME, PauseFollowerIndexStep.NAME); + StepKey expectedFourthStepKey = new StepKey(phase, UnfollowAction.NAME, CloseFollowerIndexStep.NAME); + StepKey expectedFifthStepKey = new StepKey(phase, UnfollowAction.NAME, UnfollowFollowIndexStep.NAME); + StepKey expectedSixthStepKey = new StepKey(phase, UnfollowAction.NAME, OpenFollowerIndexStep.NAME); + StepKey expectedSeventhStepKey = new StepKey(phase, UnfollowAction.NAME, WaitForYellowStep.NAME); + + WaitForIndexingCompleteStep firstStep = 
(WaitForIndexingCompleteStep) steps.get(0); + assertThat(firstStep.getKey(), equalTo(expectedFirstStepKey)); + assertThat(firstStep.getNextStepKey(), equalTo(expectedSecondStepKey)); + + WaitForFollowShardTasksStep secondStep = (WaitForFollowShardTasksStep) steps.get(1); + assertThat(secondStep.getKey(), equalTo(expectedSecondStepKey)); + assertThat(secondStep.getNextStepKey(), equalTo(expectedThirdStepKey)); + + PauseFollowerIndexStep thirdStep = (PauseFollowerIndexStep) steps.get(2); + assertThat(thirdStep.getKey(), equalTo(expectedThirdStepKey)); + assertThat(thirdStep.getNextStepKey(), equalTo(expectedFourthStepKey)); + + CloseFollowerIndexStep fourthStep = (CloseFollowerIndexStep) steps.get(3); + assertThat(fourthStep.getKey(), equalTo(expectedFourthStepKey)); + assertThat(fourthStep.getNextStepKey(), equalTo(expectedFifthStepKey)); + + UnfollowFollowIndexStep fifthStep = (UnfollowFollowIndexStep) steps.get(4); + assertThat(fifthStep.getKey(), equalTo(expectedFifthStepKey)); + assertThat(fifthStep.getNextStepKey(), equalTo(expectedSixthStepKey)); + + OpenFollowerIndexStep sixthStep = (OpenFollowerIndexStep) steps.get(5); + assertThat(sixthStep.getKey(), equalTo(expectedSixthStepKey)); + assertThat(sixthStep.getNextStepKey(), equalTo(expectedSeventhStepKey)); + + WaitForYellowStep seventhStep = (WaitForYellowStep) steps.get(6); + assertThat(seventhStep.getKey(), equalTo(expectedSeventhStepKey)); + assertThat(seventhStep.getNextStepKey(), equalTo(nextStepKey)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStepTests.java new file mode 100644 index 0000000000000..58558c92d2511 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowFollowIndexStepTests.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.AdminClient; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.IndicesAdminClient; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; +import org.mockito.Mockito; + +import java.util.Collections; + +import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +public class UnfollowFollowIndexStepTests extends AbstractUnfollowIndexStepTestCase { + + @Override + protected UnfollowFollowIndexStep newInstance(Step.StepKey key, Step.StepKey nextKey, Client client) { + return new UnfollowFollowIndexStep(key, nextKey, client); + } + + public void testUnFollow() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + Client client = Mockito.mock(Client.class); + AdminClient adminClient = Mockito.mock(AdminClient.class); + Mockito.when(client.admin()).thenReturn(adminClient); + IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class); + Mockito.when(adminClient.indices()).thenReturn(indicesClient); + + Mockito.doAnswer(invocation -> { + UnfollowAction.Request request = 
(UnfollowAction.Request) invocation.getArguments()[1]; + assertThat(request.getFollowerIndex(), equalTo("follower-index")); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new AcknowledgedResponse(true)); + return null; + }).when(client).execute(Mockito.same(UnfollowAction.INSTANCE), Mockito.any(), Mockito.any()); + + Boolean[] completed = new Boolean[1]; + Exception[] failure = new Exception[1]; + UnfollowFollowIndexStep step = new UnfollowFollowIndexStep(randomStepKey(), randomStepKey(), client); + step.performAction(indexMetadata, null, new AsyncActionStep.Listener() { + @Override + public void onResponse(boolean complete) { + completed[0] = complete; + } + + @Override + public void onFailure(Exception e) { + failure[0] = e; + } + }); + assertThat(completed[0], is(true)); + assertThat(failure[0], nullValue()); + } + + public void testUnFollowUnfollowFailed() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + Client client = Mockito.mock(Client.class); + AdminClient adminClient = Mockito.mock(AdminClient.class); + Mockito.when(client.admin()).thenReturn(adminClient); + IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class); + Mockito.when(adminClient.indices()).thenReturn(indicesClient); + + // Mock unfollow api call: + Exception error = new RuntimeException(); + Mockito.doAnswer(invocation -> { + UnfollowAction.Request request = (UnfollowAction.Request) invocation.getArguments()[1]; + assertThat(request.getFollowerIndex(), equalTo("follower-index")); + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(error); + return null; + }).when(client).execute(Mockito.same(UnfollowAction.INSTANCE), 
Mockito.any(), Mockito.any()); + + Boolean[] completed = new Boolean[1]; + Exception[] failure = new Exception[1]; + UnfollowFollowIndexStep step = new UnfollowFollowIndexStep(randomStepKey(), randomStepKey(), client); + step.performAction(indexMetadata, null, new AsyncActionStep.Listener() { + @Override + public void onResponse(boolean complete) { + completed[0] = complete; + } + + @Override + public void onFailure(Exception e) { + failure[0] = e; + } + }); + assertThat(completed[0], nullValue()); + assertThat(failure[0], sameInstance(error)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepInfoTests.java new file mode 100644 index 0000000000000..483df7632e2a4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepInfoTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xpack.core.indexlifecycle.WaitForFollowShardTasksStep.Info; +import org.elasticsearch.xpack.core.indexlifecycle.WaitForFollowShardTasksStep.Info.ShardFollowTaskInfo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class WaitForFollowShardTasksStepInfoTests extends AbstractXContentTestCase { + + private static final ConstructingObjectParser SHARD_FOLLOW_TASK_INFO_PARSER = + new ConstructingObjectParser<>( + "shard_follow_task_info_parser", + args -> new ShardFollowTaskInfo((String) args[0], (Integer) args[1], (Long) args[2], (Long) args[3]) + ); + + static { + SHARD_FOLLOW_TASK_INFO_PARSER.declareString(ConstructingObjectParser.constructorArg(), ShardFollowTaskInfo.FOLLOWER_INDEX_FIELD); + SHARD_FOLLOW_TASK_INFO_PARSER.declareInt(ConstructingObjectParser.constructorArg(), ShardFollowTaskInfo.SHARD_ID_FIELD); + SHARD_FOLLOW_TASK_INFO_PARSER.declareLong(ConstructingObjectParser.constructorArg(), + ShardFollowTaskInfo.LEADER_GLOBAL_CHECKPOINT_FIELD); + SHARD_FOLLOW_TASK_INFO_PARSER.declareLong(ConstructingObjectParser.constructorArg(), + ShardFollowTaskInfo.FOLLOWER_GLOBAL_CHECKPOINT_FIELD); + } + + private static final ConstructingObjectParser INFO_PARSER = new ConstructingObjectParser<>( + "info_parser", + args -> { + @SuppressWarnings("unchecked") + Info info = new Info((List) args[0]); + return info; + } + ); + + static { + INFO_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SHARD_FOLLOW_TASK_INFO_PARSER, + Info.SHARD_FOLLOW_TASKS); + INFO_PARSER.declareString((i, s) -> {}, Info.MESSAGE); + } + + @Override + protected Info createTestInstance() { + int numInfos = randomIntBetween(0, 32); + List shardFollowTaskInfos = new 
ArrayList<>(numInfos); + for (int i = 0; i < numInfos; i++) { + shardFollowTaskInfos.add(new ShardFollowTaskInfo(randomAlphaOfLength(3), randomIntBetween(0, 10), + randomNonNegativeLong(), randomNonNegativeLong())); + } + return new Info(shardFollowTaskInfos); + } + + @Override + protected Info doParseInstance(XContentParser parser) throws IOException { + return INFO_PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java new file mode 100644 index 0000000000000..a0ee01a240347 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForFollowShardTasksStepTests.java @@ -0,0 +1,210 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; +import org.mockito.Mockito; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.IsNull.notNullValue; + +public class WaitForFollowShardTasksStepTests extends AbstractStepTestCase { + + @Override + protected WaitForFollowShardTasksStep createRandomInstance() { + StepKey stepKey = randomStepKey(); + StepKey nextStepKey = randomStepKey(); + return new WaitForFollowShardTasksStep(stepKey, nextStepKey, Mockito.mock(Client.class)); + } + + @Override + protected WaitForFollowShardTasksStep mutateInstance(WaitForFollowShardTasksStep instance) { + StepKey key = instance.getKey(); + StepKey nextKey = instance.getNextStepKey(); + + if (randomBoolean()) { + key = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } else { + nextKey = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } + + return new WaitForFollowShardTasksStep(key, nextKey, instance.getClient()); + } + + @Override + protected WaitForFollowShardTasksStep copyInstance(WaitForFollowShardTasksStep instance) { + return new WaitForFollowShardTasksStep(instance.getKey(), instance.getNextStepKey(), instance.getClient()); + } + + public void testConditionMet() { 
+ IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .numberOfShards(2) + .numberOfReplicas(0) + .build(); + Client client = Mockito.mock(Client.class); + List statsResponses = Arrays.asList( + new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)), + new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 3, 3)) + ); + mockFollowStatsCall(client, indexMetadata.getIndex().getName(), statsResponses); + + WaitForFollowShardTasksStep step = new WaitForFollowShardTasksStep(randomStepKey(), randomStepKey(), client); + final boolean[] conditionMetHolder = new boolean[1]; + final ToXContentObject[] informationContextHolder = new ToXContentObject[1]; + final Exception[] exceptionHolder = new Exception[1]; + step.evaluateCondition(indexMetadata, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean conditionMet, ToXContentObject informationContext) { + conditionMetHolder[0] = conditionMet; + informationContextHolder[0] = informationContext; + } + + @Override + public void onFailure(Exception e) { + exceptionHolder[0] = e; + } + }); + + assertThat(conditionMetHolder[0], is(true)); + assertThat(informationContextHolder[0], nullValue()); + assertThat(exceptionHolder[0], nullValue()); + } + + public void testConditionNotMetShardsNotInSync() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .numberOfShards(2) + .numberOfReplicas(0) + .build(); + Client client = Mockito.mock(Client.class); + List statsResponses = Arrays.asList( + new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)), + new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 8, 3)) + ); 
+ mockFollowStatsCall(client, indexMetadata.getIndex().getName(), statsResponses); + + WaitForFollowShardTasksStep step = new WaitForFollowShardTasksStep(randomStepKey(), randomStepKey(), client); + final boolean[] conditionMetHolder = new boolean[1]; + final ToXContentObject[] informationContextHolder = new ToXContentObject[1]; + final Exception[] exceptionHolder = new Exception[1]; + step.evaluateCondition(indexMetadata, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean conditionMet, ToXContentObject informationContext) { + conditionMetHolder[0] = conditionMet; + informationContextHolder[0] = informationContext; + } + + @Override + public void onFailure(Exception e) { + exceptionHolder[0] = e; + } + }); + + assertThat(conditionMetHolder[0], is(false)); + assertThat(informationContextHolder[0], notNullValue()); + assertThat(exceptionHolder[0], nullValue()); + WaitForFollowShardTasksStep.Info info = (WaitForFollowShardTasksStep.Info) informationContextHolder[0]; + assertThat(info.getShardFollowTaskInfos().size(), equalTo(1)); + assertThat(info.getShardFollowTaskInfos().get(0).getShardId(), equalTo(1)); + assertThat(info.getShardFollowTaskInfos().get(0).getLeaderGlobalCheckpoint(), equalTo(8L)); + assertThat(info.getShardFollowTaskInfos().get(0).getFollowerGlobalCheckpoint(), equalTo(3L)); + } + + public void testConditionNotMetNotAFollowerIndex() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .numberOfShards(2) + .numberOfReplicas(0) + .build(); + Client client = Mockito.mock(Client.class); + + WaitForFollowShardTasksStep step = new WaitForFollowShardTasksStep(randomStepKey(), randomStepKey(), client); + final boolean[] conditionMetHolder = new boolean[1]; + final ToXContentObject[] informationContextHolder = new ToXContentObject[1]; + final Exception[] exceptionHolder = new Exception[1]; + 
step.evaluateCondition(indexMetadata, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean conditionMet, ToXContentObject informationContext) { + conditionMetHolder[0] = conditionMet; + informationContextHolder[0] = informationContext; + } + + @Override + public void onFailure(Exception e) { + exceptionHolder[0] = e; + } + }); + + assertThat(conditionMetHolder[0], is(true)); + assertThat(informationContextHolder[0], nullValue()); + assertThat(exceptionHolder[0], nullValue()); + Mockito.verifyZeroInteractions(client); + } + + private static ShardFollowNodeTaskStatus createShardFollowTaskStatus(int shardId, long leaderGCP, long followerGCP) { + return new ShardFollowNodeTaskStatus( + "remote", + "leader-index", + "follower-index", + shardId, + leaderGCP, + -1, + followerGCP, + -1, + -1, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + Collections.emptyNavigableMap(), + 0, + null + ); + } + + private void mockFollowStatsCall(Client client, String expectedIndexName, List statsResponses) { + Mockito.doAnswer(invocationOnMock -> { + FollowStatsAction.StatsRequest request = (FollowStatsAction.StatsRequest) invocationOnMock.getArguments()[1]; + assertThat(request.indices().length, equalTo(1)); + assertThat(request.indices()[0], equalTo(expectedIndexName)); + + @SuppressWarnings("unchecked") + ActionListener listener = + (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(new FollowStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), statsResponses)); + return null; + }).when(client).execute(Mockito.eq(FollowStatsAction.INSTANCE), Mockito.any(), Mockito.any()); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForIndexingCompleteStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForIndexingCompleteStepTests.java new file mode 100644 index 0000000000000..41a9c5983a78c --- 
/dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForIndexingCompleteStepTests.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.util.Collections; + +import static org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction.CCR_METADATA_KEY; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class WaitForIndexingCompleteStepTests extends AbstractStepTestCase { + + @Override + protected WaitForIndexingCompleteStep createRandomInstance() { + StepKey stepKey = randomStepKey(); + StepKey nextStepKey = randomStepKey(); + return new WaitForIndexingCompleteStep(stepKey, nextStepKey); + } + + @Override + protected WaitForIndexingCompleteStep mutateInstance(WaitForIndexingCompleteStep instance) { + StepKey key = instance.getKey(); + StepKey nextKey = instance.getNextStepKey(); + + if (randomBoolean()) { + key = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } else { + nextKey = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } + + return new WaitForIndexingCompleteStep(key, nextKey); + } + + @Override + protected 
WaitForIndexingCompleteStep copyInstance(WaitForIndexingCompleteStep instance) { + return new WaitForIndexingCompleteStep(instance.getKey(), instance.getNextStepKey()); + } + + public void testConditionMet() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) + .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")) + .metaData(MetaData.builder().put(indexMetadata, true).build()) + .build(); + + WaitForIndexingCompleteStep step = createRandomInstance(); + ClusterStateWaitStep.Result result = step.isConditionMet(indexMetadata.getIndex(), clusterState); + assertThat(result.isComplete(), is(true)); + assertThat(result.getInfomationContext(), nullValue()); + } + + public void testConditionMetNotAFollowerIndex() { + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")) + .metaData(MetaData.builder().put(indexMetadata, true).build()) + .build(); + + WaitForIndexingCompleteStep step = createRandomInstance(); + ClusterStateWaitStep.Result result = step.isConditionMet(indexMetadata.getIndex(), clusterState); + assertThat(result.isComplete(), is(true)); + assertThat(result.getInfomationContext(), nullValue()); + } + + public void testConditionNotMet() { + Settings.Builder indexSettings = settings(Version.CURRENT); + if (randomBoolean()) { + indexSettings.put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "false"); + } + IndexMetaData indexMetadata = IndexMetaData.builder("follower-index") + .settings(indexSettings) + .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + 
ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")) + .metaData(MetaData.builder().put(indexMetadata, true).build()) + .build(); + + WaitForIndexingCompleteStep step = createRandomInstance(); + ClusterStateWaitStep.Result result = step.isConditionMet(indexMetadata.getIndex(), clusterState); + assertThat(result.isComplete(), is(false)); + assertThat(result.getInfomationContext(), notNullValue()); + WaitForIndexingCompleteStep.IndexingNotCompleteInfo info = + (WaitForIndexingCompleteStep.IndexingNotCompleteInfo) result.getInfomationContext(); + assertThat(info.getMessage(), equalTo("waiting for the [index.lifecycle.indexing_complete] setting to be set to " + + "true on the leader index, it is currently [false]")); + } + + public void testIndexDeleted() { + ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")) + .metaData(MetaData.builder().build()) + .build(); + + WaitForIndexingCompleteStep step = createRandomInstance(); + ClusterStateWaitStep.Result result = step.isConditionMet(new Index("this-index-doesnt-exist", "uuid"), clusterState); + assertThat(result.isComplete(), is(false)); + assertThat(result.getInfomationContext(), nullValue()); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForYellowStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForYellowStepTests.java new file mode 100644 index 0000000000000..6c3915d87cde4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForYellowStepTests.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.IsNull.notNullValue; + +public class WaitForYellowStepTests extends AbstractStepTestCase { + + @Override + protected WaitForYellowStep createRandomInstance() { + StepKey stepKey = randomStepKey(); + StepKey nextStepKey = randomStepKey(); + return new WaitForYellowStep(stepKey, nextStepKey); + } + + @Override + protected WaitForYellowStep mutateInstance(WaitForYellowStep instance) { + StepKey key = instance.getKey(); + StepKey nextKey = instance.getNextStepKey(); + + if (randomBoolean()) { + key = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } else { + nextKey = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + } + + return new WaitForYellowStep(key, nextKey); + } + + @Override + protected WaitForYellowStep copyInstance(WaitForYellowStep instance) { + return new WaitForYellowStep(instance.getKey(), instance.getNextStepKey()); + } + + public void testConditionMet() { + IndexMetaData indexMetadata = IndexMetaData.builder("former-follower-index") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + ShardRouting shardRouting = + 
TestShardRouting.newShardRouting("index2", 0, "1", true, ShardRoutingState.STARTED); + IndexRoutingTable indexRoutingTable = IndexRoutingTable.builder(indexMetadata.getIndex()) + .addShard(shardRouting).build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetadata, true).build()) + .routingTable(RoutingTable.builder().add(indexRoutingTable).build()) + .build(); + + WaitForYellowStep step = new WaitForYellowStep(randomStepKey(), randomStepKey()); + ClusterStateWaitStep.Result result = step.isConditionMet(indexMetadata.getIndex(), clusterState); + assertThat(result.isComplete(), is(true)); + assertThat(result.getInfomationContext(), nullValue()); + } + + public void testConditionNotMet() { + IndexMetaData indexMetadata = IndexMetaData.builder("former-follower-index") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + ShardRouting shardRouting = + TestShardRouting.newShardRouting("index2", 0, "1", true, ShardRoutingState.INITIALIZING); + IndexRoutingTable indexRoutingTable = IndexRoutingTable.builder(indexMetadata.getIndex()) + .addShard(shardRouting).build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetadata, true).build()) + .routingTable(RoutingTable.builder().add(indexRoutingTable).build()) + .build(); + + WaitForYellowStep step = new WaitForYellowStep(randomStepKey(), randomStepKey()); + ClusterStateWaitStep.Result result = step.isConditionMet(indexMetadata.getIndex(), clusterState); + assertThat(result.isComplete(), is(false)); + WaitForYellowStep.Info info = (WaitForYellowStep.Info) result.getInfomationContext(); + assertThat(info, notNullValue()); + assertThat(info.getMessage(), equalTo("index is red; not all primary shards are active")); + } + + public void testConditionNotMetNoIndexRoutingTable() { + IndexMetaData indexMetadata = 
IndexMetaData.builder("former-follower-index") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder().put(indexMetadata, true).build()) + .routingTable(RoutingTable.builder().build()) + .build(); + + WaitForYellowStep step = new WaitForYellowStep(randomStepKey(), randomStepKey()); + ClusterStateWaitStep.Result result = step.isConditionMet(indexMetadata.getIndex(), clusterState); + assertThat(result.isComplete(), is(false)); + WaitForYellowStep.Info info = (WaitForYellowStep.Info) result.getInfomationContext(); + assertThat(info, notNullValue()); + assertThat(info.getMessage(), equalTo("index is red; no IndexRoutingTable")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java index d747e26161234..2c59d9ca5782a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; +import org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction; import org.elasticsearch.xpack.core.indexlifecycle.action.PutLifecycleAction.Request; import org.junit.Before; @@ -68,7 +69,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), 
new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new) )); } @@ -85,7 +87,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle new file mode 100644 index 0000000000000..59df733892944 --- /dev/null +++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle @@ -0,0 +1,54 @@ +import org.elasticsearch.gradle.test.RestIntegTestTask + +apply plugin: 'elasticsearch.standalone-test' + +dependencies { + testCompile project(':x-pack:plugin:ccr:qa') +} + +task leaderClusterTest(type: RestIntegTestTask) { + mustRunAfter(precommit) +} + +leaderClusterTestCluster { + numNodes = 1 + clusterName = 'leader-cluster' + setting 'xpack.ilm.enabled', 'true' + setting 'xpack.ccr.enabled', 'true' + setting 'xpack.security.enabled', 'false' + setting 'xpack.watcher.enabled', 'false' + setting 
'xpack.monitoring.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' + setting 'indices.lifecycle.poll_interval', '1000ms' +} + +leaderClusterTestRunner { + systemProperty 'tests.target_cluster', 'leader' +} + +task followClusterTest(type: RestIntegTestTask) {} + +followClusterTestCluster { + dependsOn leaderClusterTestRunner + numNodes = 1 + clusterName = 'follow-cluster' + setting 'xpack.ilm.enabled', 'true' + setting 'xpack.ccr.enabled', 'true' + setting 'xpack.security.enabled', 'false' + setting 'xpack.watcher.enabled', 'false' + setting 'xpack.monitoring.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' + setting 'indices.lifecycle.poll_interval', '1000ms' + setting 'cluster.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" +} + +followClusterTestRunner { + systemProperty 'tests.target_cluster', 'follow' + systemProperty 'tests.leader_host', "${-> leaderClusterTest.nodes.get(0).httpUri()}" + finalizedBy 'leaderClusterTestCluster#stop' +} + +check.dependsOn followClusterTest +unitTest.enabled = false // no unit tests for this module, only the rest integration test diff --git a/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/indexlifecycle/CCRIndexLifecycleIT.java b/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/indexlifecycle/CCRIndexLifecycleIT.java new file mode 100644 index 0000000000000..797916c7c405f --- /dev/null +++ b/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/indexlifecycle/CCRIndexLifecycleIT.java @@ -0,0 +1,285 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.indexlifecycle; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ccr.ESCCRRestTestCase; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class CCRIndexLifecycleIT extends ESCCRRestTestCase { + + private static final Logger LOGGER = LogManager.getLogger(CCRIndexLifecycleIT.class); + + public void testBasicCCRAndILMIntegration() throws Exception { + String indexName = "logs-1"; + + String policyName = "basic-test"; + if ("leader".equals(targetCluster)) { + putILMPolicy(policyName, "50GB", null, TimeValue.timeValueHours(7*24)); + Settings indexSettings = Settings.builder() + .put("index.soft_deletes.enabled", true) + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.lifecycle.name", policyName) + .put("index.lifecycle.rollover_alias", "logs") + .build(); + createIndex(indexName, indexSettings, "", "\"logs\": { }"); + ensureGreen(indexName); + } else if ("follow".equals(targetCluster)) { + // Policy with the same name must exist in follower cluster too: + putILMPolicy(policyName, "50GB", null, TimeValue.timeValueHours(7*24)); + followIndex(indexName, indexName); + // Aliases are not copied from leader index, so we need to add that for the rollover action in follower cluster: + client().performRequest(new Request("PUT", "/" + indexName + "/_alias/logs")); + + try (RestClient 
leaderClient = buildLeaderClient()) { + index(leaderClient, indexName, "1"); + assertDocumentExists(leaderClient, indexName, "1"); + + assertBusy(() -> { + assertDocumentExists(client(), indexName, "1"); + // Sanity check that following_index setting has been set, so that we can verify later that this setting has been unset: + assertThat(getIndexSetting(client(), indexName, "index.xpack.ccr.following_index"), equalTo("true")); + + assertILMPolicy(leaderClient, indexName, policyName, "hot"); + assertILMPolicy(client(), indexName, policyName, "hot"); + }); + + updateIndexSettings(leaderClient, indexName, Settings.builder() + .put("index.lifecycle.indexing_complete", true) + .build() + ); + + assertBusy(() -> { + // Ensure that 'index.lifecycle.indexing_complete' is replicated: + assertThat(getIndexSetting(leaderClient, indexName, "index.lifecycle.indexing_complete"), equalTo("true")); + assertThat(getIndexSetting(client(), indexName, "index.lifecycle.indexing_complete"), equalTo("true")); + + assertILMPolicy(leaderClient, indexName, policyName, "warm"); + assertILMPolicy(client(), indexName, policyName, "warm"); + + // ILM should have placed both indices in the warm phase and there these indices are read-only: + assertThat(getIndexSetting(leaderClient, indexName, "index.blocks.write"), equalTo("true")); + assertThat(getIndexSetting(client(), indexName, "index.blocks.write"), equalTo("true")); + // ILM should have unfollowed the follower index, so the following_index setting should have been removed: + // (this controls whether the follow engine is used) + assertThat(getIndexSetting(client(), indexName, "index.xpack.ccr.following_index"), nullValue()); + }); + } + } else { + fail("unexpected target cluster [" + targetCluster + "]"); + } + } + + public void testCcrAndIlmWithRollover() throws Exception { + String alias = "metrics"; + String indexName = "metrics-000001"; + String nextIndexName = "metrics-000002"; + String policyName = "rollover-test"; + + if 
("leader".equals(targetCluster)) { + // Create a policy on the leader + putILMPolicy(policyName, null, 1, null); + Request templateRequest = new Request("PUT", "_template/my_template"); + Settings indexSettings = Settings.builder() + .put("index.soft_deletes.enabled", true) + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.lifecycle.name", policyName) + .put("index.lifecycle.rollover_alias", alias) + .build(); + templateRequest.setJsonEntity("{\"index_patterns\": [\"metrics-*\"], \"settings\": " + Strings.toString(indexSettings) + "}"); + assertOK(client().performRequest(templateRequest)); + } else if ("follow".equals(targetCluster)) { + // Policy with the same name must exist in follower cluster too: + putILMPolicy(policyName, null, 1, null); + + // Set up an auto-follow pattern + Request createAutoFollowRequest = new Request("PUT", "/_ccr/auto_follow/my_auto_follow_pattern"); + createAutoFollowRequest.setJsonEntity("{\"leader_index_patterns\": [\"metrics-*\"], " + + "\"remote_cluster\": \"leader_cluster\", \"read_poll_timeout\": \"1000ms\"}"); + assertOK(client().performRequest(createAutoFollowRequest)); + + try (RestClient leaderClient = buildLeaderClient()) { + // Create an index on the leader using the template set up above + Request createIndexRequest = new Request("PUT", "/" + indexName); + createIndexRequest.setJsonEntity("{" + + "\"mappings\": {\"_doc\": {\"properties\": {\"field\": {\"type\": \"keyword\"}}}}, " + + "\"aliases\": {\"" + alias + "\": {\"is_write_index\": true}} }"); + assertOK(leaderClient.performRequest(createIndexRequest)); + // Check that the new index is created and green + Request checkIndexRequest = new Request("GET", "/_cluster/health/" + indexName); + checkIndexRequest.addParameter("wait_for_status", "green"); + checkIndexRequest.addParameter("timeout", "70s"); + checkIndexRequest.addParameter("level", "shards"); + assertOK(leaderClient.performRequest(checkIndexRequest)); + + // Check that it got replicated 
to the follower + assertBusy(() -> assertTrue(indexExists(indexName))); + + // Aliases are not copied from leader index, so we need to add that for the rollover action in follower cluster: + client().performRequest(new Request("PUT", "/" + indexName + "/_alias/" + alias)); + + index(leaderClient, indexName, "1"); + assertDocumentExists(leaderClient, indexName, "1"); + + assertBusy(() -> { + assertDocumentExists(client(), indexName, "1"); + // Sanity check that following_index setting has been set, so that we can verify later that this setting has been unset: + assertThat(getIndexSetting(client(), indexName, "index.xpack.ccr.following_index"), equalTo("true")); + }); + + // Wait for the index to roll over on the leader + assertBusy(() -> { + assertOK(leaderClient.performRequest(new Request("HEAD", "/" + nextIndexName))); + assertThat(getIndexSetting(leaderClient, indexName, "index.lifecycle.indexing_complete"), equalTo("true")); + + }); + + assertBusy(() -> { + // The next index should have been created on the leader + assertOK(leaderClient.performRequest(new Request("HEAD", "/" + nextIndexName))); + // And the old index should have a write block and indexing complete set + assertThat(getIndexSetting(leaderClient, indexName, "index.blocks.write"), equalTo("true")); + assertThat(getIndexSetting(leaderClient, indexName, "index.lifecycle.indexing_complete"), equalTo("true")); + + }); + + assertBusy(() -> { + // Wait for the setting to get replicated to the follower + assertThat(getIndexSetting(client(), indexName, "index.lifecycle.indexing_complete"), equalTo("true")); + }); + + assertBusy(() -> { + // ILM should have unfollowed the follower index, so the following_index setting should have been removed: + // (this controls whether the follow engine is used) + assertThat(getIndexSetting(client(), indexName, "index.xpack.ccr.following_index"), nullValue()); + // The next index should have been created on the follower as well + indexExists(nextIndexName); + }); 
+ + assertBusy(() -> { + // And the previously-follower index should be in the warm phase + assertILMPolicy(client(), indexName, policyName, "warm"); + }); + + // Clean up + leaderClient.performRequest(new Request("DELETE", "/_template/my_template")); + } + } else { + fail("unexpected target cluster [" + targetCluster + "]"); + } + } + + private static void putILMPolicy(String name, String maxSize, Integer maxDocs, TimeValue maxAge) throws IOException { + final Request request = new Request("PUT", "_ilm/policy/" + name); + XContentBuilder builder = jsonBuilder(); + builder.startObject(); + { + builder.startObject("policy"); + { + builder.startObject("phases"); + { + builder.startObject("hot"); + { + builder.startObject("actions"); + { + builder.startObject("rollover"); + if (maxSize != null) { + builder.field("max_size", maxSize); + } + if (maxAge != null) { + builder.field("max_age", maxAge); + } + if (maxDocs != null) { + builder.field("max_docs", maxDocs); + } + builder.endObject(); + } + { + builder.startObject("unfollow"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + builder.startObject("warm"); + { + builder.startObject("actions"); + { + builder.startObject("readonly"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + builder.startObject("delete"); + { + builder.field("min_age", "7d"); + builder.startObject("actions"); + { + builder.startObject("delete"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + request.setJsonEntity(Strings.toString(builder)); + assertOK(client().performRequest(request)); + } + + private static void assertILMPolicy(RestClient client, String index, String policy, String expectedPhase) throws IOException { + final Request request = new Request("GET", "/" + index + "/_ilm/explain"); + Map response = toMap(client.performRequest(request)); + 
LOGGER.info("response={}", response); + Map explanation = (Map) ((Map) response.get("indices")).get(index); + assertThat(explanation.get("managed"), is(true)); + assertThat(explanation.get("policy"), equalTo(policy)); + assertThat(explanation.get("phase"), equalTo(expectedPhase)); + } + + private static void updateIndexSettings(RestClient client, String index, Settings settings) throws IOException { + final Request request = new Request("PUT", "/" + index + "/_settings"); + request.setJsonEntity(Strings.toString(settings)); + assertOK(client.performRequest(request)); + } + + private static Object getIndexSetting(RestClient client, String index, String setting) throws IOException { + Request request = new Request("GET", "/" + index + "/_settings"); + request.addParameter("flat_settings", "true"); + Map response = toMap(client.performRequest(request)); + Map settings = (Map) ((Map) response.get(index)).get("settings"); + return settings.get(setting); + } + + private static void assertDocumentExists(RestClient client, String index, String id) throws IOException { + Request request = new Request("HEAD", "/" + index + "/_doc/" + id); + Response response = client.performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + } + +} diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java index 0088b7fde1cba..baa1d8facd958 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.RolloverAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; +import 
org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction; import org.elasticsearch.xpack.core.indexlifecycle.action.DeleteLifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.action.ExplainLifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.action.GetLifecycleAction; @@ -161,7 +162,8 @@ public List getNa new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(DeleteAction.NAME), DeleteAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) ); } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java index 5ac01f4753012..790dd5de632e6 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.SetPriorityAction; import org.elasticsearch.xpack.core.indexlifecycle.ShrinkAction; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; +import org.elasticsearch.xpack.core.indexlifecycle.UnfollowAction; import java.io.IOException; import java.util.ArrayList; @@ -85,7 +86,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new 
NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new) )); } @@ -102,7 +104,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(SetPriorityAction.NAME), SetPriorityAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(UnfollowAction.NAME), UnfollowAction::parse) )); return new NamedXContentRegistry(entries); } From 8da7a27f3b12c307bcbfaff7fdfa9d2a1f369b6a Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 18 Jan 2019 12:28:31 -0800 Subject: [PATCH 58/71] Deprecate types in the put mapping API. (#37280) From #29453 and #37285, the `include_type_name` parameter was already present and defaulted to false. This PR makes the following updates: - Add deprecation warnings to `RestPutMappingAction`, plus tests in `RestPutMappingActionTests`. - Add a typeless 'put mappings' method to the Java HLRC, and deprecate the old typed version. 
To do this cleanly, I opted to create a new `PutMappingRequest` object that differs from the existing server one. --- .../elasticsearch/client/IndicesClient.java | 41 ++++- .../client/IndicesRequestConverters.java | 17 +- .../client/indices/PutMappingRequest.java | 156 ++++++++++++++++++ .../elasticsearch/client/IndicesClientIT.java | 40 ++++- .../client/IndicesRequestConvertersTests.java | 28 +++- .../client/RestHighLevelClientTests.java | 82 ++++----- .../IndicesClientDocumentationIT.java | 35 ++-- .../indices/PutMappingRequestTests.java | 66 ++++++++ .../high-level/indices/put_mapping.asciidoc | 12 +- .../test/indices.put_mapping/10_basic.yml | 22 --- .../admin/indices/RestPutMappingAction.java | 14 +- .../mapping/put/PutMappingRequestTests.java | 7 +- .../indices/RestPutMappingActionTests.java | 77 +++++++++ .../index/RandomCreateIndexGenerator.java | 22 ++- 14 files changed, 503 insertions(+), 116 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java create mode 100644 server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 325d22fa9a0dd..d658e1f0682cd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -41,7 +41,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -63,6 +62,7 @@ import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; +import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.indices.UnfreezeIndexRequest; import org.elasticsearch.rest.RestStatus; @@ -166,6 +166,45 @@ public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions AcknowledgedResponse::fromXContent, listener, emptySet()); } + /** + * Updates the mappings on an index using the Put Mapping API. + * See + * Put Mapping API on elastic.co + * @param putMappingRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + * + * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method + * {@link #putMapping(PutMappingRequest, RequestOptions)} should be used instead, which accepts a new request object. + */ + @Deprecated + public AcknowledgedResponse putMapping(org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest, + RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, IndicesRequestConverters::putMapping, options, + AcknowledgedResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously updates the mappings on an index using the Put Mapping API. 
+ * See + * Put Mapping API on elastic.co + * @param putMappingRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * + * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The + * method {@link #putMappingAsync(PutMappingRequest, RequestOptions, ActionListener)} should be used instead, + * which accepts a new request object. + */ + @Deprecated + public void putMappingAsync(org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest, + RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, IndicesRequestConverters::putMapping, options, + AcknowledgedResponse::fromXContent, listener, emptySet()); + } + /** * Retrieves the mappings on an index or indices using the Get Mapping API. 
* See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java index f3ce8d2a935ce..33041d5e772bc 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java @@ -37,7 +37,6 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -51,6 +50,7 @@ import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; +import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.indices.UnfreezeIndexRequest; import org.elasticsearch.common.Strings; @@ -122,14 +122,25 @@ static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws return request; } + static Request putMapping(PutMappingRequest putMappingRequest) throws IOException { + Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping")); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(putMappingRequest.timeout()); + parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); + request.setEntity(RequestConverters.createEntity(putMappingRequest, 
RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request putMapping(org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest) throws IOException { // The concreteIndex is an internal concept, not applicable to requests made over the REST API. if (putMappingRequest.getConcreteIndex() != null) { throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API"); } - Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping", - putMappingRequest.type())); + Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), + "_mapping", putMappingRequest.type())); RequestConverters.Params parameters = new RequestConverters.Params(request); parameters.withTimeout(putMappingRequest.timeout()); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java new file mode 100644 index 0000000000000..4607e7a5589ce --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.indices; + +import com.carrotsearch.hppc.ObjectHashSet; +import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.TimedRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +/** + * Put a mapping definition into one or more indices. If an index already contains mappings, + * the new mappings will be merged with the existing one. If there are elements that cannot + * be merged, the request will be rejected. + */ +public class PutMappingRequest extends TimedRequest implements IndicesRequest, ToXContentObject { + + private static ObjectHashSet RESERVED_FIELDS = ObjectHashSet.from( + "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index", + "_size", "_timestamp", "_ttl", "_field_names" + ); + + private final String[] indices; + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); + + private BytesReference source; + private XContentType xContentType; + + /** + * Constructs a new put mapping request against one or more indices. If no indices + * are provided then it will be executed against all indices. + */ + public PutMappingRequest(String... indices) { + this.indices = indices; + } + + /** + * The indices into which the mappings will be put. 
+ */ + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public PutMappingRequest indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + /** + * The mapping source definition. + */ + public BytesReference source() { + return source; + } + + /** + * The {@link XContentType} of the mapping source. + */ + public XContentType xContentType() { + return xContentType; + } + + /** + * The mapping source definition. + * + * Note that the definition should *not* be nested under a type name. + */ + public PutMappingRequest source(Map mappingSource) { + try { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(mappingSource); + return source(builder); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e); + } + } + + /** + * The mapping source definition. + * + * Note that the definition should *not* be nested under a type name. + */ + public PutMappingRequest source(String mappingSource, XContentType xContentType) { + this.source = new BytesArray(mappingSource); + this.xContentType = xContentType; + return this; + } + + /** + * The mapping source definition. + * + * Note that the definition should *not* be nested under a type name. + */ + public PutMappingRequest source(XContentBuilder builder) { + this.source = BytesReference.bytes(builder); + this.xContentType = builder.contentType(); + return this; + } + + /** + * The mapping source definition. + * + * Note that the definition should *not* be nested under a type name. 
+ */ + public PutMappingRequest source(BytesReference source, XContentType xContentType) { + this.source = source; + this.xContentType = xContentType; + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + if (source != null) { + try (InputStream stream = source.streamInput()) { + builder.rawValue(stream, xContentType); + } + } else { + builder.startObject().endObject(); + } + return builder; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 0f29950355472..af8a51b4900d8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -47,7 +47,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -74,6 +73,7 @@ import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; +import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.indices.UnfreezeIndexRequest; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -93,6 +93,7 @@ import org.elasticsearch.index.query.QueryBuilder; import 
org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.admin.indices.RestPutMappingAction; import java.io.IOException; import java.util.Arrays; @@ -404,24 +405,49 @@ public void testGetIndexNonExistentIndex() throws IOException { } public void testPutMapping() throws IOException { - // Add mappings to index String indexName = "mapping_index"; createIndex(indexName, Settings.EMPTY); PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - putMappingRequest.type("_doc"); XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); mappingBuilder.startObject().startObject("properties").startObject("field"); mappingBuilder.field("type", "text"); mappingBuilder.endObject().endObject().endObject(); putMappingRequest.source(mappingBuilder); - AcknowledgedResponse putMappingResponse = - execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); + AcknowledgedResponse putMappingResponse = execute(putMappingRequest, + highLevelClient().indices()::putMapping, + highLevelClient().indices()::putMappingAsync); assertTrue(putMappingResponse.isAcknowledged()); Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); + assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", + getIndexResponse)); + } + + public void testPutMappingWithTypes() throws IOException { + String indexName = "mapping_index"; + createIndex(indexName, Settings.EMPTY); + + org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest = + new org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest(indexName); + putMappingRequest.type("some_type"); + + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + 
mappingBuilder.startObject().startObject("properties").startObject("field"); + mappingBuilder.field("type", "text"); + mappingBuilder.endObject().endObject().endObject(); + putMappingRequest.source(mappingBuilder); + + AcknowledgedResponse putMappingResponse = execute(putMappingRequest, + highLevelClient().indices()::putMapping, + highLevelClient().indices()::putMappingAsync, + expectWarnings(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)); + assertTrue(putMappingResponse.isAcknowledged()); + + Map getIndexResponse = getAsMap(indexName); + assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", + getIndexResponse)); } public void testGetMapping() throws IOException { @@ -429,7 +455,6 @@ public void testGetMapping() throws IOException { createIndex(indexName, Settings.EMPTY); PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - putMappingRequest.type("_doc"); XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); mappingBuilder.startObject().startObject("properties").startObject("field"); mappingBuilder.field("type", "text"); @@ -463,7 +488,6 @@ public void testGetFieldMapping() throws IOException { createIndex(indexName, Settings.EMPTY); PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - putMappingRequest.type("_doc"); XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); mappingBuilder.startObject().startObject("properties").startObject("field"); mappingBuilder.field("type", "text"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java index 308c576edafe6..3410792e56132 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -40,7 +40,6 @@ import 
org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -54,6 +53,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; +import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -163,7 +163,31 @@ public void testUpdateAliases() throws IOException { } public void testPutMapping() throws IOException { - PutMappingRequest putMappingRequest = new PutMappingRequest(); + String[] indices = RequestConvertersTests.randomIndicesNames(0, 5); + PutMappingRequest putMappingRequest = new PutMappingRequest(indices); + + Map expectedParams = new HashMap<>(); + RequestConvertersTests.setRandomTimeout(putMappingRequest, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + RequestConvertersTests.setRandomMasterTimeout(putMappingRequest, expectedParams); + + Request request = IndicesRequestConverters.putMapping(putMappingRequest); + + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + + Assert.assertEquals(endpoint.toString(), request.getEndpoint()); + Assert.assertEquals(expectedParams, request.getParameters()); + Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + 
RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity()); + } + + public void testPutMappingWithTypes() throws IOException { + org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest = + new org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest(); String[] indices = RequestConvertersTests.randomIndicesNames(0, 5); putMappingRequest.indices(indices); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 1ea6056368051..2fa4283971b40 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -723,55 +723,57 @@ public void testApiNamingConventions() throws Exception { ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load("/rest-api-spec/api"); Set apiSpec = restSpec.getApis().stream().map(ClientYamlSuiteRestApi::getName).collect(Collectors.toSet()); + Set apiUnsupported = new HashSet<>(apiSpec); + Set apiNotFound = new HashSet<>(); Set topLevelMethodsExclusions = new HashSet<>(); topLevelMethodsExclusions.add("getLowLevelClient"); topLevelMethodsExclusions.add("close"); - Map methods = Arrays.stream(RestHighLevelClient.class.getMethods()) + Map> methods = Arrays.stream(RestHighLevelClient.class.getMethods()) .filter(method -> method.getDeclaringClass().equals(RestHighLevelClient.class) && topLevelMethodsExclusions.contains(method.getName()) == false) .map(method -> Tuple.tuple(toSnakeCase(method.getName()), method)) .flatMap(tuple -> tuple.v2().getReturnType().getName().endsWith("Client") ? 
getSubClientMethods(tuple.v1(), tuple.v2().getReturnType()) : Stream.of(tuple)) - .collect(Collectors.toMap(Tuple::v1, Tuple::v2)); - - Set apiNotFound = new HashSet<>(); + .collect(Collectors.groupingBy(Tuple::v1, + Collectors.mapping(Tuple::v2, Collectors.toSet()))); - for (Map.Entry entry : methods.entrySet()) { - Method method = entry.getValue(); + for (Map.Entry> entry : methods.entrySet()) { String apiName = entry.getKey(); - assertTrue("method [" + apiName + "] is not final", + for (Method method : entry.getValue()) { + assertTrue("method [" + apiName + "] is not final", Modifier.isFinal(method.getClass().getModifiers()) || Modifier.isFinal(method.getModifiers())); - assertTrue("method [" + method + "] should be public", Modifier.isPublic(method.getModifiers())); - - //we convert all the method names to snake case, hence we need to look for the '_async' suffix rather than 'Async' - if (apiName.endsWith("_async")) { - assertAsyncMethod(methods, method, apiName); - } else if (isSubmitTaskMethod(apiName)) { - assertSubmitTaskMethod(methods, method, apiName, restSpec); - } else { - assertSyncMethod(method, apiName); - boolean remove = apiSpec.remove(apiName); - if (remove == false) { - if (deprecatedMethods.contains(apiName)) { - assertTrue("method [" + method.getName() + "], api [" + apiName + "] should be deprecated", - method.isAnnotationPresent(Deprecated.class)); - } else { - //TODO xpack api are currently ignored, we need to load xpack yaml spec too - if (apiName.startsWith("xpack.") == false && - apiName.startsWith("license.") == false && - apiName.startsWith("machine_learning.") == false && - apiName.startsWith("rollup.") == false && - apiName.startsWith("watcher.") == false && - apiName.startsWith("graph.") == false && - apiName.startsWith("migration.") == false && - apiName.startsWith("security.") == false && - apiName.startsWith("index_lifecycle.") == false && - apiName.startsWith("ccr.") == false && - apiName.endsWith("freeze") == false) { - 
apiNotFound.add(apiName); + assertTrue("method [" + method + "] should be public", Modifier.isPublic(method.getModifiers())); + + //we convert all the method names to snake case, hence we need to look for the '_async' suffix rather than 'Async' + if (apiName.endsWith("_async")) { + assertAsyncMethod(methods, method, apiName); + } else if (isSubmitTaskMethod(apiName)) { + assertSubmitTaskMethod(methods, method, apiName, restSpec); + } else { + assertSyncMethod(method, apiName); + apiUnsupported.remove(apiName); + if (apiSpec.contains(apiName) == false) { + if (deprecatedMethods.contains(apiName)) { + assertTrue("method [" + method.getName() + "], api [" + apiName + "] should be deprecated", + method.isAnnotationPresent(Deprecated.class)); + } else { + //TODO xpack api are currently ignored, we need to load xpack yaml spec too + if (apiName.startsWith("xpack.") == false && + apiName.startsWith("license.") == false && + apiName.startsWith("machine_learning.") == false && + apiName.startsWith("rollup.") == false && + apiName.startsWith("watcher.") == false && + apiName.startsWith("graph.") == false && + apiName.startsWith("migration.") == false && + apiName.startsWith("security.") == false && + apiName.startsWith("index_lifecycle.") == false && + apiName.startsWith("ccr.") == false && + apiName.endsWith("freeze") == false) { + apiNotFound.add(apiName); + } } } } @@ -781,11 +783,11 @@ public void testApiNamingConventions() throws Exception { apiNotFound.size(), equalTo(0)); //we decided not to support cat API in the high-level REST client, they are supposed to be used from a low-level client - apiSpec.removeIf(api -> api.startsWith("cat.")); + apiUnsupported.removeIf(api -> api.startsWith("cat.")); Stream.concat(Arrays.stream(notYetSupportedApi), Arrays.stream(notRequiredApi)).forEach( api -> assertTrue(api + " API is either not defined in the spec or already supported by the high-level client", - apiSpec.remove(api))); - assertThat("Some API are not supported but they 
should be: " + apiSpec, apiSpec.size(), equalTo(0)); + apiUnsupported.remove(api))); + assertThat("Some API are not supported but they should be: " + apiUnsupported, apiUnsupported.size(), equalTo(0)); } private static void assertSyncMethod(Method method, String apiName) { @@ -816,7 +818,7 @@ private static void assertSyncMethod(Method method, String apiName) { } } - private static void assertAsyncMethod(Map methods, Method method, String apiName) { + private static void assertAsyncMethod(Map> methods, Method method, String apiName) { assertTrue("async method [" + method.getName() + "] doesn't have corresponding sync method", methods.containsKey(apiName.substring(0, apiName.length() - 6))); assertThat("async method [" + method + "] should return void", method.getReturnType(), equalTo(Void.TYPE)); @@ -836,7 +838,7 @@ private static void assertAsyncMethod(Map methods, Method method } } - private static void assertSubmitTaskMethod(Map methods, Method method, String apiName, + private static void assertSubmitTaskMethod(Map> methods, Method method, String apiName, ClientYamlSuiteRestSpec restSpec) { String methodName = extractMethodName(apiName); assertTrue("submit task method [" + method.getName() + "] doesn't have corresponding sync method", diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 3d1b77562215b..9e13f2cb34d92 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -46,7 +46,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -78,6 +77,7 @@ import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; +import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.indices.UnfreezeIndexRequest; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; @@ -465,7 +465,6 @@ public void testPutMapping() throws IOException { { // tag::put-mapping-request PutMappingRequest request = new PutMappingRequest("twitter"); // <1> - request.type("_doc"); // <2> // end::put-mapping-request { @@ -518,21 +517,13 @@ public void testPutMapping() throws IOException { AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); assertTrue(putMappingResponse.isAcknowledged()); } - { - //tag::put-mapping-shortcut - request.source("message", "type=text"); // <1> - //end::put-mapping-shortcut - AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); - assertTrue(putMappingResponse.isAcknowledged()); - } // tag::put-mapping-request-timeout - request.timeout(TimeValue.timeValueMinutes(2)); // <1> - request.timeout("2m"); // <2> + request.setTimeout(TimeValue.timeValueMinutes(2)); // <1> // end::put-mapping-request-timeout + // tag::put-mapping-request-masterTimeout - request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> - request.masterNodeTimeout("1m"); // <2> + request.setMasterTimeout(TimeValue.timeValueMinutes(1)); // <1> // 
end::put-mapping-request-masterTimeout // tag::put-mapping-execute @@ -555,7 +546,7 @@ public void testPutMappingAsync() throws Exception { } { - PutMappingRequest request = new PutMappingRequest("twitter").type("_doc"); + PutMappingRequest request = new PutMappingRequest("twitter"); // tag::put-mapping-execute-listener ActionListener listener = @@ -591,7 +582,6 @@ public void testGetMapping() throws IOException { CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"), RequestOptions.DEFAULT); assertTrue(createIndexResponse.isAcknowledged()); PutMappingRequest request = new PutMappingRequest("twitter"); - request.type("_doc"); request.source( "{\n" + " \"properties\": {\n" + @@ -647,7 +637,6 @@ public void testGetMappingAsync() throws Exception { CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"), RequestOptions.DEFAULT); assertTrue(createIndexResponse.isAcknowledged()); PutMappingRequest request = new PutMappingRequest("twitter"); - request.type("_doc"); request.source( "{\n" + " \"properties\": {\n" + @@ -717,7 +706,6 @@ public void testGetFieldMapping() throws IOException, InterruptedException { CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"), RequestOptions.DEFAULT); assertTrue(createIndexResponse.isAcknowledged()); PutMappingRequest request = new PutMappingRequest("twitter"); - request.type("_doc"); request.source( "{\n" + " \"properties\": {\n" + @@ -2519,10 +2507,15 @@ public void testAnalyze() throws IOException, InterruptedException { CreateIndexResponse resp = client.indices().create(req, RequestOptions.DEFAULT); assertTrue(resp.isAcknowledged()); - PutMappingRequest pmReq = new PutMappingRequest() - .indices("my_index") - .type("_doc") - .source("my_field", "type=text,analyzer=english"); + PutMappingRequest pmReq = new PutMappingRequest("my_index") + .source(XContentFactory.jsonBuilder().startObject() + 
.startObject("properties") + .startObject("my_field") + .field("type", "text") + .field("analyzer", "english") + .endObject() + .endObject() + .endObject()); AcknowledgedResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT); assertTrue(pmResp.isAcknowledged()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java new file mode 100644 index 0000000000000..40b38e40b2647 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.indices; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.RandomCreateIndexGenerator; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class PutMappingRequestTests extends AbstractXContentTestCase { + + @Override + protected PutMappingRequest createTestInstance() { + PutMappingRequest request = new PutMappingRequest(); + if (frequently()) { + try { + XContentBuilder builder = RandomCreateIndexGenerator.randomMapping(); + request.source(builder); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + return request; + } + + @Override + protected PutMappingRequest doParseInstance(XContentParser parser) throws IOException { + PutMappingRequest request = new PutMappingRequest(); + request.source(parser.map()); + return request; + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected void assertEqualInstances(PutMappingRequest expected, PutMappingRequest actual) { + try (XContentParser expectedJson = createParser(expected.xContentType().xContent(), expected.source()); + XContentParser actualJson = createParser(actual.xContentType().xContent(), actual.source())) { + assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/docs/java-rest/high-level/indices/put_mapping.asciidoc b/docs/java-rest/high-level/indices/put_mapping.asciidoc index d1b9c6ad8c6fd..971ad52d62b78 100644 --- a/docs/java-rest/high-level/indices/put_mapping.asciidoc +++ b/docs/java-rest/high-level/indices/put_mapping.asciidoc @@ -10,14 +10,13 @@ [id="{upid}-{api}-request"] ==== Put Mapping Request -A +{request}+ requires an `index` argument, and a type: +A +{request}+ requires an `index` argument: ["source","java",subs="attributes,callouts,macros"] 
-------------------------------------------------- include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- <1> The index to add the mapping to -<2> The type to create (or update) ==== Mapping source A description of the fields to create on the mapping; if not defined, the mapping will default to empty. @@ -46,13 +45,6 @@ include-tagged::{doc-tests-file}[{api}-xcontent] <1> Mapping source provided as an `XContentBuilder` object, the Elasticsearch built-in helpers to generate JSON content -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests-file}[{api}-shortcut] --------------------------------------------------- -<1> Mapping source provided as `Object` key-pairs, which gets converted to -JSON format - ==== Optional arguments The following arguments can optionally be provided: @@ -61,14 +53,12 @@ The following arguments can optionally be provided: include-tagged::{doc-tests-file}[{api}-request-timeout] -------------------------------------------------- <1> Timeout to wait for the all the nodes to acknowledge the index creation as a `TimeValue` -<2> Timeout to wait for the all the nodes to acknowledge the index creation as a `String` ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-request-masterTimeout] -------------------------------------------------- <1> Timeout to connect to the master node as a `TimeValue` -<2> Timeout to connect to the master node as a `String` include::../execution.asciidoc[] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 443e33d2c33a4..cdf64b07f9110 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -70,25 +70,3 @@ properties: "": type: keyword - ---- -"PUT mapping with a type and include_type_name: false": - - - skip: - version: " - 6.99.99" - reason: include_type_name defaults to true before 7.0 - - do: - indices.create: - index: index - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - indices.put_mapping: - index: index - type: _doc - body: - properties: - bar: - type: float - diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index 525b398d48b7a..bd99c26a155a0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -19,10 +19,12 @@ package org.elasticsearch.rest.action.admin.indices; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; @@ -37,6 +39,11 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutMappingAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(RestPutMappingAction.class)); + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " + + "mapping requests is deprecated. 
The parameter will be removed in the next major version."; + public RestPutMappingAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(PUT, "/{index}/_mapping/", this); @@ -70,12 +77,17 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); + if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + deprecationLogger.deprecatedAndMaybeLog("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE); + } + final String type = request.param("type"); if (type != null && includeTypeName == false) { throw new IllegalArgumentException("Types cannot be provided in put mapping requests, unless " + "the include_type_name parameter is set to true."); } + + PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); putMappingRequest.type(includeTypeName ? 
type : MapperService.SINGLE_MAPPING_NAME); putMappingRequest.source(request.requiredContent(), request.getXContentType()); putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout())); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 5243ffd33b39c..6fddf2dd5f85b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -38,14 +38,9 @@ public class PutMappingRequestTests extends ESTestCase { public void testValidation() { - PutMappingRequest r = new PutMappingRequest("myindex"); + PutMappingRequest r = new PutMappingRequest("myindex").type(""); ActionRequestValidationException ex = r.validate(); assertNotNull("type validation should fail", ex); - assertTrue(ex.getMessage().contains("type is missing")); - - r.type(""); - ex = r.validate(); - assertNotNull("type validation should fail", ex); assertTrue(ex.getMessage().contains("type is empty")); r.type("mytype"); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java new file mode 100644 index 0000000000000..daa69c20007f1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.admin.indices; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.rest.FakeRestChannel; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.test.rest.RestActionTestCase; +import org.junit.Before; + +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; + +public class RestPutMappingActionTests extends RestActionTestCase { + + @Before + public void setUpAction() { + new RestPutMappingAction(Settings.EMPTY, controller()); + } + + public void testIncludeTypeName() { + Map params = new HashMap<>(); + params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); + RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(RestRequest.Method.PUT) + .withPath("/some_index/_mapping/") + .withParams(params) + .build(); + + dispatchRequest(deprecatedRequest); + assertWarnings(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE); + + RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(RestRequest.Method.PUT) + .withPath("/some_index/_mapping") + .build(); + dispatchRequest(validRequest); + } + + public void testTypeInPath() { + // Test that specifying a type while include_type_name is false + // results in an illegal argument exception. 
+ RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(RestRequest.Method.PUT) + .withPath("/some_index/_mapping/some_type") + .build(); + + FakeRestChannel channel = new FakeRestChannel(request, false, 1); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + controller().dispatchRequest(request, channel, threadContext); + + assertEquals(1, channel.errors().get()); + assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java b/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java index e88a9f0a38d2c..e4836150c6e86 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; @@ -31,6 +32,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; public final class RandomCreateIndexGenerator { @@ -76,8 +78,26 @@ public static Settings randomIndexSettings() { return builder.build(); } + + /** + * Creates a random mapping, with no mention of types. 
+ */ + public static XContentBuilder randomMapping() throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + + randomMappingFields(builder, true); + + builder.endObject(); + return builder; + } + + /** + * Creates a random mapping, with the mapping definition nested + * under the given type name. + */ public static XContentBuilder randomMapping(String type) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject().startObject(type); randomMappingFields(builder, true); From 5384162a42b066ee4bcadba6ab6d699f6966747f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 18 Jan 2019 14:32:34 -0600 Subject: [PATCH 59/71] ML: creating ML State write alias and pointing writes there (#37483) * ML: creating ML State write alias and pointing writes there * Moving alias check to openJob method * adjusting concrete index lookup for ml-state --- .../persistence/AnomalyDetectorsIndex.java | 87 +++++++++++++++- .../authz/store/ReservedRolesStoreTests.java | 5 +- .../xpack/test/rest/XPackRestTestHelper.java | 5 +- .../ml/integration/DeleteExpiredDataIT.java | 2 +- .../xpack/ml/MachineLearning.java | 9 +- .../xpack/ml/MlConfigMigrator.java | 15 ++- .../ml/action/TransportOpenJobAction.java | 6 +- .../TransportRevertModelSnapshotAction.java | 47 +++++---- .../job/persistence/JobResultsPersister.java | 4 +- .../autodetect/AutodetectProcessManager.java | 98 ++++++++++--------- .../output/AutodetectStateProcessor.java | 2 +- .../action/TransportOpenJobActionTests.java | 6 +- .../ml/integration/MlConfigMigratorIT.java | 41 +++++--- .../AutodetectProcessManagerTests.java | 51 ++++++---- .../xpack/test/rest/XPackRestIT.java | 3 +- .../test/upgraded_cluster/30_ml_jobs_crud.yml | 5 + 16 files changed, 265 insertions(+), 121 deletions(-) diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index b9f887d2d49fc..7e61d42705a90 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -5,6 +5,24 @@ */ package org.elasticsearch.xpack.core.ml.job.persistence; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; + +import java.util.Arrays; +import java.util.Collections; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + /** * Methods for handling index naming related functions */ @@ -40,11 +58,11 @@ public static String resultsWriteAlias(String jobId) { } /** - * The name of the default index where a job's state is stored - * @return The index name + * The name of the alias pointing to the appropriate index for writing job state + * @return The write alias name */ - public static String jobStateIndexName() { - return AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX; + public static String jobStateIndexWriteAlias() { + return 
AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + "-write"; } /** @@ -64,4 +82,65 @@ public static String configIndexName() { return AnomalyDetectorsIndexFields.CONFIG_INDEX; } + /** + * Create the .ml-state index (if necessary) + * Create the .ml-state-write alias for the .ml-state index (if necessary) + */ + public static void createStateIndexAndAliasIfNecessary(Client client, ClusterState state, final ActionListener finalListener) { + + if (state.getMetaData().getAliasAndIndexLookup().containsKey(jobStateIndexWriteAlias())) { + finalListener.onResponse(false); + return; + } + + final ActionListener createAliasListener = ActionListener.wrap( + concreteIndexName -> { + final IndicesAliasesRequest request = client.admin() + .indices() + .prepareAliases() + .addAlias(concreteIndexName, jobStateIndexWriteAlias()) + .request(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + request, + ActionListener.wrap( + resp -> finalListener.onResponse(resp.isAcknowledged()), + finalListener::onFailure), + client.admin().indices()::aliases); + }, + finalListener::onFailure + ); + + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + String[] stateIndices = indexNameExpressionResolver.concreteIndexNames(state, + IndicesOptions.lenientExpandOpen(), + jobStateIndexPattern()); + if (stateIndices.length > 0) { + Arrays.sort(stateIndices, Collections.reverseOrder()); + createAliasListener.onResponse(stateIndices[0]); + } else { + CreateIndexRequest createIndexRequest = client.admin() + .indices() + .prepareCreate(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX) + .addAlias(new Alias(jobStateIndexWriteAlias())) + .request(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + createIndexRequest, + ActionListener.wrap( + createIndexResponse -> finalListener.onResponse(true), + createIndexFailure -> { + // If it was created between our last check, and this request being handled, we should 
add the alias + // Adding an alias that already exists is idempotent. So, no need to double check if the alias exists + // as well. + if (createIndexFailure instanceof ResourceAlreadyExistsException) { + createAliasListener.onResponse(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); + } else { + finalListener.onFailure(createIndexFailure); + } + }), + client.admin().indices()::create); + } + } + } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index fc9869a12803f..9c9e3ecd3c6cc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -73,7 +73,6 @@ import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; -import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction; @@ -762,7 +761,7 @@ public void testMachineLearningAdminRole() { assertNoAccessAllowed(role, "foo"); assertOnlyReadAllowed(role, MlMetaIndex.INDEX_NAME); - assertOnlyReadAllowed(role, AnomalyDetectorsIndex.jobStateIndexName()); + assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); assertOnlyReadAllowed(role, AuditorField.NOTIFICATIONS_INDEX); } @@ -814,7 +813,7 @@ public void 
testMachineLearningUserRole() { assertNoAccessAllowed(role, "foo"); assertNoAccessAllowed(role, MlMetaIndex.INDEX_NAME); - assertNoAccessAllowed(role, AnomalyDetectorsIndex.jobStateIndexName()); + assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); assertOnlyReadAllowed(role, AuditorField.NOTIFICATIONS_INDEX); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java index 47580bf731a44..082992d95ff87 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; import java.io.IOException; @@ -30,13 +31,13 @@ public final class XPackRestTestHelper { public static final List ML_PRE_V660_TEMPLATES = Collections.unmodifiableList( Arrays.asList(AuditorField.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, - AnomalyDetectorsIndex.jobStateIndexName(), + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndex.jobResultsIndexPrefix())); public static final List ML_POST_V660_TEMPLATES = Collections.unmodifiableList( Arrays.asList(AuditorField.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, - AnomalyDetectorsIndex.jobStateIndexName(), + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndex.jobResultsIndexPrefix(), AnomalyDetectorsIndex.configIndexName())); diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index f2ca43bf53c26..2a63ccaf41245 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -180,7 +180,7 @@ public void testDeleteExpiredData() throws Exception { bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int i = 0; i < 10010; i++) { String docId = "non_existing_job_" + randomFrom("model_state_1234567#" + i, "quantiles", "categorizer_state#" + i); - IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexName(), "doc", docId); + IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), "doc", docId); indexRequest.source(Collections.emptyMap()); bulkRequestBuilder.add(indexRequest); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 11d302470c708..43674d42a56e6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -109,6 +109,7 @@ import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import 
org.elasticsearch.xpack.core.ml.notifications.AuditMessage; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; @@ -701,7 +702,7 @@ public UnaryOperator> getIndexTemplateMetaDat } try (XContentBuilder stateMapping = ElasticsearchMappings.stateMapping()) { - IndexTemplateMetaData stateTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()) + IndexTemplateMetaData stateTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX) .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexPattern())) // TODO review these settings .settings(Settings.builder() @@ -710,9 +711,9 @@ public UnaryOperator> getIndexTemplateMetaDat .putMapping(ElasticsearchMappings.DOC_TYPE, Strings.toString(stateMapping)) .version(Version.CURRENT.id) .build(); - templates.put(AnomalyDetectorsIndex.jobStateIndexName(), stateTemplate); + templates.put(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, stateTemplate); } catch (IOException e) { - logger.error("Error loading the template for the " + AnomalyDetectorsIndex.jobStateIndexName() + " index", e); + logger.error("Error loading the template for the " + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + " index", e); } try (XContentBuilder docMapping = ElasticsearchMappings.resultsMapping()) { @@ -742,7 +743,7 @@ public UnaryOperator> getIndexTemplateMetaDat public static boolean allTemplatesInstalled(ClusterState clusterState) { boolean allPresent = true; List templateNames = Arrays.asList(AuditorField.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, - AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix()); + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndex.jobResultsIndexPrefix()); for (String templateName : templateNames) { allPresent = allPresent && TemplateUtils.checkTemplateExistsAndVersionIsGTECurrentVersion(templateName, clusterState); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java index cd025dc37101d..74948986d7013 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java @@ -439,7 +439,7 @@ public void snapshotMlMeta(MlMetadata mlMetadata, ActionListener listen logger.debug("taking a snapshot of ml_metadata"); String documentId = "ml-config"; - IndexRequestBuilder indexRequest = client.prepareIndex(AnomalyDetectorsIndex.jobStateIndexName(), + IndexRequestBuilder indexRequest = client.prepareIndex(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), ElasticsearchMappings.DOC_TYPE, documentId) .setOpType(DocWriteRequest.OpType.CREATE); @@ -456,8 +456,10 @@ public void snapshotMlMeta(MlMetadata mlMetadata, ActionListener listen return; } - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest.request(), - ActionListener.wrap( + AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, clusterService.state(), ActionListener.wrap( + r -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest.request(), + ActionListener.wrap( indexResponse -> { listener.onResponse(indexResponse.getResult() == DocWriteResponse.Result.CREATED); }, @@ -469,8 +471,11 @@ public void snapshotMlMeta(MlMetadata mlMetadata, ActionListener listen listener.onFailure(e); } }), - client::index - ); + client::index + ); + }, + listener::onFailure + )); } private void createConfigIndex(ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 4b0d9ad63c6e5..2da89c359e793 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -529,7 +529,7 @@ public void onFailure(Exception e) { // Try adding state doc mapping ActionListener resultsPutMappingHandler = ActionListener.wrap( response -> { - addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings::stateMapping, + addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), ElasticsearchMappings::stateMapping, state, jobUpdateListener); }, listener::onFailure ); @@ -673,6 +673,7 @@ public static class OpenJobPersistentTasksExecutor extends PersistentTasksExecut private volatile int maxConcurrentJobAllocations; private volatile int maxMachineMemoryPercent; private volatile int maxLazyMLNodes; + private volatile ClusterState clusterState; public OpenJobPersistentTasksExecutor(Settings settings, ClusterService clusterService, AutodetectProcessManager autodetectProcessManager, MlMemoryTracker memoryTracker, @@ -689,6 +690,7 @@ public OpenJobPersistentTasksExecutor(Settings settings, ClusterService clusterS clusterService.getClusterSettings() .addSettingsUpdateConsumer(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, this::setMaxMachineMemoryPercent); clusterService.getClusterSettings().addSettingsUpdateConsumer(MachineLearning.MAX_LAZY_ML_NODES, this::setMaxLazyMLNodes); + clusterService.addListener(event -> clusterState = event.state()); } @Override @@ -748,7 +750,7 @@ protected void nodeOperation(AllocatedPersistentTask task, OpenJobAction.JobPara } String jobId = jobTask.getJobId(); - autodetectProcessManager.openJob(jobTask, e2 -> { + autodetectProcessManager.openJob(jobTask, clusterState, e2 -> { if (e2 == null) { FinalizeJobExecutionAction.Request finalizeRequest = new FinalizeJobExecutionAction.Request(new String[]{jobId}); executeAsyncWithOrigin(client, ML_ORIGIN, FinalizeJobExecutionAction.INSTANCE, 
finalizeRequest, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index a940d6666c9fd..ab2fb1368345a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck; @@ -79,26 +80,38 @@ protected void masterOperation(RevertModelSnapshotAction.Request request, Cluste logger.debug("Received request to revert to snapshot id '{}' for job '{}', deleting intervening results: {}", request.getSnapshotId(), request.getJobId(), request.getDeleteInterveningResults()); - jobManager.jobExists(request.getJobId(), ActionListener.wrap( - exists -> { - PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - JobState jobState = MlTasks.getJobState(request.getJobId(), tasks); - if (jobState.equals(JobState.CLOSED) == false) { - throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT)); + // 3. 
Revert the state + ActionListener jobExistsListener = ActionListener.wrap( + exists -> { + PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + JobState jobState = MlTasks.getJobState(request.getJobId(), tasks); + + if (jobState.equals(JobState.CLOSED) == false) { + throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT)); + } + + getModelSnapshot(request, jobResultsProvider, modelSnapshot -> { + ActionListener wrappedListener = listener; + if (request.getDeleteInterveningResults()) { + wrappedListener = wrapDeleteOldDataListener(wrappedListener, modelSnapshot, request.getJobId()); + wrappedListener = wrapRevertDataCountsListener(wrappedListener, modelSnapshot, request.getJobId()); } + jobManager.revertSnapshot(request, wrappedListener, modelSnapshot); + }, listener::onFailure); + }, + listener::onFailure + ); + + + // 2. Verify the job exists + ActionListener createStateIndexListener = ActionListener.wrap( + r -> jobManager.jobExists(request.getJobId(), jobExistsListener), + listener::onFailure + ); - getModelSnapshot(request, jobResultsProvider, modelSnapshot -> { - ActionListener wrappedListener = listener; - if (request.getDeleteInterveningResults()) { - wrappedListener = wrapDeleteOldDataListener(wrappedListener, modelSnapshot, request.getJobId()); - wrappedListener = wrapRevertDataCountsListener(wrappedListener, modelSnapshot, request.getJobId()); - } - jobManager.revertSnapshot(request, wrappedListener, modelSnapshot); - }, listener::onFailure); - }, - listener::onFailure - )); + // 1. 
Verify/Create the state index and its alias exists + AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, state, createStateIndexListener); } private void getModelSnapshot(RevertModelSnapshotAction.Request request, JobResultsProvider provider, Consumer handler, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index e57d85aefa72c..2a16b1c8ddd8a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -228,7 +228,7 @@ public void persistCategoryDefinition(CategoryDefinition category) { */ public void persistQuantiles(Quantiles quantiles) { Persistable persistable = new Persistable(quantiles.getJobId(), quantiles, Quantiles.documentId(quantiles.getJobId())); - persistable.persist(AnomalyDetectorsIndex.jobStateIndexName()).actionGet(); + persistable.persist(AnomalyDetectorsIndex.jobStateIndexWriteAlias()).actionGet(); } /** @@ -237,7 +237,7 @@ public void persistQuantiles(Quantiles quantiles) { public void persistQuantiles(Quantiles quantiles, WriteRequest.RefreshPolicy refreshPolicy, ActionListener listener) { Persistable persistable = new Persistable(quantiles.getJobId(), quantiles, Quantiles.documentId(quantiles.getJobId())); persistable.setRefreshPolicy(refreshPolicy); - persistable.persist(AnomalyDetectorsIndex.jobStateIndexName(), listener); + persistable.persist(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), listener); } /** diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 567ade2c22a0f..9695d73ed05c5 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; @@ -38,6 +39,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; @@ -412,68 +414,70 @@ public void onFailure(Exception e) { } } - public void openJob(JobTask jobTask, Consumer closeHandler) { + public void openJob(JobTask jobTask, ClusterState clusterState, Consumer closeHandler) { String jobId = jobTask.getJobId(); logger.info("Opening job [{}]", jobId); - - jobManager.getJob(jobId, ActionListener.wrap( - job -> { - if (job.getJobVersion() == null) { - closeHandler.accept(ExceptionsHelper.badRequestException("Cannot open job [" + jobId + AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, clusterState, ActionListener.wrap( + r -> { + jobManager.getJob(jobId, ActionListener.wrap( + job -> { + if (job.getJobVersion() == null) { + closeHandler.accept(ExceptionsHelper.badRequestException("Cannot open job [" + jobId + "] because jobs created prior to version 5.5 are not supported")); - return; - } - + return; + } - 
processByAllocation.putIfAbsent(jobTask.getAllocationId(), new ProcessContext(jobTask)); - jobResultsProvider.getAutodetectParams(job, params -> { - // We need to fork, otherwise we restore model state from a network thread (several GET api calls): - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - closeHandler.accept(e); - } - @Override - protected void doRun() { - ProcessContext processContext = processByAllocation.get(jobTask.getAllocationId()); - if (processContext == null) { - logger.debug("Aborted opening job [{}] as it has been closed", jobId); - return; + processByAllocation.putIfAbsent(jobTask.getAllocationId(), new ProcessContext(jobTask)); + jobResultsProvider.getAutodetectParams(job, params -> { + // We need to fork, otherwise we restore model state from a network thread (several GET api calls): + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + closeHandler.accept(e); } - if (processContext.getState() != ProcessContext.ProcessStateName.NOT_RUNNING) { - logger.debug("Cannot open job [{}] when its state is [{}]", + + @Override + protected void doRun() { + ProcessContext processContext = processByAllocation.get(jobTask.getAllocationId()); + if (processContext == null) { + logger.debug("Aborted opening job [{}] as it has been closed", jobId); + return; + } + if (processContext.getState() != ProcessContext.ProcessStateName.NOT_RUNNING) { + logger.debug("Cannot open job [{}] when its state is [{}]", jobId, processContext.getState().getClass().getName()); - return; - } + return; + } - try { - createProcessAndSetRunning(processContext, job, params, closeHandler); - processContext.getAutodetectCommunicator().init(params.modelSnapshot()); - setJobState(jobTask, JobState.OPENED); - } catch (Exception e1) { - // No need to log here as the persistent task framework will 
log it try { - // Don't leave a partially initialised process hanging around - processContext.newKillBuilder() + createProcessAndSetRunning(processContext, job, params, closeHandler); + processContext.getAutodetectCommunicator().init(params.modelSnapshot()); + setJobState(jobTask, JobState.OPENED); + } catch (Exception e1) { + // No need to log here as the persistent task framework will log it + try { + // Don't leave a partially initialised process hanging around + processContext.newKillBuilder() .setAwaitCompletion(false) .setFinish(false) .kill(); - processByAllocation.remove(jobTask.getAllocationId()); - } finally { - setJobState(jobTask, JobState.FAILED, e2 -> closeHandler.accept(e1)); + processByAllocation.remove(jobTask.getAllocationId()); + } finally { + setJobState(jobTask, JobState.FAILED, e2 -> closeHandler.accept(e1)); + } } } - } + }); + }, e1 -> { + logger.warn("Failed to gather information required to open job [" + jobId + "]", e1); + setJobState(jobTask, JobState.FAILED, e2 -> closeHandler.accept(e1)); }); - }, e1 -> { - logger.warn("Failed to gather information required to open job [" + jobId + "]", e1); - setJobState(jobTask, JobState.FAILED, e2 -> closeHandler.accept(e1)); - }); - }, - closeHandler - )); - + }, + closeHandler + )); + }, + closeHandler)); } private void createProcessAndSetRunning(ProcessContext processContext, Job job, AutodetectParams params, Consumer handler) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessor.java index 63a496f0503bc..9d3afd0ad0dcb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessor.java @@ -98,7 +98,7 @@ private 
BytesReference splitAndPersist(BytesReference bytesRef, int searchFrom) void persist(BytesReference bytes) throws IOException { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bytes, AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, XContentType.JSON); + bulkRequest.add(bytes, AnomalyDetectorsIndex.jobStateIndexWriteAlias(), ElasticsearchMappings.DOC_TYPE, XContentType.JSON); if (bulkRequest.numberOfActions() > 0) { LOGGER.trace("[{}] Persisting job state document", jobId); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 9349c56ef75e9..9bd32bdc9eff3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -614,7 +615,7 @@ public static void addJobTask(String jobId, String nodeId, JobState jobState, Pe private void addIndices(MetaData.Builder metaData, RoutingTable.Builder routingTable) { List indices = new ArrayList<>(); - indices.add(AnomalyDetectorsIndex.jobStateIndexName()); + indices.add(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); indices.add(MlMetaIndex.INDEX_NAME); indices.add(AuditorField.NOTIFICATIONS_INDEX); indices.add(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + 
AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); @@ -625,6 +626,9 @@ private void addIndices(MetaData.Builder metaData, RoutingTable.Builder routingT .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) ); + if (indexName.equals(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX)) { + indexMetaData.putAlias(new AliasMetaData.Builder(AnomalyDetectorsIndex.jobStateIndexWriteAlias())); + } metaData.put(indexMetaData); Index index = new Index(indexName, "_uuid"); ShardId shardId = new ShardId(index, 0); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java index b8eae71b4bcab..4ee76a4b1ab21 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java @@ -10,10 +10,12 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -52,6 +54,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; @@ -74,7 +78,13 @@ public void setUpTests() { 
clusterService = mock(ClusterService.class); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings(), new HashSet<>(Collections.singletonList( MlConfigMigrationEligibilityCheck.ENABLE_CONFIG_MIGRATION))); + MetaData metaData = mock(MetaData.class); + SortedMap aliasOrIndexSortedMap = new TreeMap<>(); + when(metaData.getAliasAndIndexLookup()).thenReturn(aliasOrIndexSortedMap); + ClusterState clusterState = mock(ClusterState.class); + when(clusterState.getMetaData()).thenReturn(metaData); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(clusterState); } public void testWriteConfigToIndex() throws InterruptedException { @@ -139,6 +149,7 @@ public void testMigrateConfigs() throws InterruptedException, IOException { .metaData(metaData.putCustom(MlMetadata.TYPE, mlMetadata.build())) .routingTable(routingTable.build()) .build(); + when(clusterService.state()).thenReturn(clusterState); doAnswer(invocation -> { ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; @@ -184,15 +195,6 @@ public void testMigrateConfigs() throws InterruptedException, IOException { } public void testExistingSnapshotDoesNotBlockMigration() throws InterruptedException { - // index a doc with the same Id as the config snapshot - IndexRequestBuilder indexRequest = client().prepareIndex(AnomalyDetectorsIndex.jobStateIndexName(), - ElasticsearchMappings.DOC_TYPE, "ml-config") - .setSource(Collections.singletonMap("a_field", "a_value")) - .setOpType(DocWriteRequest.OpType.CREATE) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - - indexRequest.execute().actionGet(); - // define the configs MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("job-foo").build(), false); @@ -201,9 +203,23 @@ public void testExistingSnapshotDoesNotBlockMigration() throws InterruptedExcept RoutingTable.Builder routingTable = RoutingTable.builder(); 
addMlConfigIndex(metaData, routingTable); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(metaData.putCustom(MlMetadata.TYPE, mlMetadata.build())) - .routingTable(routingTable.build()) - .build(); + .metaData(metaData.putCustom(MlMetadata.TYPE, mlMetadata.build())) + .routingTable(routingTable.build()) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + // index a doc with the same Id as the config snapshot + PlainActionFuture future = PlainActionFuture.newFuture(); + AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client(), clusterService.state(), future); + future.actionGet(); + + IndexRequestBuilder indexRequest = client().prepareIndex(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), + ElasticsearchMappings.DOC_TYPE, "ml-config") + .setSource(Collections.singletonMap("a_field", "a_value")) + .setOpType(DocWriteRequest.OpType.CREATE) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + indexRequest.execute().actionGet(); doAnswer(invocation -> { ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; @@ -258,6 +274,7 @@ public void testMigrateConfigs_GivenLargeNumberOfJobsAndDatafeeds() throws Inter .metaData(metaData.putCustom(MlMetadata.TYPE, mlMetadata.build())) .routingTable(routingTable.build()) .build(); + when(clusterService.state()).thenReturn(clusterState); doAnswer(invocation -> { ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 998297070c27b..9024d0edcee9c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -8,6 +8,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -32,6 +35,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; @@ -63,6 +67,8 @@ import java.util.List; import java.util.Optional; import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; @@ -109,6 +115,7 @@ public class AutodetectProcessManagerTests extends ESTestCase { private JobDataCountsPersister jobDataCountsPersister; private NormalizerFactory normalizerFactory; private Auditor auditor; + private ClusterState clusterState; private DataCounts dataCounts = new DataCounts("foo"); private ModelSizeStats modelSizeStats = new ModelSizeStats.Builder("foo").build(); @@ -128,6 +135,12 @@ public void setup() throws Exception { jobDataCountsPersister = mock(JobDataCountsPersister.class); normalizerFactory = mock(NormalizerFactory.class); auditor = mock(Auditor.class); + MetaData 
metaData = mock(MetaData.class); + SortedMap aliasOrIndexSortedMap = new TreeMap<>(); + aliasOrIndexSortedMap.put(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), mock(AliasOrIndex.Alias.class)); + when(metaData.getAliasAndIndexLookup()).thenReturn(aliasOrIndexSortedMap); + clusterState = mock(ClusterState.class); + when(clusterState.getMetaData()).thenReturn(metaData); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") @@ -170,7 +183,7 @@ public void testOpenJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); assertEquals(1, manager.numberOfOpenJobs()); assertTrue(manager.jobHasActiveAutodetectProcess(jobTask)); verify(jobTask).updatePersistentTaskState(eq(new JobTaskState(JobState.OPENED, 1L)), any()); @@ -196,7 +209,7 @@ public void testOpenJob_withoutVersion() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(job.getId()); AtomicReference errorHolder = new AtomicReference<>(); - manager.openJob(jobTask, errorHolder::set); + manager.openJob(jobTask, clusterState, errorHolder::set); Exception error = errorHolder.get(); assertThat(error, is(notNullValue())); assertThat(error.getMessage(), equalTo("Cannot open job [no_version] because jobs created prior to version 5.5 are not supported")); @@ -242,22 +255,22 @@ public void testOpenJob_exceedMaxNumJobs() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("bar"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("baz"); when(jobTask.getAllocationId()).thenReturn(2L); - 
manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); assertEquals(3, manager.numberOfOpenJobs()); Exception[] holder = new Exception[1]; jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foobar"); when(jobTask.getAllocationId()).thenReturn(3L); - manager.openJob(jobTask, e -> holder[0] = e); + manager.openJob(jobTask, clusterState, e -> holder[0] = e); Exception e = holder[0]; assertEquals("max running job capacity [3] reached", e.getMessage()); @@ -266,7 +279,7 @@ public void testOpenJob_exceedMaxNumJobs() { when(jobTask.getJobId()).thenReturn("baz"); manager.closeJob(jobTask, false, null); assertEquals(2, manager.numberOfOpenJobs()); - manager.openJob(jobTask, e1 -> {}); + manager.openJob(jobTask, clusterState, e1 -> {}); assertEquals(3, manager.numberOfOpenJobs()); } @@ -278,7 +291,7 @@ public void testProcessData() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); DataLoadParams params = new DataLoadParams(TimeRange.builder().build(), Optional.empty()); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), params, (dataCounts1, e) -> {}); assertEquals(1, manager.numberOfOpenJobs()); @@ -301,7 +314,7 @@ public void testProcessDataThrowsElasticsearchStatusException_onIoException() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); Exception[] holder = new Exception[1]; manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> holder[0] = e); assertNotNull(holder[0]); @@ -314,7 +327,7 @@ public void testCloseJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> 
{}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> {}); @@ -342,7 +355,7 @@ public void testCanCloseClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> {}); @@ -390,7 +403,7 @@ public void testCanKillClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> {}); @@ -419,7 +432,7 @@ public void testBucketResetMessageIsSent() { InputStream inputStream = createInputStream(""); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> {}); verify(communicator).writeToJob(same(inputStream), same(analysisRegistry), same(xContentType), same(params), any()); } @@ -431,7 +444,7 @@ public void testFlush() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); InputStream inputStream = createInputStream(""); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, inputStream, randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> {}); @@ -471,7 +484,7 @@ public void testCloseThrows() { // create a jobtask JobTask jobTask = 
mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> { }); @@ -511,7 +524,7 @@ public void testJobHasActiveAutodetectProcess() { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> {}); @@ -529,7 +542,7 @@ public void testKillKillsAutodetectProcess() throws IOException { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts1, e) -> {}); @@ -563,7 +576,7 @@ public void testProcessData_GivenStateNotOpened() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); InputStream inputStream = createInputStream(""); DataCounts[] dataCounts = new DataCounts[1]; manager.processData(jobTask, analysisRegistry, inputStream, @@ -728,7 +741,7 @@ private AutodetectProcessManager createManagerAndCallProcessData(AutodetectCommu AutodetectProcessManager manager = createManager(communicator); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(jobId); - manager.openJob(jobTask, e -> {}); + manager.openJob(jobTask, clusterState, e -> {}); manager.processData(jobTask, analysisRegistry, createInputStream(""), 
randomFrom(XContentType.values()), mock(DataLoadParams.class), (dataCounts, e) -> {}); return manager; diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 57c944788482e..336ddadea4c32 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField; @@ -87,7 +88,7 @@ private void waitForTemplates() throws Exception { if (installTemplates()) { List templates = new ArrayList<>(); templates.addAll(Arrays.asList(AuditorField.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, - AnomalyDetectorsIndex.jobStateIndexName(), + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndex.jobResultsIndexPrefix(), AnomalyDetectorsIndex.configIndexName())); diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml index 507362507cdeb..e962c20a7e9eb 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml @@ -72,6 +72,11 @@ setup: ml.get_jobs: job_id: mixed-cluster-job + - do: + indices.exists_alias: + name: 
".ml-state-write" + - is_true: '' + --- "Test job with no model memory limit has established model memory after reopening": - do: From 12cdf1cba44d84fd8a8aa3cae39e76a8b359b595 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 18 Jan 2019 15:08:53 -0600 Subject: [PATCH 60/71] ML: Add support for single bucket aggs in Datafeeds (#37544) Single bucket aggs are now supported in datafeed aggregation configurations. --- docs/reference/ml/aggregations.asciidoc | 47 +++++++++++++++++++ .../ml/integration/DatafeedJobsRestIT.java | 38 +++++++++++++++ .../AggregationToJsonProcessor.java | 37 ++++++++++++++- .../aggregation/AggregationTestUtils.java | 9 ++++ .../AggregationToJsonProcessorTests.java | 33 +++++++++++++ 5 files changed, 163 insertions(+), 1 deletion(-) diff --git a/docs/reference/ml/aggregations.asciidoc b/docs/reference/ml/aggregations.asciidoc index 3f09022d17eaa..a50016807a714 100644 --- a/docs/reference/ml/aggregations.asciidoc +++ b/docs/reference/ml/aggregations.asciidoc @@ -145,6 +145,53 @@ pipeline aggregation to find the first order derivative of the counter ---------------------------------- // NOTCONSOLE +{dfeeds-cap} not only supports multi-bucket aggregations, but also single bucket aggregations. +The following shows two `filter` aggregations, each gathering the number of unique entries for +the `error` field. 
+ +[source,js] +---------------------------------- +{ + "job_id":"servers-unique-errors", + "indices": ["logs-*"], + "aggregations": { + "buckets": { + "date_histogram": { + "field": "time", + "interval": "360s", + "time_zone": "UTC" + }, + "aggregations": { + "time": { + "max": {"field": "time"} + } + "server1": { + "filter": {"term": {"source": "server-name-1"}}, + "aggregations": { + "server1_error_count": { + "value_count": { + "field": "error" + } + } + } + }, + "server2": { + "filter": {"term": {"source": "server-name-2"}}, + "aggregations": { + "server2_error_count": { + "value_count": { + "field": "error" + } + } + } + } + } + } + } +} +---------------------------------- +// NOTCONSOLE + When you define an aggregation in a {dfeed}, it must have the following form: [source,js] diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 2e69702381bcf..b794fee311805 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -894,6 +894,44 @@ public void testLookbackWithoutPermissionsAndRollup() throws Exception { "action [indices:admin/xpack/rollup/search] is unauthorized for user [ml_admin_plus_data]\"")); } + public void testLookbackWithSingleBucketAgg() throws Exception { + String jobId = "aggs-date-histogram-with-single-bucket-agg-job"; + Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); + createJobRequest.setJsonEntity("{\n" + + " \"description\": \"Aggs job\",\n" + + " \"analysis_config\": {\n" + + " \"bucket_span\": \"3600s\",\n" + + " \"summary_count_field_name\": \"doc_count\",\n" + + " 
\"detectors\": [\n" + + " {\n" + + " \"function\": \"mean\",\n" + + " \"field_name\": \"responsetime\"" + + " }\n" + + " ]\n" + + " },\n" + + " \"data_description\": {\"time_field\": \"time stamp\"}\n" + + "}"); + client().performRequest(createJobRequest); + + String datafeedId = "datafeed-" + jobId; + String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"1h\"}," + + "\"aggregations\":{" + + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + + "\"airlineFilter\":{\"filter\":{\"term\": {\"airline\":\"AAA\"}}," + + " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}"; + new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").setAggregations(aggregations).build(); + openJob(client(), jobId); + + startDatafeedAndWaitUntilStopped(datafeedId); + waitUntilJobIsClosed(jobId); + Response jobStatsResponse = client().performRequest(new Request("GET", + MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); + String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity()); + assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2")); + assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":2")); + assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0")); + } + public void testRealtime() throws Exception { String jobId = "job-realtime-1"; createJob(jobId, "airline"); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java index c934653a6268e..db8dea22675f2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; @@ -34,6 +35,7 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; +import java.util.stream.Collectors; /** * Processes {@link Aggregation} objects and writes flat JSON documents for each leaf aggregation. @@ -93,18 +95,39 @@ private void processAggs(long docCount, List aggregations) throws I List leafAggregations = new ArrayList<>(); List bucketAggregations = new ArrayList<>(); + List singleBucketAggregations = new ArrayList<>(); // Sort into leaf and bucket aggregations. // The leaf aggregations will be processed first. 
for (Aggregation agg : aggregations) { if (agg instanceof MultiBucketsAggregation) { bucketAggregations.add((MultiBucketsAggregation)agg); + } else if (agg instanceof SingleBucketAggregation){ + // Skip a level down for single bucket aggs, if they have a sub-agg that is not + // a bucketed agg we should treat it like a leaf in this bucket + SingleBucketAggregation singleBucketAggregation = (SingleBucketAggregation)agg; + for (Aggregation subAgg : singleBucketAggregation.getAggregations()) { + if (subAgg instanceof MultiBucketsAggregation || subAgg instanceof SingleBucketAggregation) { + singleBucketAggregations.add(singleBucketAggregation); + } else { + leafAggregations.add(subAgg); + } + } } else { leafAggregations.add(agg); } } - if (bucketAggregations.size() > 1) { + // If on the current level (indicated via bucketAggregations) or one of the next levels (singleBucketAggregations) + // we have more than 1 `MultiBucketsAggregation`, we should error out. + // We need to make the check in this way as each of the items in `singleBucketAggregations` is treated as a separate branch + // in the recursive handling of this method. + int bucketAggLevelCount = Math.max(bucketAggregations.size(), (int)singleBucketAggregations.stream() + .flatMap(s -> asList(s.getAggregations()).stream()) + .filter(MultiBucketsAggregation.class::isInstance) + .count()); + + if (bucketAggLevelCount > 1) { throw new IllegalArgumentException("Multiple bucket aggregations at the same level are not supported"); } @@ -137,6 +160,18 @@ private void processAggs(long docCount, List aggregations) throws I } } } + noMoreBucketsToProcess = singleBucketAggregations.isEmpty() && noMoreBucketsToProcess; + // we support more than one `SingleBucketAggregation` at each level + // However, we only want to recurse with multi/single bucket aggs. 
+ // Non-bucketed sub-aggregations were handle as leaf aggregations at this level + for (SingleBucketAggregation singleBucketAggregation : singleBucketAggregations) { + processAggs(singleBucketAggregation.getDocCount(), + asList(singleBucketAggregation.getAggregations()) + .stream() + .filter( + aggregation -> (aggregation instanceof MultiBucketsAggregation || aggregation instanceof SingleBucketAggregation)) + .collect(Collectors.toList())); + } // If there are no more bucket aggregations to process we've reached the end // and it's time to write the doc diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 47d2eb828c6a4..38202eee0ff06 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -7,6 +7,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -37,6 +38,14 @@ static Histogram.Bucket createHistogramBucket(long timestamp, long docCount, Lis return bucket; } + static SingleBucketAggregation createSingleBucketAgg(String name, long docCount, List subAggregations) { + SingleBucketAggregation singleBucketAggregation = mock(SingleBucketAggregation.class); + when(singleBucketAggregation.getName()).thenReturn(name); + when(singleBucketAggregation.getDocCount()).thenReturn(docCount); + 
when(singleBucketAggregation.getAggregations()).thenReturn(createAggs(subAggregations)); + return singleBucketAggregation; + } + static Histogram.Bucket createHistogramBucket(long timestamp, long docCount) { return createHistogramBucket(timestamp, docCount, Collections.emptyList()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java index bf283b5be519d..be79b461eeb18 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java @@ -31,6 +31,7 @@ import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createHistogramBucket; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createMax; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createPercentiles; +import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createSingleBucketAgg; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createSingleValue; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createTerms; import static org.hamcrest.Matchers.containsString; @@ -439,6 +440,38 @@ public void testBucketsBeforeStartArePruned() throws IOException { "{\"time\":4000,\"my_field\":4.0,\"doc_count\":14}")); } + public void testSingleBucketAgg() throws IOException { + List histogramBuckets = Arrays.asList( + createHistogramBucket(1000L, 4, Arrays.asList( + createMax("time", 1000), + createSingleBucketAgg("agg1", 3, Collections.singletonList(createMax("field1", 
5.0))), + createSingleBucketAgg("agg2", 1, Collections.singletonList(createMax("field2", 3.0))))), + createHistogramBucket(2000L, 7, Arrays.asList( + createMax("time", 2000), + createSingleBucketAgg("agg2", 3, Collections.singletonList(createMax("field2", 1.0))), + createSingleBucketAgg("agg1", 4, Collections.singletonList(createMax("field1", 7.0)))))); + + String json = aggToString(Sets.newHashSet("field1", "field2"), histogramBuckets); + + assertThat(json, equalTo("{\"time\":1000,\"field1\":5.0,\"field2\":3.0,\"doc_count\":4}" + + " {\"time\":2000,\"field2\":1.0,\"field1\":7.0,\"doc_count\":7}")); + } + + public void testSingleBucketAgg_failureWithSubMultiBucket() throws IOException { + + List histogramBuckets = Collections.singletonList( + createHistogramBucket(1000L, 4, Arrays.asList( + createMax("time", 1000), + createSingleBucketAgg("agg1", 3, + Arrays.asList(createHistogramAggregation("histo", Collections.emptyList()),createMax("field1", 5.0))), + createSingleBucketAgg("agg2", 1, + Arrays.asList(createHistogramAggregation("histo", Collections.emptyList()),createMax("field1", 3.0)))))); + + + expectThrows(IllegalArgumentException.class, + () -> aggToString(Sets.newHashSet("my_field"), histogramBuckets)); + } + private String aggToString(Set fields, Histogram.Bucket bucket) throws IOException { return aggToString(fields, Collections.singletonList(bucket)); } From 88b981056712bb2d6c668c365e357c605e6ffecd Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Fri, 18 Jan 2019 14:24:34 -0700 Subject: [PATCH 61/71] Remove obsolete deprecation checks (#37510) * Remove obsolete deprecation checks This also updates the old-indices check to be appropriate for the 7.x series of releases, and leaves it as the only deprecation check in place. 
* Add toString to DeprecationIssue * Bring filterChecks across from 6.x * License headers --- .../core/deprecation/DeprecationIssue.java | 6 + .../xpack/deprecation/DeprecationChecks.java | 22 +++- .../deprecation/IndexDeprecationChecks.java | 122 +----------------- .../deprecation/DeprecationChecksTests.java | 41 ++++++ .../IndexDeprecationChecksTests.java | 32 +++-- 5 files changed, 83 insertions(+), 140 deletions(-) create mode 100644 x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java index 85a7edd109386..8413938f2b21c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationIssue.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -137,5 +138,10 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(level, message, url, details); } + + @Override + public String toString() { + return Strings.toString(this); + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index 2589cb80cb504..1363d3a09a03f 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -15,8 +15,10 @@ import java.util.Arrays; import 
java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.stream.Collectors; /** * Class containing all the cluster, node, and index deprecation checks that will be served @@ -37,11 +39,17 @@ private DeprecationChecks() { static List> INDEX_SETTINGS_CHECKS = Collections.unmodifiableList(Arrays.asList( - IndexDeprecationChecks::baseSimilarityDefinedCheck, - IndexDeprecationChecks::dynamicTemplateWithMatchMappingTypeCheck, - IndexDeprecationChecks::indexSharedFileSystemCheck, - IndexDeprecationChecks::indexStoreTypeCheck, - IndexDeprecationChecks::storeThrottleSettingsCheck, - IndexDeprecationChecks::delimitedPayloadFilterCheck)); - + IndexDeprecationChecks::oldIndicesCheck)); + + /** + * helper utility function to reduce repeat of running a specific {@link List} of checks. + * + * @param checks The functional checks to execute using the mapper function + * @param mapper The function that executes the lambda check with the appropriate arguments + * @param The signature of the check (BiFunction, Function, including the appropriate arguments) + * @return The list of {@link DeprecationIssue} that were found in the cluster + */ + static List filterChecks(List checks, Function mapper) { + return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); + } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index fcc6532fc99fd..7defb80ccaa6a 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -7,18 +7,12 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.elasticsearch.Version; 
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.mapper.DynamicTemplate; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; @@ -79,119 +73,15 @@ private static List findInPropertiesRecursively(String type, Map issues = new ArrayList<>(); - fieldLevelMappingIssue(indexMetaData, (mappingMetaData, sourceAsMap) -> { - List dynamicTemplates = (List) mappingMetaData - .getSourceAsMap().getOrDefault("dynamic_templates", Collections.emptyList()); - for (Object template : dynamicTemplates) { - for (Map.Entry prop : ((Map) template).entrySet()) { - Map val = (Map) prop.getValue(); - if (val.containsKey("match_mapping_type")) { - Object mappingMatchType = val.get("match_mapping_type"); - boolean isValidMatchType = Arrays.stream(DynamicTemplate.XContentFieldType.values()) - .anyMatch(v -> v.toString().equals(mappingMatchType)); - if (isValidMatchType == false) { - issues.add("type: " + mappingMetaData.type() + ", dynamicFieldDefinition" - + prop.getKey() + ", unknown match_mapping_type[" + mappingMatchType + "]"); - } - } - } - } - }); - if (issues.size() > 0) { - return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "Unrecognized match_mapping_type options not silently ignored", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_mappings_changes.html" + - "#_unrecognized_literal_match_mapping_type_literal_options_not_silently_ignored", - issues.toString()); - } - } - return null; - } - - static DeprecationIssue baseSimilarityDefinedCheck(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { - 
Settings settings = indexMetaData.getSettings().getAsSettings("index.similarity.base"); - if (settings.size() > 0) { - return new DeprecationIssue(DeprecationIssue.Level.WARNING, - "The base similarity is now ignored as coords and query normalization have been removed." + - "If provided, this setting will be ignored and issue a deprecation warning", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_similarity_settings", null); - - } - } - return null; - } - - static DeprecationIssue delimitedPayloadFilterCheck(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_7_0_0)) { - List issues = new ArrayList<>(); - Map filters = indexMetaData.getSettings().getGroups(AnalysisRegistry.INDEX_ANALYSIS_FILTER); - for (Map.Entry entry : filters.entrySet()) { - if ("delimited_payload_filter".equals(entry.getValue().get("type"))) { - issues.add("The filter [" + entry.getKey() + "] is of deprecated 'delimited_payload_filter' type. " - + "The filter type should be changed to 'delimited_payload'."); - } - } - if (issues.size() > 0) { - return new DeprecationIssue(DeprecationIssue.Level.WARNING, - "Use of 'delimited_payload_filter'.", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking_70_analysis_changes.html", - issues.toString()); - } - } - return null; - } - - static DeprecationIssue indexStoreTypeCheck(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1) && - indexMetaData.getSettings().get("index.store.type") != null) { - return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "The default index.store.type has been removed. 
If you were using it, " + - "we advise that you simply remove it from your index settings and Elasticsearch" + - "will use the best store implementation for your operating system.", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_store_settings", null); - - } - return null; - } - - static DeprecationIssue storeThrottleSettingsCheck(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { - Settings settings = indexMetaData.getSettings(); - Settings throttleSettings = settings.getAsSettings("index.store.throttle"); - ArrayList foundSettings = new ArrayList<>(); - if (throttleSettings.get("max_bytes_per_sec") != null) { - foundSettings.add("index.store.throttle.max_bytes_per_sec"); - } - if (throttleSettings.get("type") != null) { - foundSettings.add("index.store.throttle.type"); - } - - if (foundSettings.isEmpty() == false) { + static DeprecationIssue oldIndicesCheck(IndexMetaData indexMetaData) { + Version createdWith = indexMetaData.getCreationVersion(); + if (createdWith.before(Version.V_7_0_0)) { return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "index.store.throttle settings are no longer recognized. 
these settings should be removed", + "Index created before 7.0", "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_store_throttling_settings", "present settings: " + foundSettings); + "breaking-changes-8.0.html", + "This index was created using version: " + createdWith); } - } - return null; - } - - static DeprecationIssue indexSharedFileSystemCheck(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1) && - indexMetaData.getSettings().get("index.shared_filesystem") != null) { - return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "[index.shared_filesystem] setting should be removed", - "https://www.elastic.co/guide/en/elasticsearch/reference/6.0/" + - "breaking_60_indices_changes.html#_shadow_replicas_have_been_removed", null); - - } return null; } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java new file mode 100644 index 0000000000000..57b579a508520 --- /dev/null +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class DeprecationChecksTests extends ESTestCase { + + public void testFilterChecks() { + DeprecationIssue issue = createRandomDeprecationIssue(); + int numChecksPassed = randomIntBetween(0, 5); + int numChecksFailed = 10 - numChecksPassed; + List> checks = new ArrayList<>(); + for (int i = 0; i < numChecksFailed; i++) { + checks.add(() -> issue); + } + for (int i = 0; i < numChecksPassed; i++) { + checks.add(() -> null); + } + List filteredIssues = DeprecationInfoAction.filterChecks(checks, Supplier::get); + assertThat(filteredIssues.size(), equalTo(numChecksFailed)); + } + + private static DeprecationIssue createRandomDeprecationIssue() { + String details = randomBoolean() ? randomAlphaOfLength(10) : null; + return new DeprecationIssue(randomFrom(DeprecationIssue.Level.values()), randomAlphaOfLength(10), + randomAlphaOfLength(10), details); + } +} diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index 7203a853846d1..b0f5a556ac627 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -3,37 +3,35 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ + package org.elasticsearch.xpack.deprecation; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import java.io.IOException; import java.util.List; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; public class IndexDeprecationChecksTests extends ESTestCase { - public void testDelimitedPayloadFilterCheck() throws IOException { - Settings settings = settings( - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0))) - .put("index.analysis.filter.my_delimited_payload_filter.type", "delimited_payload_filter") - .put("index.analysis.filter.my_delimited_payload_filter.delimiter", "^") - .put("index.analysis.filter.my_delimited_payload_filter.encoding", "identity").build(); - - IndexMetaData indexMetaData = IndexMetaData.builder("test").settings(settings).numberOfShards(1).numberOfReplicas(0).build(); - - DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.WARNING, "Use of 'delimited_payload_filter'.", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking_70_analysis_changes.html", - "[The filter [my_delimited_payload_filter] is of deprecated 'delimited_payload_filter' type. 
" - + "The filter type should be changed to 'delimited_payload'.]"); - List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + public void testOldIndicesCheck() { + Version createdWith = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, + VersionUtils.getPreviousVersion(Version.V_7_0_0)); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(createdWith)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "Index created before 7.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking-changes-8.0.html", + "This index was created using version: " + createdWith); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); assertEquals(singletonList(expected), issues); } } From 106f900dfb3ac49ab45ec20a7dae43753de2e4c8 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Fri, 18 Jan 2019 13:40:00 -0800 Subject: [PATCH 62/71] refactor inner geogrid classes to own class files (#37596) To make further refactoring of GeoGrid aggregations easier (related: #30320), splitting out these inner class dependencies into their own files makes it easier to map the relationship between classes --- .../bucket/geogrid/CellIdSource.java | 95 ++++++++++++ .../geogrid/GeoGridAggregationBuilder.java | 68 --------- .../bucket/geogrid/GeoGridBucket.java | 132 +++++++++++++++++ .../bucket/geogrid/GeoHashGridAggregator.java | 8 +- .../geogrid/GeoHashGridAggregatorFactory.java | 3 +- .../bucket/geogrid/InternalGeoHashGrid.java | 137 +++--------------- .../geogrid/InternalGeoHashGridTests.java | 23 ++- 7 files changed, 262 insertions(+), 204 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/CellIdSource.java create mode 100644 
server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/CellIdSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/CellIdSource.java new file mode 100644 index 0000000000000..268a27b4669db --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/CellIdSource.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.bucket.geogrid; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedNumericDocValues; +import org.elasticsearch.common.geo.GeoHashUtils; +import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; +import org.elasticsearch.index.fielddata.MultiGeoPointValues; +import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.search.aggregations.support.ValuesSource; + +import java.io.IOException; + +/** + * Wrapper class to help convert {@link MultiGeoPointValues} + * to numeric long values for bucketing. 
+ */ +class CellIdSource extends ValuesSource.Numeric { + private final ValuesSource.GeoPoint valuesSource; + private final int precision; + + CellIdSource(GeoPoint valuesSource, int precision) { + this.valuesSource = valuesSource; + //different GeoPoints could map to the same or different geohash cells. + this.precision = precision; + } + + public int precision() { + return precision; + } + + @Override + public boolean isFloatingPoint() { + return false; + } + + @Override + public SortedNumericDocValues longValues(LeafReaderContext ctx) { + return new CellValues(valuesSource.geoPointValues(ctx), precision); + } + + @Override + public SortedNumericDoubleValues doubleValues(LeafReaderContext ctx) { + throw new UnsupportedOperationException(); + } + + @Override + public SortedBinaryDocValues bytesValues(LeafReaderContext ctx) { + throw new UnsupportedOperationException(); + } + + private static class CellValues extends AbstractSortingNumericDocValues { + private MultiGeoPointValues geoValues; + private int precision; + + protected CellValues(MultiGeoPointValues geoValues, int precision) { + this.geoValues = geoValues; + this.precision = precision; + } + + @Override + public boolean advanceExact(int docId) throws IOException { + if (geoValues.advanceExact(docId)) { + resize(geoValues.docValueCount()); + for (int i = 0; i < docValueCount(); ++i) { + org.elasticsearch.common.geo.GeoPoint target = geoValues.nextValue(); + values[i] = GeoHashUtils.longEncode(target.getLon(), target.getLat(), + precision); + } + sort(); + return true; + } else { + return false; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java index 38469ff875365..85e4c8b228e1a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java @@ -19,21 +19,13 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SortedNumericDocValues; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.geo.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -207,65 +199,5 @@ public String getType() { return NAME; } - private static class CellValues extends AbstractSortingNumericDocValues { - private MultiGeoPointValues geoValues; - private int precision; - protected CellValues(MultiGeoPointValues geoValues, int precision) { - this.geoValues = geoValues; - this.precision = precision; - } - - @Override - public boolean advanceExact(int docId) throws IOException { - if (geoValues.advanceExact(docId)) { - resize(geoValues.docValueCount()); - for (int i = 0; i < docValueCount(); ++i) { - GeoPoint target = geoValues.nextValue(); - values[i] = GeoHashUtils.longEncode(target.getLon(), target.getLat(), - precision); - } - sort(); - return true; - } else { - return false; - } - } - } - - static class 
CellIdSource extends ValuesSource.Numeric { - private final ValuesSource.GeoPoint valuesSource; - private final int precision; - - CellIdSource(ValuesSource.GeoPoint valuesSource, int precision) { - this.valuesSource = valuesSource; - //different GeoPoints could map to the same or different geohash cells. - this.precision = precision; - } - - public int precision() { - return precision; - } - - @Override - public boolean isFloatingPoint() { - return false; - } - - @Override - public SortedNumericDocValues longValues(LeafReaderContext ctx) { - return new CellValues(valuesSource.geoPointValues(ctx), precision); - } - - @Override - public SortedNumericDoubleValues doubleValues(LeafReaderContext ctx) { - throw new UnsupportedOperationException(); - } - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext ctx) { - throw new UnsupportedOperationException(); - } - - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java new file mode 100644 index 0000000000000..8246d629bd527 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.bucket.geogrid; + +import org.elasticsearch.common.geo.GeoHashUtils; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +class GeoGridBucket extends InternalMultiBucketAggregation.InternalBucket implements GeoHashGrid.Bucket, Comparable { + + protected long geohashAsLong; + protected long docCount; + protected InternalAggregations aggregations; + + GeoGridBucket(long geohashAsLong, long docCount, InternalAggregations aggregations) { + this.docCount = docCount; + this.aggregations = aggregations; + this.geohashAsLong = geohashAsLong; + } + + /** + * Read from a stream. 
+ */ + GeoGridBucket(StreamInput in) throws IOException { + geohashAsLong = in.readLong(); + docCount = in.readVLong(); + aggregations = InternalAggregations.readAggregations(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(geohashAsLong); + out.writeVLong(docCount); + aggregations.writeTo(out); + } + + @Override + public String getKeyAsString() { + return GeoHashUtils.stringEncode(geohashAsLong); + } + + @Override + public GeoPoint getKey() { + return GeoPoint.fromGeohash(geohashAsLong); + } + + @Override + public long getDocCount() { + return docCount; + } + + @Override + public Aggregations getAggregations() { + return aggregations; + } + + @Override + public int compareTo(GeoGridBucket other) { + if (this.geohashAsLong > other.geohashAsLong) { + return 1; + } + if (this.geohashAsLong < other.geohashAsLong) { + return -1; + } + return 0; + } + + public GeoGridBucket reduce(List buckets, InternalAggregation.ReduceContext context) { + List aggregationsList = new ArrayList<>(buckets.size()); + long docCount = 0; + for (GeoGridBucket bucket : buckets) { + docCount += bucket.docCount; + aggregationsList.add(bucket.aggregations); + } + final InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + return new GeoGridBucket(geohashAsLong, docCount, aggs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Aggregation.CommonFields.KEY.getPreferredName(), getKeyAsString()); + builder.field(Aggregation.CommonFields.DOC_COUNT.getPreferredName(), docCount); + aggregations.toXContentInternal(builder, params); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GeoGridBucket bucket = (GeoGridBucket) o; + return geohashAsLong == bucket.geohashAsLong && + docCount 
== bucket.docCount && + Objects.equals(aggregations, bucket.aggregations); + } + + @Override + public int hashCode() { + return Objects.hash(geohashAsLong, docCount, aggregations); + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java index f43cfae61ba86..1ead747bb93e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java @@ -44,10 +44,10 @@ public class GeoHashGridAggregator extends BucketsAggregator { private final int requiredSize; private final int shardSize; - private final GeoGridAggregationBuilder.CellIdSource valuesSource; + private final CellIdSource valuesSource; private final LongHash bucketOrds; - GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregationBuilder.CellIdSource valuesSource, + GeoHashGridAggregator(String name, AggregatorFactories factories, CellIdSource valuesSource, int requiredSize, int shardSize, SearchContext aggregationContext, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); @@ -96,7 +96,7 @@ public void collect(int doc, long bucket) throws IOException { } // private impl that stores a bucket ord. This allows for computing the aggregations lazily. 
- static class OrdinalBucket extends InternalGeoHashGrid.Bucket { + static class OrdinalBucket extends GeoGridBucket { long bucketOrd; @@ -125,7 +125,7 @@ public InternalGeoHashGrid buildAggregation(long owningBucketOrdinal) throws IOE spare = (OrdinalBucket) ordered.insertWithOverflow(spare); } - final InternalGeoHashGrid.Bucket[] list = new InternalGeoHashGrid.Bucket[ordered.size()]; + final GeoGridBucket[] list = new GeoGridBucket[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; --i) { final OrdinalBucket bucket = (OrdinalBucket) ordered.pop(); bucket.aggregations = bucketAggregations(bucket.bucketOrd); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java index 13b4850156483..b7cb50b5f44c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java @@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.NonCollectingAggregator; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.GeoPoint; @@ -56,7 +55,7 @@ public class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory< protected Aggregator createUnmapped(Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize, - Collections. 
emptyList(), pipelineAggregators, metaData); + Collections.emptyList(), pipelineAggregators, metaData); return new NonCollectingAggregator(name, context, parent, pipelineAggregators, metaData) { @Override public InternalAggregation buildEmptyAggregation() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java index bc60f5945eb9f..6f887e644b349 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java @@ -19,13 +19,10 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.common.geo.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; @@ -45,110 +42,14 @@ * All geohashes in a grid are of the same precision and held internally as a single long * for efficiency's sake. 
*/ -public class InternalGeoHashGrid extends InternalMultiBucketAggregation implements +public class InternalGeoHashGrid extends InternalMultiBucketAggregation implements GeoHashGrid { - static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements GeoHashGrid.Bucket, Comparable { - - protected long geohashAsLong; - protected long docCount; - protected InternalAggregations aggregations; - - Bucket(long geohashAsLong, long docCount, InternalAggregations aggregations) { - this.docCount = docCount; - this.aggregations = aggregations; - this.geohashAsLong = geohashAsLong; - } - - /** - * Read from a stream. - */ - private Bucket(StreamInput in) throws IOException { - geohashAsLong = in.readLong(); - docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeLong(geohashAsLong); - out.writeVLong(docCount); - aggregations.writeTo(out); - } - - @Override - public String getKeyAsString() { - return GeoHashUtils.stringEncode(geohashAsLong); - } - - @Override - public GeoPoint getKey() { - return GeoPoint.fromGeohash(geohashAsLong); - } - - @Override - public long getDocCount() { - return docCount; - } - - @Override - public Aggregations getAggregations() { - return aggregations; - } - - @Override - public int compareTo(Bucket other) { - if (this.geohashAsLong > other.geohashAsLong) { - return 1; - } - if (this.geohashAsLong < other.geohashAsLong) { - return -1; - } - return 0; - } - - public Bucket reduce(List buckets, ReduceContext context) { - List aggregationsList = new ArrayList<>(buckets.size()); - long docCount = 0; - for (Bucket bucket : buckets) { - docCount += bucket.docCount; - aggregationsList.add(bucket.aggregations); - } - final InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); - return new Bucket(geohashAsLong, docCount, aggs); - } - - @Override - public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CommonFields.KEY.getPreferredName(), getKeyAsString()); - builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); - aggregations.toXContentInternal(builder, params); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Bucket bucket = (Bucket) o; - return geohashAsLong == bucket.geohashAsLong && - docCount == bucket.docCount && - Objects.equals(aggregations, bucket.aggregations); - } - - @Override - public int hashCode() { - return Objects.hash(geohashAsLong, docCount, aggregations); - } - - } private final int requiredSize; - private final List buckets; + private final List buckets; - InternalGeoHashGrid(String name, int requiredSize, List buckets, List pipelineAggregators, - Map metaData) { + InternalGeoHashGrid(String name, int requiredSize, List buckets, List pipelineAggregators, + Map metaData) { super(name, pipelineAggregators, metaData); this.requiredSize = requiredSize; this.buckets = buckets; @@ -160,7 +61,7 @@ public int hashCode() { public InternalGeoHashGrid(StreamInput in) throws IOException { super(in); requiredSize = readSize(in); - buckets = in.readList(Bucket::new); + buckets = in.readList(GeoGridBucket::new); } @Override @@ -175,30 +76,30 @@ public String getWriteableName() { } @Override - public InternalGeoHashGrid create(List buckets) { + public InternalGeoHashGrid create(List buckets) { return new InternalGeoHashGrid(this.name, this.requiredSize, buckets, this.pipelineAggregators(), this.metaData); } @Override - public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.geohashAsLong, prototype.docCount, aggregations); + public GeoGridBucket createBucket(InternalAggregations aggregations, GeoGridBucket prototype) { + return new 
GeoGridBucket(prototype.geohashAsLong, prototype.docCount, aggregations); } @Override - public List getBuckets() { + public List getBuckets() { return unmodifiableList(buckets); } @Override public InternalGeoHashGrid doReduce(List aggregations, ReduceContext reduceContext) { - LongObjectPagedHashMap> buckets = null; + LongObjectPagedHashMap> buckets = null; for (InternalAggregation aggregation : aggregations) { InternalGeoHashGrid grid = (InternalGeoHashGrid) aggregation; if (buckets == null) { buckets = new LongObjectPagedHashMap<>(grid.buckets.size(), reduceContext.bigArrays()); } - for (Bucket bucket : grid.buckets) { - List existingBuckets = buckets.get(bucket.geohashAsLong); + for (GeoGridBucket bucket : grid.buckets) { + List existingBuckets = buckets.get(bucket.geohashAsLong); if (existingBuckets == null) { existingBuckets = new ArrayList<>(aggregations.size()); buckets.put(bucket.geohashAsLong, existingBuckets); @@ -209,9 +110,9 @@ public InternalGeoHashGrid doReduce(List aggregations, Redu final int size = Math.toIntExact(reduceContext.isFinalReduce() == false ? 
buckets.size() : Math.min(requiredSize, buckets.size())); BucketPriorityQueue ordered = new BucketPriorityQueue(size); - for (LongObjectPagedHashMap.Cursor> cursor : buckets) { - List sameCellBuckets = cursor.value; - Bucket removed = ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext)); + for (LongObjectPagedHashMap.Cursor> cursor : buckets) { + List sameCellBuckets = cursor.value; + GeoGridBucket removed = ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext)); if (removed != null) { reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(removed)); } else { @@ -219,7 +120,7 @@ public InternalGeoHashGrid doReduce(List aggregations, Redu } } buckets.close(); - Bucket[] list = new Bucket[ordered.size()]; + GeoGridBucket[] list = new GeoGridBucket[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; i--) { list[i] = ordered.pop(); } @@ -229,7 +130,7 @@ public InternalGeoHashGrid doReduce(List aggregations, Redu @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); - for (Bucket bucket : buckets) { + for (GeoGridBucket bucket : buckets) { bucket.toXContent(builder, params); } builder.endArray(); @@ -253,14 +154,14 @@ protected boolean doEquals(Object obj) { Objects.equals(buckets, other.buckets); } - static class BucketPriorityQueue extends PriorityQueue { + static class BucketPriorityQueue extends PriorityQueue { BucketPriorityQueue(int size) { super(size); } @Override - protected boolean lessThan(Bucket o1, Bucket o2) { + protected boolean lessThan(GeoGridBucket o1, GeoGridBucket o2) { int cmp = Long.compare(o2.getDocCount(), o1.getDocCount()); if (cmp == 0) { cmp = o2.compareTo(o1); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java index 822e05ffa6582..78016833dbc0f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid.Bucket; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.util.ArrayList; @@ -50,13 +49,13 @@ protected InternalGeoHashGrid createTestInstance(String name, Map metaData, InternalAggregations aggregations) { int size = randomNumberOfBuckets(); - List buckets = new ArrayList<>(size); + List buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { double latitude = randomDoubleBetween(-90.0, 90.0, false); double longitude = randomDoubleBetween(-180.0, 180.0, false); long geoHashAsLong = GeoHashUtils.longEncode(longitude, latitude, 4); - buckets.add(new InternalGeoHashGrid.Bucket(geoHashAsLong, randomInt(IndexWriter.MAX_DOCS), aggregations)); + buckets.add(new GeoGridBucket(geoHashAsLong, randomInt(IndexWriter.MAX_DOCS), aggregations)); } return new InternalGeoHashGrid(name, size, buckets, pipelineAggregators, metaData); } @@ -68,24 +67,24 @@ protected Writeable.Reader instanceReader() { @Override protected void assertReduced(InternalGeoHashGrid reduced, List inputs) { - Map> map = new HashMap<>(); + Map> map = new HashMap<>(); for (InternalGeoHashGrid input : inputs) { for (GeoHashGrid.Bucket bucket : input.getBuckets()) { - InternalGeoHashGrid.Bucket internalBucket = (InternalGeoHashGrid.Bucket) bucket; - List buckets = 
map.get(internalBucket.geohashAsLong); + GeoGridBucket internalBucket = (GeoGridBucket) bucket; + List buckets = map.get(internalBucket.geohashAsLong); if (buckets == null) { map.put(internalBucket.geohashAsLong, buckets = new ArrayList<>()); } buckets.add(internalBucket); } } - List expectedBuckets = new ArrayList<>(); - for (Map.Entry> entry : map.entrySet()) { + List expectedBuckets = new ArrayList<>(); + for (Map.Entry> entry : map.entrySet()) { long docCount = 0; - for (InternalGeoHashGrid.Bucket bucket : entry.getValue()) { + for (GeoGridBucket bucket : entry.getValue()) { docCount += bucket.docCount; } - expectedBuckets.add(new InternalGeoHashGrid.Bucket(entry.getKey(), docCount, InternalAggregations.EMPTY)); + expectedBuckets.add(new GeoGridBucket(entry.getKey(), docCount, InternalAggregations.EMPTY)); } expectedBuckets.sort((first, second) -> { int cmp = Long.compare(second.docCount, first.docCount); @@ -114,7 +113,7 @@ protected Class implementationClass() { protected InternalGeoHashGrid mutateInstance(InternalGeoHashGrid instance) { String name = instance.getName(); int size = instance.getRequiredSize(); - List buckets = instance.getBuckets(); + List buckets = instance.getBuckets(); List pipelineAggregators = instance.pipelineAggregators(); Map metaData = instance.getMetaData(); switch (between(0, 3)) { @@ -124,7 +123,7 @@ protected InternalGeoHashGrid mutateInstance(InternalGeoHashGrid instance) { case 1: buckets = new ArrayList<>(buckets); buckets.add( - new InternalGeoHashGrid.Bucket(randomNonNegativeLong(), randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY)); + new GeoGridBucket(randomNonNegativeLong(), randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY)); break; case 2: size = size + between(1, 10); From adae233f77238a12911733d15c45e5acbe689512 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Jan 2019 16:42:25 -0500 Subject: [PATCH 63/71] Add some deprecation optimizations (#37597) This commit optimizes some of the 
performance issues from using deprecation logging: - we optimize encoding the deprecation value - we optimize formatting the deprecation string - we optimize away getting the current time (by using cached startup time) --- .../common/logging/DeprecationLogger.java | 59 +++++++++++++++---- 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java index e8f06a43a5c13..1eb3d52b46cde 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +++ b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -157,14 +157,13 @@ public void deprecatedAndMaybeLog(final String key, final String msg, final Obje * arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional * quoted field that can be in a variety of specified date formats; here we use RFC 1123 format. */ - private static final String WARNING_FORMAT = + private static final String WARNING_PREFIX = String.format( Locale.ROOT, - "299 Elasticsearch-%s%s-%s ", + "299 Elasticsearch-%s%s-%s", Version.CURRENT.toString(), Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "", - Build.CURRENT.shortHash()) + - "\"%s\" \"%s\""; + Build.CURRENT.shortHash()); /* * RFC 7234 section 5.5 specifies that the warn-date is a quoted HTTP-date. HTTP-date is defined in RFC 7234 Appendix B as being from @@ -223,7 +222,7 @@ public void deprecatedAndMaybeLog(final String key, final String msg, final Obje .toFormatter(Locale.getDefault(Locale.Category.FORMAT)); } - private static final ZoneId GMT = ZoneId.of("GMT"); + private static final String STARTUP_TIME = RFC_7231_DATE_TIME.format(ZonedDateTime.now(ZoneId.of("GMT"))); /** * Regular expression to test if a string matches the RFC7234 specification for warning headers. 
This pattern assumes that the warn code @@ -339,7 +338,9 @@ public Void run() { * @return a warning value formatted according to RFC 7234 */ public static String formatWarning(final String s) { - return String.format(Locale.ROOT, WARNING_FORMAT, escapeAndEncode(s), RFC_7231_DATE_TIME.format(ZonedDateTime.now(GMT))); + return WARNING_PREFIX + " " + + "\"" + escapeAndEncode(s) + "\"" + " " + + "\"" + STARTUP_TIME + "\""; } /** @@ -359,7 +360,31 @@ public static String escapeAndEncode(final String s) { * @return the escaped string */ static String escapeBackslashesAndQuotes(final String s) { - return s.replaceAll("([\"\\\\])", "\\\\$1"); + /* + * We want a fast path check to avoid creating the string builder and copying characters if needed. So we walk the string looking + * for either of the characters that we need to escape. If we find a character that needs escaping, we start over and + */ + boolean escapingNeeded = false; + for (int i = 0; i < s.length(); i++) { + final char c = s.charAt(i); + if (c == '\\' || c == '"') { + escapingNeeded = true; + break; + } + } + + if (escapingNeeded) { + final StringBuilder sb = new StringBuilder(); + for (final char c : s.toCharArray()) { + if (c == '\\' || c == '"') { + sb.append("\\"); + } + sb.append(c); + } + return sb.toString(); + } else { + return s; + } } private static BitSet doesNotNeedEncoding; @@ -384,7 +409,7 @@ static String escapeBackslashesAndQuotes(final String s) { for (int i = 0x80; i <= 0xFF; i++) { doesNotNeedEncoding.set(i); } - assert !doesNotNeedEncoding.get('%'); + assert doesNotNeedEncoding.get('%') == false : doesNotNeedEncoding; } private static final Charset UTF_8 = Charset.forName("UTF-8"); @@ -396,8 +421,21 @@ static String escapeBackslashesAndQuotes(final String s) { * @return the encoded string */ static String encode(final String s) { - final StringBuilder sb = new StringBuilder(s.length()); + // first check if the string needs any encoding; this is the fast path and we want to avoid creating 
a string builder and copying boolean encodingNeeded = false; + for (int i = 0; i < s.length(); i++) { + int current = s.charAt(i); + if (doesNotNeedEncoding.get(current) == false) { + encodingNeeded = true; + break; + } + } + + if (encodingNeeded == false) { + return s; + } + + final StringBuilder sb = new StringBuilder(s.length()); for (int i = 0; i < s.length();) { int current = s.charAt(i); /* @@ -420,10 +458,9 @@ static String encode(final String s) { for (int j = 0; j < bytes.length; j++) { sb.append('%').append(hex(bytes[j] >> 4)).append(hex(bytes[j])); } - encodingNeeded = true; } } - return encodingNeeded ? sb.toString() : s; + return sb.toString(); } private static char hex(int b) { From cd412893965e244cc708dae25f8d5bf44baac723 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 18 Jan 2019 14:48:20 -0700 Subject: [PATCH 64/71] Add local session timeouts to leader node (#37438) This is related to #35975. This commit adds timeout functionality to the local session on a leader node. When a session is started, a timeout is scheduled using a repeatable runnable. If the session is not accessed in between two runs the session is closed. When the sssion is closed, the repeating task is cancelled. Additionally, this commit moves session uuid generation to the leader cluster. And renames the PutCcrRestoreSessionRequest to StartCcrRestoreSessionRequest to reflect that change. 
--- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 4 +- .../elasticsearch/xpack/ccr/CcrSettings.java | 19 +++ .../repository/CcrRestoreSourceService.java | 84 +++++++---- .../CcrRestoreSourceServiceTests.java | 133 ++++++++++++------ 4 files changed, 168 insertions(+), 72 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 6ff0460d51bc2..4dd167a6568ab 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -161,10 +161,10 @@ public Collection createComponents( return emptyList(); } - CcrRestoreSourceService restoreSourceService = new CcrRestoreSourceService(); - this.restoreSourceService.set(restoreSourceService); CcrSettings ccrSettings = new CcrSettings(settings, clusterService.getClusterSettings()); this.ccrSettings.set(ccrSettings); + CcrRestoreSourceService restoreSourceService = new CcrRestoreSourceService(threadPool, ccrSettings); + this.restoreSourceService.set(restoreSourceService); return Arrays.asList( ccrLicenseChecker, restoreSourceService, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java index fe0eb7853e3ce..26089ab46952d 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java @@ -43,6 +43,14 @@ public final class CcrSettings { Setting.byteSizeSetting("ccr.indices.recovery.max_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB), Setting.Property.Dynamic, Setting.Property.NodeScope); + /** + * The leader must open resources for a ccr recovery. If there is no activity for this interval of time, + * the leader will close the restore session. 
+ */ + public static final Setting INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING = + Setting.timeSetting("ccr.indices.recovery.recovery_activity_timeout", TimeValue.timeValueSeconds(60), + Setting.Property.Dynamic, Setting.Property.NodeScope); + /** * The settings defined by CCR. * @@ -53,22 +61,33 @@ static List> getSettings() { XPackSettings.CCR_ENABLED_SETTING, CCR_FOLLOWING_INDEX_SETTING, RECOVERY_MAX_BYTES_PER_SECOND, + INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, CCR_AUTO_FOLLOW_WAIT_FOR_METADATA_TIMEOUT); } private final CombinedRateLimiter ccrRateLimiter; + private volatile TimeValue recoveryActivityTimeout; public CcrSettings(Settings settings, ClusterSettings clusterSettings) { + this.recoveryActivityTimeout = INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.get(settings); this.ccrRateLimiter = new CombinedRateLimiter(RECOVERY_MAX_BYTES_PER_SECOND.get(settings)); clusterSettings.addSettingsUpdateConsumer(RECOVERY_MAX_BYTES_PER_SECOND, this::setMaxBytesPerSec); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, this::setRecoveryActivityTimeout); } private void setMaxBytesPerSec(ByteSizeValue maxBytesPerSec) { ccrRateLimiter.setMBPerSec(maxBytesPerSec); } + private void setRecoveryActivityTimeout(TimeValue recoveryActivityTimeout) { + this.recoveryActivityTimeout = recoveryActivityTimeout; + } + public CombinedRateLimiter getRateLimiter() { return ccrRateLimiter; } + public TimeValue getRecoveryActivityTimeout() { + return recoveryActivityTimeout; + } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 785600dd5f8fc..1c7f9f95adbbe 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -17,6 +17,7 @@ 
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.KeyedLock; @@ -28,6 +29,9 @@ import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ccr.CcrSettings; import java.io.Closeable; import java.io.IOException; @@ -45,8 +49,14 @@ public class CcrRestoreSourceService extends AbstractLifecycleComponent implemen private final Map onGoingRestores = ConcurrentCollections.newConcurrentMap(); private final Map> sessionsForShard = new HashMap<>(); - private final CopyOnWriteArrayList> openSessionListeners = new CopyOnWriteArrayList<>(); private final CopyOnWriteArrayList> closeSessionListeners = new CopyOnWriteArrayList<>(); + private final ThreadPool threadPool; + private final CcrSettings ccrSettings; + + public CcrRestoreSourceService(ThreadPool threadPool, CcrSettings ccrSettings) { + this.threadPool = threadPool; + this.ccrSettings = ccrSettings; + } @Override public synchronized void afterIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { @@ -81,26 +91,10 @@ protected synchronized void doClose() throws IOException { // TODO: The listeners are for testing. Once end-to-end file restore is implemented and can be tested, // these should be removed. 
- public void addOpenSessionListener(Consumer listener) { - openSessionListeners.add(listener); - } - public void addCloseSessionListener(Consumer listener) { closeSessionListeners.add(listener); } - // default visibility for testing - synchronized HashSet getSessionsForShard(IndexShard indexShard) { - return sessionsForShard.get(indexShard); - } - - // default visibility for testing - synchronized RestoreSession getOngoingRestore(String sessionUUID) { - return onGoingRestores.get(sessionUUID); - } - - // TODO: Add a local timeout for the session. This timeout might might be for the entire session to be - // complete. Or it could be for session to have been touched. public synchronized Store.MetadataSnapshot openSession(String sessionUUID, IndexShard indexShard) throws IOException { boolean success = false; RestoreSession restore = null; @@ -113,9 +107,8 @@ public synchronized Store.MetadataSnapshot openSession(String sessionUUID, Index if (indexShard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(indexShard.shardId(), "cannot open ccr restore session if shard closed"); } - restore = new RestoreSession(sessionUUID, indexShard, indexShard.acquireSafeIndexCommit()); + restore = new RestoreSession(sessionUUID, indexShard, indexShard.acquireSafeIndexCommit(), scheduleTimeout(sessionUUID)); onGoingRestores.put(sessionUUID, restore); - openSessionListeners.forEach(c -> c.accept(sessionUUID)); HashSet sessions = sessionsForShard.computeIfAbsent(indexShard, (s) -> new HashSet<>()); sessions.add(sessionUUID); } @@ -133,34 +126,60 @@ public synchronized Store.MetadataSnapshot openSession(String sessionUUID, Index } public void closeSession(String sessionUUID) { + internalCloseSession(sessionUUID, true); + } + + public synchronized SessionReader getSessionReader(String sessionUUID) { + RestoreSession restore = onGoingRestores.get(sessionUUID); + if (restore == null) { + logger.debug("could not get session [{}] because session not found", 
sessionUUID); + throw new IllegalArgumentException("session [" + sessionUUID + "] not found"); + } + restore.idle = false; + return new SessionReader(restore); + } + + private void internalCloseSession(String sessionUUID, boolean throwIfSessionMissing) { final RestoreSession restore; synchronized (this) { - closeSessionListeners.forEach(c -> c.accept(sessionUUID)); restore = onGoingRestores.remove(sessionUUID); if (restore == null) { - logger.debug("could not close session [{}] because session not found", sessionUUID); - throw new IllegalArgumentException("session [" + sessionUUID + "] not found"); + if (throwIfSessionMissing) { + logger.debug("could not close session [{}] because session not found", sessionUUID); + throw new IllegalArgumentException("session [" + sessionUUID + "] not found"); + } else { + return; + } } HashSet sessions = sessionsForShard.get(restore.indexShard); assert sessions != null : "No session UUIDs for shard even though one [" + sessionUUID + "] is active in ongoing restores"; if (sessions != null) { boolean removed = sessions.remove(sessionUUID); - assert removed : "No session found for UUID [" + sessionUUID +"]"; + assert removed : "No session found for UUID [" + sessionUUID + "]"; if (sessions.isEmpty()) { sessionsForShard.remove(restore.indexShard); } } } + closeSessionListeners.forEach(c -> c.accept(sessionUUID)); restore.decRef(); + } - public synchronized SessionReader getSessionReader(String sessionUUID) { - RestoreSession restore = onGoingRestores.get(sessionUUID); - if (restore == null) { - logger.debug("could not get session [{}] because session not found", sessionUUID); - throw new IllegalArgumentException("session [" + sessionUUID + "] not found"); + private Scheduler.Cancellable scheduleTimeout(String sessionUUID) { + TimeValue idleTimeout = ccrSettings.getRecoveryActivityTimeout(); + return threadPool.scheduleWithFixedDelay(() -> maybeTimeout(sessionUUID), idleTimeout, ThreadPool.Names.GENERIC); + } + + private void 
maybeTimeout(String sessionUUID) { + RestoreSession restoreSession = onGoingRestores.get(sessionUUID); + if (restoreSession != null) { + if (restoreSession.idle) { + internalCloseSession(sessionUUID, false); + } else { + restoreSession.idle = true; + } } - return new SessionReader(restore); } private static class RestoreSession extends AbstractRefCounted { @@ -168,14 +187,18 @@ private static class RestoreSession extends AbstractRefCounted { private final String sessionUUID; private final IndexShard indexShard; private final Engine.IndexCommitRef commitRef; + private final Scheduler.Cancellable timeoutTask; private final KeyedLock keyedLock = new KeyedLock<>(); private final Map cachedInputs = new ConcurrentHashMap<>(); + private volatile boolean idle = false; - private RestoreSession(String sessionUUID, IndexShard indexShard, Engine.IndexCommitRef commitRef) { + private RestoreSession(String sessionUUID, IndexShard indexShard, Engine.IndexCommitRef commitRef, + Scheduler.Cancellable timeoutTask) { super("restore-session"); this.sessionUUID = sessionUUID; this.indexShard = indexShard; this.commitRef = commitRef; + this.timeoutTask = timeoutTask; } private Store.MetadataSnapshot getMetaData() throws IOException { @@ -223,6 +246,7 @@ private long readFileBytes(String fileName, BytesReference reference) throws IOE protected void closeInternal() { logger.debug("closing session [{}] for shard [{}]", sessionUUID, indexShard.shardId()); assert keyedLock.hasLockedKeys() == false : "Should not hold any file locks when closing"; + timeoutTask.cancel(); IOUtils.closeWhileHandlingException(cachedInputs.values()); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java index c0b7863edf25a..5f352788d9597 100644 --- 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java @@ -8,28 +8,41 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.elasticsearch.cluster.coordination.DeterministicTaskQueue; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.store.StoreFileMetaData; +import org.elasticsearch.xpack.ccr.CcrSettings; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; -import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.node.Node.NODE_NAME_SETTING; public class CcrRestoreSourceServiceTests extends IndexShardTestCase { private CcrRestoreSourceService restoreSourceService; + private DeterministicTaskQueue taskQueue; @Before public void setUp() throws Exception { super.setUp(); - restoreSourceService = new CcrRestoreSourceService(); + Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), "node").build(); + taskQueue = new DeterministicTaskQueue(settings, random()); + Set> registeredSettings = Sets.newHashSet(CcrSettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING, + CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, registeredSettings); + restoreSourceService = new CcrRestoreSourceService(taskQueue.getThreadPool(), new 
CcrSettings(Settings.EMPTY, clusterSettings)); } public void testOpenSession() throws IOException { @@ -39,22 +52,21 @@ public void testOpenSession() throws IOException { final String sessionUUID2 = UUIDs.randomBase64UUID(); final String sessionUUID3 = UUIDs.randomBase64UUID(); - assertNull(restoreSourceService.getSessionsForShard(indexShard1)); + restoreSourceService.openSession(sessionUUID1, indexShard1); + restoreSourceService.openSession(sessionUUID2, indexShard1); + + try (CcrRestoreSourceService.SessionReader reader1 = restoreSourceService.getSessionReader(sessionUUID1); + CcrRestoreSourceService.SessionReader reader2 = restoreSourceService.getSessionReader(sessionUUID2)) { + // Would throw exception if missing + } - assertNotNull(restoreSourceService.openSession(sessionUUID1, indexShard1)); - HashSet sessionsForShard = restoreSourceService.getSessionsForShard(indexShard1); - assertEquals(1, sessionsForShard.size()); - assertTrue(sessionsForShard.contains(sessionUUID1)); - assertNotNull(restoreSourceService.openSession(sessionUUID2, indexShard1)); - sessionsForShard = restoreSourceService.getSessionsForShard(indexShard1); - assertEquals(2, sessionsForShard.size()); - assertTrue(sessionsForShard.contains(sessionUUID2)); + restoreSourceService.openSession(sessionUUID3, indexShard2); - assertNull(restoreSourceService.getSessionsForShard(indexShard2)); - assertNotNull(restoreSourceService.openSession(sessionUUID3, indexShard2)); - sessionsForShard = restoreSourceService.getSessionsForShard(indexShard2); - assertEquals(1, sessionsForShard.size()); - assertTrue(sessionsForShard.contains(sessionUUID3)); + try (CcrRestoreSourceService.SessionReader reader1 = restoreSourceService.getSessionReader(sessionUUID1); + CcrRestoreSourceService.SessionReader reader2 = restoreSourceService.getSessionReader(sessionUUID2); + CcrRestoreSourceService.SessionReader reader3 = restoreSourceService.getSessionReader(sessionUUID3)) { + // Would throw exception if missing + } 
restoreSourceService.closeSession(sessionUUID1); restoreSourceService.closeSession(sessionUUID2); @@ -68,7 +80,6 @@ public void testCannotOpenSessionForClosedShard() throws IOException { closeShards(indexShard); String sessionUUID = UUIDs.randomBase64UUID(); expectThrows(IllegalIndexShardStateException.class, () -> restoreSourceService.openSession(sessionUUID, indexShard)); - assertNull(restoreSourceService.getOngoingRestore(sessionUUID)); } public void testCloseSession() throws IOException { @@ -82,25 +93,26 @@ public void testCloseSession() throws IOException { restoreSourceService.openSession(sessionUUID2, indexShard1); restoreSourceService.openSession(sessionUUID3, indexShard2); - assertEquals(2, restoreSourceService.getSessionsForShard(indexShard1).size()); - assertEquals(1, restoreSourceService.getSessionsForShard(indexShard2).size()); - assertNotNull(restoreSourceService.getOngoingRestore(sessionUUID1)); - assertNotNull(restoreSourceService.getOngoingRestore(sessionUUID2)); - assertNotNull(restoreSourceService.getOngoingRestore(sessionUUID3)); + try (CcrRestoreSourceService.SessionReader reader1 = restoreSourceService.getSessionReader(sessionUUID1); + CcrRestoreSourceService.SessionReader reader2 = restoreSourceService.getSessionReader(sessionUUID2); + CcrRestoreSourceService.SessionReader reader3 = restoreSourceService.getSessionReader(sessionUUID3)) { + // Would throw exception if missing + } + + assertTrue(taskQueue.hasDeferredTasks()); restoreSourceService.closeSession(sessionUUID1); - assertEquals(1, restoreSourceService.getSessionsForShard(indexShard1).size()); - assertNull(restoreSourceService.getOngoingRestore(sessionUUID1)); - assertFalse(restoreSourceService.getSessionsForShard(indexShard1).contains(sessionUUID1)); - assertTrue(restoreSourceService.getSessionsForShard(indexShard1).contains(sessionUUID2)); + expectThrows(IllegalArgumentException.class, () -> restoreSourceService.getSessionReader(sessionUUID1)); 
restoreSourceService.closeSession(sessionUUID2); - assertNull(restoreSourceService.getSessionsForShard(indexShard1)); - assertNull(restoreSourceService.getOngoingRestore(sessionUUID2)); + expectThrows(IllegalArgumentException.class, () -> restoreSourceService.getSessionReader(sessionUUID2)); restoreSourceService.closeSession(sessionUUID3); - assertNull(restoreSourceService.getSessionsForShard(indexShard2)); - assertNull(restoreSourceService.getOngoingRestore(sessionUUID3)); + expectThrows(IllegalArgumentException.class, () -> restoreSourceService.getSessionReader(sessionUUID3)); + + taskQueue.runAllTasks(); + // The tasks will not be rescheduled as the sessions are closed. + assertFalse(taskQueue.hasDeferredTasks()); closeShards(indexShard1, indexShard2); } @@ -116,14 +128,20 @@ public void testCloseShardListenerFunctionality() throws IOException { restoreSourceService.openSession(sessionUUID2, indexShard1); restoreSourceService.openSession(sessionUUID3, indexShard2); - assertEquals(2, restoreSourceService.getSessionsForShard(indexShard1).size()); - assertEquals(1, restoreSourceService.getSessionsForShard(indexShard2).size()); + try (CcrRestoreSourceService.SessionReader reader1 = restoreSourceService.getSessionReader(sessionUUID1); + CcrRestoreSourceService.SessionReader reader2 = restoreSourceService.getSessionReader(sessionUUID2); + CcrRestoreSourceService.SessionReader reader3 = restoreSourceService.getSessionReader(sessionUUID3)) { + // Would throw exception if missing + } restoreSourceService.afterIndexShardClosed(indexShard1.shardId(), indexShard1, Settings.EMPTY); - assertNull(restoreSourceService.getSessionsForShard(indexShard1)); - assertNull(restoreSourceService.getOngoingRestore(sessionUUID1)); - assertNull(restoreSourceService.getOngoingRestore(sessionUUID2)); + expectThrows(IllegalArgumentException.class, () -> restoreSourceService.getSessionReader(sessionUUID1)); + expectThrows(IllegalArgumentException.class, () -> 
restoreSourceService.getSessionReader(sessionUUID2)); + + try (CcrRestoreSourceService.SessionReader reader3 = restoreSourceService.getSessionReader(sessionUUID3)) { + // Would throw exception if missing + } restoreSourceService.closeSession(sessionUUID3); closeShards(indexShard1, indexShard2); @@ -167,24 +185,59 @@ public void testGetSessionDoesNotLeakFileIfClosed() throws IOException { indexDoc(indexShard, "_doc", Integer.toString(i)); flushShard(indexShard, true); } - final String sessionUUID1 = UUIDs.randomBase64UUID(); + final String sessionUUID = UUIDs.randomBase64UUID(); - restoreSourceService.openSession(sessionUUID1, indexShard); + restoreSourceService.openSession(sessionUUID, indexShard); ArrayList files = new ArrayList<>(); indexShard.snapshotStoreMetadata().forEach(files::add); - try (CcrRestoreSourceService.SessionReader sessionReader = restoreSourceService.getSessionReader(sessionUUID1)) { + try (CcrRestoreSourceService.SessionReader sessionReader = restoreSourceService.getSessionReader(sessionUUID)) { sessionReader.readFileBytes(files.get(0).name(), new BytesArray(new byte[10])); } // Request a second file to ensure that original file is not leaked - try (CcrRestoreSourceService.SessionReader sessionReader = restoreSourceService.getSessionReader(sessionUUID1)) { + try (CcrRestoreSourceService.SessionReader sessionReader = restoreSourceService.getSessionReader(sessionUUID)) { sessionReader.readFileBytes(files.get(1).name(), new BytesArray(new byte[10])); } - restoreSourceService.closeSession(sessionUUID1); + restoreSourceService.closeSession(sessionUUID); closeShards(indexShard); // Exception will be thrown if file is not closed. } + + public void testSessionCanTimeout() throws Exception { + IndexShard indexShard = newStartedShard(true); + + final String sessionUUID = UUIDs.randomBase64UUID(); + + restoreSourceService.openSession(sessionUUID, indexShard); + + // Session starts as not idle. 
First task will mark it as idle + assertTrue(taskQueue.hasDeferredTasks()); + taskQueue.advanceTime(); + taskQueue.runAllRunnableTasks(); + // Task is still scheduled + assertTrue(taskQueue.hasDeferredTasks()); + + // Accessing session marks it as not-idle + try (CcrRestoreSourceService.SessionReader reader = restoreSourceService.getSessionReader(sessionUUID)) { + // Check session exists + } + + assertTrue(taskQueue.hasDeferredTasks()); + taskQueue.advanceTime(); + taskQueue.runAllRunnableTasks(); + // Task is still scheduled + assertTrue(taskQueue.hasDeferredTasks()); + + taskQueue.advanceTime(); + taskQueue.runAllRunnableTasks(); + // Task is cancelled when the session times out + assertFalse(taskQueue.hasDeferredTasks()); + + expectThrows(IllegalArgumentException.class, () -> restoreSourceService.getSessionReader(sessionUUID)); + + closeShards(indexShard); + } } From fe753ee1d23aa104ae37907a064eb2a6dfa5860b Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 18 Jan 2019 16:31:21 -0700 Subject: [PATCH 65/71] Do not add index event listener if CCR disabled (#37432) Currently we add the CcrRestoreSourceService as an index event listener. However, if ccr is disabled, this service is null and we attempt to add a null listener throwing an exception. This commit only adds the listener if ccr is enabled.
--- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 4 +- .../xpack/ccr/CcrDisabledIT.java | 45 +++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrDisabledIT.java diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 4dd167a6568ab..ab0f995803762 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -306,7 +306,9 @@ public Map getInternalRepositories(Environment env, @Override public void onIndexModule(IndexModule indexModule) { - indexModule.addIndexEventListener(this.restoreSourceService.get()); + if (enabled) { + indexModule.addIndexEventListener(this.restoreSourceService.get()); + } } protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrDisabledIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrDisabledIT.java new file mode 100644 index 0000000000000..92e0ea06a30e7 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrDisabledIT.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ccr; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.core.XPackClientPlugin; +import org.elasticsearch.xpack.core.XPackSettings; + +import java.util.Collection; +import java.util.Collections; + +public class CcrDisabledIT extends ESIntegTestCase { + + public void testClusterCanStartWithCcrInstalledButNotEnabled() throws Exception { + // TODO: Assert that x-pack ccr feature is not enabled once feature functionality has been added + ensureGreen(); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(XPackSettings.CCR_ENABLED_SETTING.getKey(), true) + .put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + @Override + protected Settings transportClientSettings() { + return Settings.builder().put(super.transportClientSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(LocalStateCcr.class); + } + + @Override + protected Collection> transportClientPlugins() { + return Collections.singletonList(XPackClientPlugin.class); + } +} From c03308a071a4e7b6f740d93719eeba959dc1f0d7 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 18 Jan 2019 17:50:51 -0600 Subject: [PATCH 66/71] Update get users to allow unknown fields (#37593) The subparser in get users allows for unknown fields. This commit sets the value to true for the parser and modifies the test such that it accurately tests it. 
Relates #36938 --- .../client/security/GetUsersResponse.java | 2 +- .../security/GetUsersResponseTests.java | 109 ++++++++++++------ 2 files changed, 75 insertions(+), 36 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java index 107b93afe7ce4..39d4a25a64207 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUsersResponse.java @@ -95,7 +95,7 @@ public int hashCode() { public static final ParseField ENABLED = new ParseField("enabled"); @SuppressWarnings("unchecked") - public static final ConstructingObjectParser USER_PARSER = new ConstructingObjectParser<>("user_info", + public static final ConstructingObjectParser USER_PARSER = new ConstructingObjectParser<>("user_info", true, (constructorObjects) -> { int i = 0; final String username = (String) constructorObjects[i++]; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java index 3025241bb3909..69db584287ceb 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/GetUsersResponseTests.java @@ -19,58 +19,97 @@ package org.elasticsearch.client.security; import org.elasticsearch.client.security.user.User; -import org.elasticsearch.common.xcontent.DeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import 
org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.test.XContentTestUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; import java.util.stream.Collectors; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; /** tests the Response for getting users from the security HLRC */ public class GetUsersResponseTests extends ESTestCase { + public void testFromXContent() throws IOException { - String json = - "{\n" + - " \"jacknich\": {\n" + - " \"username\": \"jacknich\",\n" + - " \"roles\": [\n" + - " \"admin\", \"other_role1\"\n" + - " ],\n" + - " \"full_name\": \"Jack Nicholson\",\n" + - " \"email\": \"jacknich@example.com\",\n" + - " \"metadata\": { \"intelligence\" : 7 },\n" + - " \"enabled\": true\n" + - " }\n" + - "}"; - final GetUsersResponse response = GetUsersResponse.fromXContent((XContentType.JSON.xContent().createParser( - new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() { - @Override - public void usedDeprecatedName(String usedName, String modernName) { - } + xContentTester(this::createParser, + GetUsersResponseTests::createTestInstance, + this::toXContent, + GetUsersResponse::fromXContent) + .supportsUnknownFields(false) + .assertToXContentEquivalence(false) + .test(); + } + + private XContentBuilder toXContentUser(User user, boolean enabled, XContentBuilder builder) throws IOException { + XContentBuilder tempBuilder = JsonXContent.contentBuilder(); + tempBuilder.startObject(); + tempBuilder.field("username", user.getUsername()); + tempBuilder.array("roles", user.getRoles().toArray()); + 
tempBuilder.field("full_name", user.getFullName()); + tempBuilder.field("email", user.getEmail()); + tempBuilder.field("metadata", user.getMetadata()); + tempBuilder.field("enabled", enabled); + tempBuilder.endObject(); + + // This sub object should support unknown fields, but metadata cannot contain complex extra objects or it will fail + Predicate excludeFilter = path -> path.equals("metadata"); + BytesReference newBytes = XContentTestUtils.insertRandomFields(XContentType.JSON, BytesReference.bytes(tempBuilder), + excludeFilter, random()); + builder.rawValue(newBytes.streamInput(), XContentType.JSON); + return builder; + } + + private XContentBuilder toXContent(GetUsersResponse response, XContentBuilder builder) throws IOException { + builder.startObject(); + + List disabledUsers = new ArrayList<>(response.getUsers()); + disabledUsers.removeAll(response.getEnabledUsers()); + + for (User user : disabledUsers) { + builder.field(user.getUsername()); + toXContentUser(user, false, builder); + } + for (User user : response.getEnabledUsers()) { + builder.field(user.getUsername()); + toXContentUser(user, true, builder); + } + builder.endObject(); + return builder; + } + + private static GetUsersResponse createTestInstance() { + final Set users = new HashSet<>(); + final Set enabledUsers = new HashSet<>(); + Map metadata = new HashMap<>(); + metadata.put(randomAlphaOfLengthBetween(1, 5), randomInt()); - @Override - public void usedDeprecatedField(String usedName, String replacedWith) { - } - }, json))); - assertThat(response.getUsers().size(), equalTo(1)); - final User user = response.getUsers().iterator().next(); - assertThat(user.getUsername(), equalTo("jacknich")); - assertThat(user.getRoles().size(), equalTo(2)); - assertThat(user.getFullName(), equalTo("Jack Nicholson")); - assertThat(user.getEmail(), equalTo("jacknich@example.com")); - final Map metadata = new HashMap<>(); - metadata.put("intelligence", 7); - assertThat(metadata, equalTo(user.getMetadata())); + 
final User user1 = new User(randomAlphaOfLength(8), + Arrays.asList(new String[] {randomAlphaOfLength(5), randomAlphaOfLength(5)}), + metadata, randomAlphaOfLength(10), null); + users.add(user1); + enabledUsers.add(user1); + Map metadata2 = new HashMap<>(); + metadata2.put(randomAlphaOfLengthBetween(1, 5), randomInt()); + metadata2.put(randomAlphaOfLengthBetween(1, 5), randomBoolean()); + + final User user2 = new User(randomAlphaOfLength(8), + Arrays.asList(new String[] {randomAlphaOfLength(5), randomAlphaOfLength(5)}), + metadata2, randomAlphaOfLength(10), null); + users.add(user2); + return new GetUsersResponse(users, enabledUsers); } public void testEqualsHashCode() { From b4c18a9eb41f64aa21fa97fff4d92aad37e617a5 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 18 Jan 2019 14:43:50 -0800 Subject: [PATCH 67/71] Remove an unused constant in PutMappingRequest. --- .../org/elasticsearch/client/indices/PutMappingRequest.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java index 4607e7a5589ce..16a885796a9ad 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutMappingRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.client.indices; -import com.carrotsearch.hppc.ObjectHashSet; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -43,11 +42,6 @@ */ public class PutMappingRequest extends TimedRequest implements IndicesRequest, ToXContentObject { - private static ObjectHashSet RESERVED_FIELDS = ObjectHashSet.from( - "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index", - "_size", 
"_timestamp", "_ttl", "_field_names" - ); - private final String[] indices; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); From 9b32f57cf1a3103c1e845840119adc7f4b2bc29d Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 18 Jan 2019 19:01:35 -0600 Subject: [PATCH 68/71] Update jdk used by the docker builds (#37621) With the release of 11.0.2, the old URLs no longer work. This exposed a few small bugs in the gradle config. One was that --no-cache was not present in the docker build command, so it was not failing at first. Then once only the ext.expansions was changed and the docker build task was not, it was not executing it. --- distribution/docker/build.gradle | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index ec1b6ebb799bf..219d81e1117ec 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -19,8 +19,8 @@ dependencies { ext.expansions = { oss -> return [ 'elasticsearch' : oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}.tar.gz", - 'jdkUrl' : 'https://download.java.net/java/GA/jdk11/13/GPL/openjdk-11.0.1_linux-x64_bin.tar.gz', - 'jdkVersion' : '11.0.1', + 'jdkUrl' : 'https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz', + 'jdkVersion' : '11.0.2', 'license': oss ? 
'Apache-2.0' : 'Elastic License', 'version' : VersionProperties.elasticsearch ] @@ -58,6 +58,7 @@ void addCopyDockerContextTask(final boolean oss) { void addCopyDockerfileTask(final boolean oss) { task(taskName("copy", oss, "Dockerfile"), type: Copy) { + inputs.properties(expansions(oss)) // ensure task is run when ext.expansions is changed mustRunAfter(taskName("copy", oss, "DockerContext")) into files(oss) @@ -82,7 +83,7 @@ void addBuildDockerImage(final boolean oss) { ] } executable 'docker' - final List dockerArgs = ['build', files(oss), '--pull'] + final List dockerArgs = ['build', files(oss), '--pull', '--no-cache'] for (final String tag : tags) { dockerArgs.add('--tag') dockerArgs.add(tag) From fc99eb3e65b7a8fb25517782c760f7a1692bf2cc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 19 Jan 2019 16:16:58 -0800 Subject: [PATCH 69/71] Add cache cleaning task for ML snapshot (#37505) The ML subproject of xpack has a cache for the cpp artifact snapshots which is checked on each build. The cache is outside of the build dir so that it is not wiped on a typical clean, as the artifacts can be large and do not change often. This commit adds a cleanCache task which will wipe the cache dir, as over time the size of the directory can become bloated. 
--- x-pack/plugin/ml/cpp-snapshot/build.gradle | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/ml/cpp-snapshot/build.gradle b/x-pack/plugin/ml/cpp-snapshot/build.gradle index e47566cc82cea..e5b55293159aa 100644 --- a/x-pack/plugin/ml/cpp-snapshot/build.gradle +++ b/x-pack/plugin/ml/cpp-snapshot/build.gradle @@ -46,6 +46,10 @@ task downloadMachineLearningSnapshot { } } +task cleanCache(type: Delete) { + delete "${projectDir}/.cache" +} + artifacts { 'default' file: snapshotZip, name: 'ml-cpp', type: 'zip', builtBy: downloadMachineLearningSnapshot } From 5308746270292775ddb14c377e96871b0e8e256e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 20 Jan 2019 12:51:24 +0200 Subject: [PATCH 70/71] Remove Watcher Account "unsecure" settings (#36736) Removes all sensitive settings (passwords, auth tokens, urls, etc...) for watcher notifications accounts. These settings were deprecated (and herein removed) in favor of their secure sibling that is set inside the elasticsearch keystore. 
For example: `xpack.notification.email.account..smtp.password` is no longer a valid setting, and it is replaced by `xpack.notification.email.account..smtp.secure_password` --- .../migration/migrate_7_0/settings.asciidoc | 23 ++++++++ .../settings/notification-settings.asciidoc | 20 +++---- .../watcher/notification/email/Account.java | 20 +++---- .../notification/email/EmailService.java | 7 +-- .../notification/hipchat/HipChatAccount.java | 17 ++---- .../notification/hipchat/HipChatService.java | 8 +-- .../notification/jira/JiraAccount.java | 40 +++++-------- .../notification/jira/JiraService.java | 17 +----- .../pagerduty/PagerDutyAccount.java | 25 +++----- .../pagerduty/PagerDutyService.java | 9 +-- .../notification/slack/SlackAccount.java | 28 ++++----- .../notification/slack/SlackService.java | 9 +-- .../actions/email/EmailAttachmentTests.java | 5 +- .../actions/email/EmailMessageIdTests.java | 5 +- .../jira/ExecutableJiraActionTests.java | 19 ++++--- .../watcher/actions/jira/JiraActionTests.java | 10 +++- .../slack/ExecutableSlackActionTests.java | 7 ++- .../watcher/history/HistoryStoreTests.java | 7 ++- .../HistoryTemplateEmailMappingsTests.java | 6 +- .../notification/email/AccountTests.java | 38 +++---------- .../hipchat/HipChatAccountsTests.java | 5 +- .../hipchat/HipChatServiceTests.java | 22 +++++-- .../hipchat/IntegrationAccountTests.java | 21 +++++-- .../hipchat/UserAccountTests.java | 23 ++++++-- .../notification/hipchat/V1AccountTests.java | 12 +++- .../notification/jira/JiraAccountTests.java | 57 ++++++++++++------- .../pagerduty/PagerDutyAccountsTests.java | 7 ++- .../build.gradle | 2 +- 28 files changed, 243 insertions(+), 226 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc index 9a271c65271a3..b5ae85eb7dff6 100644 --- a/docs/reference/migration/migrate_7_0/settings.asciidoc +++ b/docs/reference/migration/migrate_7_0/settings.asciidoc @@ -131,3 +131,26 @@ The 
removal of these default settings also removes the ability for a component t fallback to a default configuration when using TLS. Each component (realm, transport, http, http client, etc) must now be configured with their own settings for TLS if it is being used. + +[float] +[[watcher-notifications-account-settings]] +==== Watcher notifications account settings + +The following settings have been removed in favor of the secure variants. +The <> have to be defined inside each cluster +node's keystore, i.e., they are not to be specified via the cluster settings API. + +- `xpack.notification.email.account..smtp.password`, instead use +`xpack.notification.email.account..smtp.secure_password` +- `xpack.notification.hipchat.account..auth_token`, instead use +`xpack.notification.hipchat.account..secure_auth_token` +- `xpack.notification.jira.account..url`, instead use +`xpack.notification.jira.account..secure_url` +- `xpack.notification.jira.account..user`, instead use +`xpack.notification.jira.account..secure_user` +- `xpack.notification.jira.account..password`, instead use +`xpack.notification.jira.account..secure_password` +- `xpack.notification.pagerduty.account..service_api_key`, instead use +`xpack.notification.pagerduty.account..secure_service_api_key` +- `xpack.notification.slack.account..url`, instead use +`xpack.notification.slack.account..secure_url` diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc index e098f22716876..2f14dd276b849 100644 --- a/docs/reference/settings/notification-settings.asciidoc +++ b/docs/reference/settings/notification-settings.asciidoc @@ -115,7 +115,7 @@ can specify the following email account attributes: `smtp.user` (<>);; The user name for SMTP. Required. - `smtp.password` (<>);; + `smtp.secure_password` (<>);; The password for the specified SMTP user. `smtp.starttls.enable` (<>);; @@ -222,9 +222,8 @@ via HipChat. 
You can specify the following HipChat account attributes: The HipChat account profile to use: `integration`, `user`, or `v1`. Required. - `auth_token`;; - The authentication token to use to access - the HipChat API. Required. + `secure_auth_token` (<>);; + The authentication token to use to access the HipChat API. Required. `host`;; The HipChat server hostname. Defaults to `api.hipchat.com`. @@ -268,9 +267,8 @@ via Slack. You can specify the following Slack account attributes: [[slack-account-attributes]] - `url`;; - The Incoming Webhook URL to use to post - messages to Slack. Required. + `secure_url` (<>);; + The Incoming Webhook URL to use to post messages to Slack. Required. `message_defaults.from`;; The sender name to display in the @@ -309,13 +307,13 @@ issues in Jira. You can specify the following Jira account attributes: [[jira-account-attributes]] - `url`;; + `secure_url` (<>);; The URL of the Jira Software server. Required. - `user`;; + `secure_user` (<>);; The name of the user to connect to the Jira Software server. Required. - `password`;; + `secure_password` (<>);; The password of the user to connect to the Jira Software server. Required. `issue_defaults`;; @@ -341,7 +339,7 @@ via PagerDuty. You can specify the following PagerDuty account attributes: A name for the PagerDuty account associated with the API key you are using to access PagerDuty. Required. - `service_api_key`;; + `secure_service_api_key` (<>);; The https://developer.pagerduty.com/documentation/rest/authentication[ PagerDuty API key] to use to access PagerDuty. Required. 
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java index 02c0e1167dd95..b6a6e259ecc74 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java @@ -32,8 +32,7 @@ public class Account { static final String SMTP_PROTOCOL = "smtp"; - private static final String SMTP_PASSWORD = "password"; - private static final Setting SECURE_PASSWORD_SETTING = SecureSetting.secureString("secure_" + SMTP_PASSWORD, null); + public static final Setting SECURE_PASSWORD_SETTING = SecureSetting.secureString("secure_password", null); static { SecurityManager sm = System.getSecurityManager(); @@ -213,7 +212,7 @@ static class Smtp { port = settings.getAsInt("port", settings.getAsInt("localport", settings.getAsInt("local_port", 25))); user = settings.get("user", settings.get("from", null)); - password = getSecureSetting(SMTP_PASSWORD, settings, SECURE_PASSWORD_SETTING); + password = getSecureSetting(settings, SECURE_PASSWORD_SETTING); //password = passStr != null ? passStr.toCharArray() : null; properties = loadSmtpProperties(settings); } @@ -225,17 +224,12 @@ static class Smtp { * Note: if your setting was not previously secure, than the string reference that is in the setting object is still * insecure. This is only constructing a new SecureString with the char[] of the insecure setting. 
*/ - private static SecureString getSecureSetting(String settingName, Settings settings, Setting secureSetting) { - String value = settings.get(settingName); - if (value == null) { - SecureString secureString = secureSetting.get(settings); - if (secureString != null && secureString.length() > 0) { - return secureString; - } else { - return null; - } + private static SecureString getSecureSetting(Settings settings, Setting secureSetting) { + SecureString secureString = secureSetting.get(settings); + if (secureString != null && secureString.length() > 0) { + return secureString; } else { - return new SecureString(value.toCharArray()); + return null; } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java index 0933ba4616280..de7161dcdd1d6 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java @@ -65,10 +65,6 @@ public class EmailService extends NotificationService { Setting.affixKeySetting("xpack.notification.email.account.", "smtp.user", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); - private static final Setting.AffixSetting SETTING_SMTP_PASSWORD = - Setting.affixKeySetting("xpack.notification.email.account.", "smtp.password", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); - private static final Setting.AffixSetting SETTING_SECURE_PASSWORD = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.secure_password", (key) -> SecureSetting.secureString(key, null)); @@ -122,7 +118,6 @@ public EmailService(Settings settings, @Nullable CryptoService cryptoService, Cl clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_HOST, (s, o) -> {}, (s, 
o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PORT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_USER, (s, o) -> {}, (s, o) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PASSWORD, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {}); @@ -182,7 +177,7 @@ public Email email() { private static List> getDynamicSettings() { return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE, SETTING_EMAIL_DEFAULTS, SETTING_SMTP_AUTH, SETTING_SMTP_HOST, - SETTING_SMTP_PASSWORD, SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED, + SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED, SETTING_SMTP_TIMEOUT, SETTING_SMTP_CONNECTION_TIMEOUT, SETTING_SMTP_WRITE_TIMEOUT, SETTING_SMTP_LOCAL_ADDRESS, SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccount.java index 53f8c1533a193..67aee91f13976 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccount.java @@ -23,7 +23,6 @@ public abstract class HipChatAccount { - public static final String AUTH_TOKEN_SETTING = "auth_token"; public static final String ROOM_SETTING = HipChatMessage.Field.ROOM.getPreferredName(); public static final String DEFAULT_ROOM_SETTING = 
"message_defaults." + HipChatMessage.Field.ROOM.getPreferredName(); public static final String DEFAULT_USER_SETTING = "message_defaults." + HipChatMessage.Field.USER.getPreferredName(); @@ -32,7 +31,7 @@ public abstract class HipChatAccount { public static final String DEFAULT_COLOR_SETTING = "message_defaults." + HipChatMessage.Field.COLOR.getPreferredName(); public static final String DEFAULT_NOTIFY_SETTING = "message_defaults." + HipChatMessage.Field.NOTIFY.getPreferredName(); - private static final Setting SECURE_AUTH_TOKEN_SETTING = SecureSetting.secureString("secure_" + AUTH_TOKEN_SETTING, null); + static final Setting SECURE_AUTH_TOKEN_SETTING = SecureSetting.secureString("secure_auth_token", null); protected final Logger logger; protected final String name; @@ -52,16 +51,12 @@ protected HipChatAccount(String name, Profile profile, Settings settings, HipCha } private static String getAuthToken(String name, Settings settings) { - String authToken = settings.get(AUTH_TOKEN_SETTING); - if (authToken == null || authToken.length() == 0) { - SecureString secureString = SECURE_AUTH_TOKEN_SETTING.get(settings); - if (secureString == null || secureString.length() < 1) { - throw new SettingsException("hipchat account [" + name + "] missing required [" + AUTH_TOKEN_SETTING + "] setting"); - } - authToken = secureString.toString(); + SecureString secureString = SECURE_AUTH_TOKEN_SETTING.get(settings); + if (secureString == null || secureString.length() < 1) { + throw new SettingsException( + "hipchat account [" + name + "] missing required [" + SECURE_AUTH_TOKEN_SETTING.getKey() + "] secure setting"); } - - return authToken; + return secureString.toString(); } public abstract String type(); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java index 39b1f0cb61709..efa403fd7697e 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java @@ -34,11 +34,6 @@ public class HipChatService extends NotificationService { static final Setting SETTING_DEFAULT_PORT = Setting.intSetting("xpack.notification.hipchat.port", 443, Setting.Property.Dynamic, Setting.Property.NodeScope); - private static final Setting.AffixSetting SETTING_AUTH_TOKEN = - Setting.affixKeySetting("xpack.notification.hipchat.account.", "auth_token", - (key) -> Setting.simpleString(key, Setting.Property.Dynamic, Setting.Property.NodeScope, Setting.Property.Filtered, - Setting.Property.Deprecated)); - private static final Setting.AffixSetting SETTING_AUTH_TOKEN_SECURE = Setting.affixKeySetting("xpack.notification.hipchat.account.", "secure_auth_token", (key) -> SecureSetting.secureString(key, null)); @@ -75,7 +70,6 @@ public HipChatService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_HOST, (s) -> {}); clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_PORT, (s) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_AUTH_TOKEN, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_PROFILE, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_ROOM, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_HOST, (s, o) -> {}, (s, o) -> {}); @@ -101,7 +95,7 @@ protected HipChatAccount createAccount(String name, Settings accountSettings) { } private static List> getDynamicSettings() { - return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_AUTH_TOKEN, SETTING_PROFILE, SETTING_ROOM, SETTING_MESSAGE_DEFAULTS, + return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE, SETTING_ROOM, 
SETTING_MESSAGE_DEFAULTS, SETTING_DEFAULT_HOST, SETTING_DEFAULT_PORT, SETTING_HOST, SETTING_PORT); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java index 17f8657ec9d88..b539d007eeef8 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccount.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.notification.jira; import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; @@ -42,15 +41,12 @@ public class JiraAccount { **/ public static final String DEFAULT_PATH = "/rest/api/2/issue"; - static final String USER_SETTING = "user"; - static final String PASSWORD_SETTING = "password"; - static final String URL_SETTING = "url"; static final String ISSUE_DEFAULTS_SETTING = "issue_defaults"; static final String ALLOW_HTTP_SETTING = "allow_http"; - private static final Setting SECURE_USER_SETTING = SecureSetting.secureString("secure_" + USER_SETTING, null); - private static final Setting SECURE_PASSWORD_SETTING = SecureSetting.secureString("secure_" + PASSWORD_SETTING, null); - private static final Setting SECURE_URL_SETTING = SecureSetting.secureString("secure_" + URL_SETTING, null); + public static final Setting SECURE_USER_SETTING = SecureSetting.secureString("secure_user", null); + public static final Setting SECURE_PASSWORD_SETTING = SecureSetting.secureString("secure_password", null); + public static final Setting SECURE_URL_SETTING = SecureSetting.secureString("secure_url", null); private final HttpClient httpClient; private final String name; @@ 
-62,7 +58,7 @@ public class JiraAccount { public JiraAccount(String name, Settings settings, HttpClient httpClient) { this.httpClient = httpClient; this.name = name; - String url = getSetting(name, URL_SETTING, settings, SECURE_URL_SETTING); + String url = getSetting(name, settings, SECURE_URL_SETTING); try { URI uri = new URI(url); Scheme protocol = Scheme.parse(uri.getScheme()); @@ -71,16 +67,11 @@ public JiraAccount(String name, Settings settings, HttpClient httpClient) { } this.url = uri; } catch (URISyntaxException | IllegalArgumentException e) { - throw new SettingsException("invalid jira [" + name + "] account settings. invalid [" + URL_SETTING + "] setting", e); - } - this.user = getSetting(name, USER_SETTING, settings, SECURE_USER_SETTING); - if (Strings.isEmpty(this.user)) { - throw requiredSettingException(name, USER_SETTING); - } - this.password = getSetting(name, PASSWORD_SETTING, settings, SECURE_PASSWORD_SETTING); - if (Strings.isEmpty(this.password)) { - throw requiredSettingException(name, PASSWORD_SETTING); + throw new SettingsException( + "invalid jira [" + name + "] account settings. 
invalid [" + SECURE_URL_SETTING.getKey() + "] setting", e); } + this.user = getSetting(name, settings, SECURE_USER_SETTING); + this.password = getSetting(name, settings, SECURE_PASSWORD_SETTING); try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.startObject(); settings.getAsSettings(ISSUE_DEFAULTS_SETTING).toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -95,17 +86,12 @@ public JiraAccount(String name, Settings settings, HttpClient httpClient) { } } - private static String getSetting(String accountName, String settingName, Settings settings, Setting secureSetting) { - String value = settings.get(settingName); - if (value == null) { - SecureString secureString = secureSetting.get(settings); - if (secureString == null || secureString.length() < 1) { - throw requiredSettingException(accountName, settingName); - } - value = secureString.toString(); + private static String getSetting(String accountName, Settings settings, Setting secureSetting) { + SecureString secureString = secureSetting.get(settings); + if (secureString == null || secureString.length() < 1) { + throw requiredSettingException(accountName, secureSetting.getKey()); } - - return value; + return secureString.toString(); } public String getName() { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java index 425ef0ee44fd1..9989d0145ed50 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java @@ -32,18 +32,6 @@ public class JiraService extends NotificationService { Setting.affixKeySetting("xpack.notification.jira.account.", "allow_http", (key) -> Setting.boolSetting(key, false, Property.Dynamic, Property.NodeScope)); - private static 
final Setting.AffixSetting SETTING_URL = - Setting.affixKeySetting("xpack.notification.jira.account.", "url", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); - - private static final Setting.AffixSetting SETTING_USER = - Setting.affixKeySetting("xpack.notification.jira.account.", "user", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); - - private static final Setting.AffixSetting SETTING_PASSWORD = - Setting.affixKeySetting("xpack.notification.jira.account.", "password", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered, Property.Deprecated)); - private static final Setting.AffixSetting SETTING_SECURE_USER = Setting.affixKeySetting("xpack.notification.jira.account.", "secure_user", (key) -> SecureSetting.secureString(key, null)); @@ -68,9 +56,6 @@ public JiraService(Settings settings, HttpClient httpClient, ClusterSettings clu // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_ALLOW_HTTP, (s, o) -> {}, (s, o) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_URL, (s, o) -> {}, (s, o) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_USER, (s, o) -> {}, (s, o) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_PASSWORD, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {}); // do an initial load reload(settings); @@ -82,7 +67,7 @@ protected JiraAccount createAccount(String name, Settings settings) { } private static List> getDynamicSettings() { - return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_ALLOW_HTTP, SETTING_URL, SETTING_USER, SETTING_PASSWORD, SETTING_DEFAULTS); + return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_ALLOW_HTTP, SETTING_DEFAULTS); } private static List> getSecureSettings() { diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java index fdc5ca07b84c1..b2a1a1b8b9de2 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java @@ -19,19 +19,17 @@ public class PagerDutyAccount { - private static final String SERVICE_KEY_SETTING = "service_api_key"; private static final String TRIGGER_DEFAULTS_SETTING = "event_defaults"; - private static final Setting SECURE_SERVICE_API_KEY_SETTING = - SecureSetting.secureString("secure_" + SERVICE_KEY_SETTING, null); + public static final Setting SECURE_SERVICE_API_KEY_SETTING = SecureSetting.secureString("secure_service_api_key", null); private final String name; private final String serviceKey; private final HttpClient httpClient; private final IncidentEventDefaults eventDefaults; - PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient) { + PagerDutyAccount(String name, Settings accountSettings, HttpClient httpClient) { this.name = name; - this.serviceKey = getServiceKey(name, accountSettings, serviceSettings); + this.serviceKey = getServiceKey(name, accountSettings); this.httpClient = httpClient; this.eventDefaults = new IncidentEventDefaults(accountSettings.getAsSettings(TRIGGER_DEFAULTS_SETTING)); @@ -51,17 +49,12 @@ public SentEvent send(IncidentEvent event, Payload payload, String watchId) thro return SentEvent.responded(event, request, response); } - private static String getServiceKey(String name, Settings accountSettings, Settings serviceSettings) { - String serviceKey = accountSettings.get(SERVICE_KEY_SETTING, serviceSettings.get(SERVICE_KEY_SETTING, null)); - if (serviceKey == null) { - 
SecureString secureString = SECURE_SERVICE_API_KEY_SETTING.get(accountSettings); - if (secureString == null || secureString.length() < 1) { - throw new SettingsException("invalid pagerduty account [" + name + "]. missing required [" + SERVICE_KEY_SETTING + - "] setting"); - } - serviceKey = secureString.toString(); + private static String getServiceKey(String name, Settings accountSettings) { + SecureString secureString = SECURE_SERVICE_API_KEY_SETTING.get(accountSettings); + if (secureString == null || secureString.length() < 1) { + throw new SettingsException( + "invalid pagerduty account [" + name + "]. missing required [" + SECURE_SERVICE_API_KEY_SETTING.getKey() + "] setting"); } - - return serviceKey; + return secureString.toString(); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java index 6a0fa5b5bf45b..84d7c16b43784 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java @@ -26,10 +26,6 @@ public class PagerDutyService extends NotificationService { private static final Setting SETTING_DEFAULT_ACCOUNT = Setting.simpleString("xpack.notification.pagerduty.default_account", Property.Dynamic, Property.NodeScope); - private static final Setting.AffixSetting SETTING_SERVICE_API_KEY = - Setting.affixKeySetting("xpack.notification.pagerduty.account.", "service_api_key", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered, Property.Deprecated)); - private static final Setting.AffixSetting SETTING_SECURE_SERVICE_API_KEY = Setting.affixKeySetting("xpack.notification.pagerduty.account.", "secure_service_api_key", (key) -> SecureSetting.secureString(key, 
null)); @@ -45,7 +41,6 @@ public PagerDutyService(Settings settings, HttpClient httpClient, ClusterSetting this.httpClient = httpClient; // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {}); // do an initial load reload(settings); @@ -53,11 +48,11 @@ public PagerDutyService(Settings settings, HttpClient httpClient, ClusterSetting @Override protected PagerDutyAccount createAccount(String name, Settings accountSettings) { - return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient); + return new PagerDutyAccount(name, accountSettings, httpClient); } private static List> getDynamicSettings() { - return Arrays.asList(SETTING_SERVICE_API_KEY, SETTING_DEFAULTS, SETTING_DEFAULT_ACCOUNT); + return Arrays.asList(SETTING_DEFAULTS, SETTING_DEFAULT_ACCOUNT); } private static List> getSecureSettings() { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackAccount.java index 98857cc4cb28a..db6b043fffe3b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackAccount.java @@ -32,11 +32,9 @@ public class SlackAccount { - - public static final String URL_SETTING = "url"; public static final String MESSAGE_DEFAULTS_SETTING = "message_defaults"; - private static final Setting SECURE_URL_SETTING = SecureSetting.secureString("secure_" + URL_SETTING, null); + private static final Setting SECURE_URL_SETTING = SecureSetting.secureString("secure_url", null); final String name; final URI url; @@ -44,9 +42,9 @@ public 
class SlackAccount { final Logger logger; final SlackMessageDefaults messageDefaults; - public SlackAccount(String name, Settings settings, Settings defaultSettings, HttpClient httpClient, Logger logger) { + public SlackAccount(String name, Settings settings, HttpClient httpClient, Logger logger) { this.name = name; - this.url = url(name, settings, defaultSettings); + this.url = url(name, settings); this.messageDefaults = new SlackMessageDefaults(settings.getAsSettings(MESSAGE_DEFAULTS_SETTING)); this.httpClient = httpClient; this.logger = logger; @@ -120,21 +118,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - static URI url(String name, Settings settings, Settings defaultSettings) { - String url = settings.get(URL_SETTING, defaultSettings.get(URL_SETTING, null)); - if (url == null) { - SecureString secureStringUrl = SECURE_URL_SETTING.get(settings); - if (secureStringUrl != null && secureStringUrl.length() > 0) { - url = secureStringUrl.toString(); - } - } - if (url == null) { - throw new SettingsException("invalid slack [" + name + "] account settings. missing required [" + URL_SETTING + "] setting"); + static URI url(String name, Settings settings) { + SecureString secureStringUrl = SECURE_URL_SETTING.get(settings); + if (secureStringUrl == null || secureStringUrl.length() < 1) { + throw new SettingsException( + "invalid slack [" + name + "] account settings. missing required [" + SECURE_URL_SETTING.getKey() + "] setting"); } try { - return new URI(url); + return new URI(secureStringUrl.toString()); } catch (URISyntaxException e) { - throw new SettingsException("invalid slack [" + name + "] account settings. invalid [" + URL_SETTING + "] setting", e); + throw new SettingsException( + "invalid slack [" + name + "] account settings. 
invalid [" + SECURE_URL_SETTING.getKey() + "] setting", e); } } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java index 0d8d0bc67faf6..9c3c4151530b0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java @@ -28,10 +28,6 @@ public class SlackService extends NotificationService { private static final Setting SETTING_DEFAULT_ACCOUNT = Setting.simpleString("xpack.notification.slack.default_account", Property.Dynamic, Property.NodeScope); - private static final Setting.AffixSetting SETTING_URL = - Setting.affixKeySetting("xpack.notification.slack.account.", "url", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered, Property.Deprecated)); - private static final Setting.AffixSetting SETTING_URL_SECURE = Setting.affixKeySetting("xpack.notification.slack.account.", "secure_url", (key) -> SecureSetting.secureString(key, null)); @@ -48,7 +44,6 @@ public SlackService(Settings settings, HttpClient httpClient, ClusterSettings cl this.httpClient = httpClient; // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); - clusterSettings.addAffixUpdateConsumer(SETTING_URL, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {}); // do an initial load reload(settings); @@ -56,11 +51,11 @@ public SlackService(Settings settings, HttpClient httpClient, ClusterSettings cl @Override protected SlackAccount createAccount(String name, Settings accountSettings) { - return new SlackAccount(name, accountSettings, accountSettings, httpClient, logger); + return new SlackAccount(name, 
accountSettings, httpClient, logger); } private static List> getDynamicSettings() { - return Arrays.asList(SETTING_URL, SETTING_DEFAULT_ACCOUNT, SETTING_DEFAULTS); + return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_DEFAULTS); } private static List> getSecureSettings() { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java index abc418f5c5e03..f8d7171a13d8c 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -82,13 +83,15 @@ public void cleanup() throws Exception { @Override protected Settings nodeSettings(int nodeOrdinal) { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.email.account.test.smtp.secure_password", EmailServer.PASSWORD); return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put("xpack.notification.email.account.test.smtp.auth", true) .put("xpack.notification.email.account.test.smtp.user", EmailServer.USERNAME) - .put("xpack.notification.email.account.test.smtp.password", EmailServer.PASSWORD) .put("xpack.notification.email.account.test.smtp.port", server.port()) .put("xpack.notification.email.account.test.smtp.host", "localhost") + .setSecureSettings(secureSettings) .build(); } diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailMessageIdTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailMessageIdTests.java index 08d25e3908b0e..495ac99fb9ed0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailMessageIdTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailMessageIdTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.actions.email; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -42,12 +43,14 @@ public class EmailMessageIdTests extends ESTestCase { public void startSmtpServer() { server = EmailServer.localhost(logger); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.email.account.test.smtp.secure_password", EmailServer.PASSWORD); Settings settings = Settings.builder() .put("xpack.notification.email.account.test.smtp.auth", true) .put("xpack.notification.email.account.test.smtp.user", EmailServer.USERNAME) - .put("xpack.notification.email.account.test.smtp.password", EmailServer.PASSWORD) .put("xpack.notification.email.account.test.smtp.port", server.port()) .put("xpack.notification.email.account.test.smtp.host", "localhost") + .setSecureSettings(secureSettings) .build(); Set> registeredSettings = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java index c4604d8e2a14d..4806412aeaa60 100644 --- 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.actions.jira; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; @@ -63,11 +64,11 @@ public void testProxy() throws Exception { final String user = randomAlphaOfLength(10); final String password = randomAlphaOfLength(10); - Settings accountSettings = Settings.builder() - .put("url", url) - .put("user", user) - .put("password", password) - .build(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(JiraAccount.SECURE_URL_SETTING.getKey(), url); + secureSettings.setString(JiraAccount.SECURE_USER_SETTING.getKey(), user); + secureSettings.setString(JiraAccount.SECURE_PASSWORD_SETTING.getKey(), password); + Settings accountSettings = Settings.builder().setSecureSettings(secureSettings).build(); JiraAccount account = new JiraAccount("account1", accountSettings, httpClient); @@ -259,10 +260,12 @@ public void testExecutionFieldsStringArraysNotOverridden() throws Exception { } private JiraAction.Simulated simulateExecution(Map actionFields, Map accountFields) throws Exception { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(JiraAccount.SECURE_URL_SETTING.getKey(), "https://internal-jira.elastic.co:443"); + secureSettings.setString(JiraAccount.SECURE_USER_SETTING.getKey(), "elastic"); + secureSettings.setString(JiraAccount.SECURE_PASSWORD_SETTING.getKey(), "secret"); Settings.Builder settings = Settings.builder() - .put("url", "https://internal-jira.elastic.co:443") - .put("user", "elastic") - 
.put("password", "secret") + .setSecureSettings(secureSettings) .putProperties(accountFields, s -> "issue_defaults." + s); JiraAccount account = new JiraAccount("account", settings.build(), mock(HttpClient.class)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java index a0d09e39c029e..b4ce31f75778e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -216,10 +217,13 @@ public void testExecute() throws Exception { HttpClient httpClient = mock(HttpClient.class); when(httpClient.execute(any(HttpRequest.class))).thenReturn(new HttpResponse(HttpStatus.SC_CREATED)); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("secure_url", "https://internal-jira.elastic.co:443"); + secureSettings.setString("secure_user", "elastic"); + secureSettings.setString("secure_password", "secret"); + Settings.Builder settings = Settings.builder() - .put("url", "https://internal-jira.elastic.co:443") - .put("user", "elastic") - .put("password", "secret") + .setSecureSettings(secureSettings) .put("issue_defaults.customfield_000", "foo") .put("issue_defaults.customfield_001", "bar"); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackActionTests.java 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackActionTests.java index 4c945ec9fd526..e78f1afc548c5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/ExecutableSlackActionTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.actions.slack; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; @@ -40,8 +41,10 @@ public void testProxy() throws Exception { ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(HttpRequest.class); when(httpClient.execute(argumentCaptor.capture())).thenReturn(new HttpResponse(200)); - Settings accountSettings = Settings.builder().put("url", "http://example.org").build(); - SlackAccount account = new SlackAccount("account1", accountSettings, Settings.EMPTY, httpClient, logger); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("secure_url", "http://example.org"); + Settings accountSettings = Settings.builder().setSecureSettings(secureSettings).build(); + SlackAccount account = new SlackAccount("account1", accountSettings, httpClient, logger); SlackService service = mock(SlackService.class); when(service.getAccount(eq("account1"))).thenReturn(account); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java index 2ea364de18b4e..c12cfe380c0dc 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -121,7 +122,11 @@ public void testStoreWithHideSecrets() throws Exception { final String password = randomFrom("secret", "supersecret", "123456"); final String url = "https://" + randomFrom("localhost", "internal-jira.elastic.co") + ":" + randomFrom(80, 8080, 449, 9443); - Settings settings = Settings.builder().put("url", url).put("user", username).put("password", password).build(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("secure_url", url); + secureSettings.setString("secure_user", username); + secureSettings.setString("secure_password", password); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); JiraAccount account = new JiraAccount("_account", settings, httpClient); JiraIssue jiraIssue = account.createIssue(singletonMap("foo", "bar"), null); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index c9d642b8dc05f..af1a7ad19fcce 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.history; import org.elasticsearch.action.search.SearchResponse; +import 
org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.search.aggregations.Aggregations; @@ -52,16 +53,17 @@ public void cleanup() throws Exception { @Override protected Settings nodeSettings(int nodeOrdinal) { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.email.account.test.smtp.secure_password", EmailServer.PASSWORD); return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) // email .put("xpack.notification.email.account.test.smtp.auth", true) .put("xpack.notification.email.account.test.smtp.user", EmailServer.USERNAME) - .put("xpack.notification.email.account.test.smtp.password", EmailServer.PASSWORD) .put("xpack.notification.email.account.test.smtp.port", server.port()) .put("xpack.notification.email.account.test.smtp.host", "localhost") - + .setSecureSettings(secureSettings) .build(); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java index 1cbaecef8fec5..5e87a4305fee8 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java @@ -116,8 +116,9 @@ public void testConfig() throws Exception { String password = null; if (randomBoolean()) { password = randomAlphaOfLength(8); - smtpBuilder.put("password", password); - smtpProps.put("mail.smtp.password", password); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("smtp." 
+ Account.SECURE_PASSWORD_SETTING.getKey(), password); + builder.setSecureSettings(secureSettings); } for (int i = 0; i < 5; i++) { String name = randomAlphaOfLength(5); @@ -157,11 +158,13 @@ public void testConfig() throws Exception { } public void testSend() throws Exception { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("smtp." + Account.SECURE_PASSWORD_SETTING.getKey(), EmailServer.PASSWORD); Account account = new Account(new Account.Config("default", Settings.builder() .put("smtp.host", "localhost") .put("smtp.port", server.port()) .put("smtp.user", EmailServer.USERNAME) - .put("smtp.password", EmailServer.PASSWORD) + .setSecureSettings(secureSettings) .build()), null, logger); Email email = Email.builder() @@ -192,11 +195,13 @@ public void testSend() throws Exception { } public void testSendCCAndBCC() throws Exception { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("smtp." + Account.SECURE_PASSWORD_SETTING.getKey(), EmailServer.PASSWORD); Account account = new Account(new Account.Config("default", Settings.builder() .put("smtp.host", "localhost") .put("smtp.port", server.port()) .put("smtp.user", EmailServer.USERNAME) - .put("smtp.password", EmailServer.PASSWORD) + .setSecureSettings(secureSettings) .build()), null, logger); Email email = Email.builder() @@ -293,29 +298,4 @@ public void testAccountTimeoutsConfiguredAsNumberAreRejected() { }); } - public void testEnsurePasswordSetAsSecureSetting() { - String password = "password"; - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("smtp.secure_password", password); - - Settings settings = Settings.builder() - .put("smtp.host", "localhost") - .put("smtp.port", server.port()) - .put("smtp.connection_timeout", TimeValue.timeValueMinutes(4)) - .setSecureSettings(secureSettings) - .build(); - - Account.Config config = new Account.Config("default", settings); - 
assertThat(config.smtp.password.getChars(), equalTo(password.toCharArray())); - - settings = Settings.builder() - .put("smtp.host", "localhost") - .put("smtp.port", server.port()) - .put("smtp.connection_timeout", TimeValue.timeValueMinutes(4)) - .put("smtp.password", password) - .build(); - - config = new Account.Config("default", settings); - assertThat(config.smtp.password.getChars(), equalTo(password.toCharArray())); - } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccountsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccountsTests.java index 795f5aaacc7c9..3057d935d7138 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccountsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatAccountsTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.notification.hipchat; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.watcher.common.http.HttpClient; @@ -50,9 +51,11 @@ public void testProxy() throws Exception { } private void addAccountSettings(String name, Settings.Builder builder) { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.hipchat.account." + name + ".secure_auth_token", randomAlphaOfLength(50)); HipChatAccount.Profile profile = randomFrom(HipChatAccount.Profile.values()); builder.put("xpack.notification.hipchat.account." + name + ".profile", profile.value()); - builder.put("xpack.notification.hipchat.account." 
+ name + ".auth_token", randomAlphaOfLength(50)); + builder.setSecureSettings(secureSettings); if (profile == HipChatAccount.Profile.INTEGRATION) { builder.put("xpack.notification.hipchat.account." + name + ".room", randomAlphaOfLength(10)); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java index 7b5d6c7f081a4..a10a102e414ca 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.notification.hipchat; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; @@ -40,9 +41,11 @@ public void testSingleAccountV1() throws Exception { HipChatMessage.Color defaultColor = randomBoolean() ? null : randomFrom(HipChatMessage.Color.values()); HipChatMessage.Format defaultFormat = randomBoolean() ? null : randomFrom(HipChatMessage.Format.values()); Boolean defaultNotify = randomBoolean() ? null : (Boolean) randomBoolean(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.hipchat.account." + accountName + ".secure_auth_token", "_token"); Settings.Builder settingsBuilder = Settings.builder() .put("xpack.notification.hipchat.account." + accountName + ".profile", HipChatAccount.Profile.V1.value()) - .put("xpack.notification.hipchat.account." 
+ accountName + ".auth_token", "_token"); + .setSecureSettings(secureSettings); if (host != null) { settingsBuilder.put("xpack.notification.hipchat.account." + accountName + ".host", host); } @@ -86,10 +89,12 @@ public void testSingleAccountIntegration() throws Exception { HipChatMessage.Color defaultColor = randomBoolean() ? null : randomFrom(HipChatMessage.Color.values()); HipChatMessage.Format defaultFormat = randomBoolean() ? null : randomFrom(HipChatMessage.Format.values()); Boolean defaultNotify = randomBoolean() ? null : (Boolean) randomBoolean(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.hipchat.account." + accountName + ".secure_auth_token", "_token"); Settings.Builder settingsBuilder = Settings.builder() .put("xpack.notification.hipchat.account." + accountName + ".profile", HipChatAccount.Profile.INTEGRATION.value()) - .put("xpack.notification.hipchat.account." + accountName + ".auth_token", "_token") + .setSecureSettings(secureSettings) .put("xpack.notification.hipchat.account." + accountName + ".room", room); if (host != null) { settingsBuilder.put("xpack.notification.hipchat.account." + accountName + ".host", host); @@ -122,10 +127,12 @@ public void testSingleAccountIntegration() throws Exception { public void testSingleAccountIntegrationNoRoomSetting() throws Exception { String accountName = randomAlphaOfLength(10); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.hipchat.account." + accountName + ".secure_auth_token", "_token"); Settings.Builder settingsBuilder = Settings.builder() .put("xpack.notification.hipchat.account." + accountName + ".profile", HipChatAccount.Profile.INTEGRATION.value()) - .put("xpack.notification.hipchat.account." 
+ accountName + ".auth_token", "_token"); + .setSecureSettings(secureSettings); SettingsException e = expectThrows(SettingsException.class, () -> new HipChatService(settingsBuilder.build(), httpClient, new ClusterSettings(settingsBuilder.build(), new HashSet<>(HipChatService.getSettings()))).getAccount(null)); @@ -141,9 +148,12 @@ public void testSingleAccountUser() throws Exception { HipChatMessage.Color defaultColor = randomBoolean() ? null : randomFrom(HipChatMessage.Color.values()); HipChatMessage.Format defaultFormat = randomBoolean() ? null : randomFrom(HipChatMessage.Format.values()); Boolean defaultNotify = randomBoolean() ? null : (Boolean) randomBoolean(); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.hipchat.account." + accountName + ".secure_auth_token", "_token"); Settings.Builder settingsBuilder = Settings.builder() .put("xpack.notification.hipchat.account." + accountName + ".profile", HipChatAccount.Profile.USER.value()) - .put("xpack.notification.hipchat.account." + accountName + ".auth_token", "_token"); + .setSecureSettings(secureSettings); if (host != null) { settingsBuilder.put("xpack.notification.hipchat.account." + accountName + ".host", host); } @@ -189,6 +199,8 @@ public void testMultipleAccounts() throws Exception { Settings.Builder settingsBuilder = Settings.builder(); String defaultAccount = "_a" + randomIntBetween(0, 4); settingsBuilder.put("xpack.notification.hipchat.default_account", defaultAccount); + final MockSecureSettings secureSettings = new MockSecureSettings(); + settingsBuilder.setSecureSettings(secureSettings); final boolean customGlobalServer = randomBoolean(); if (customGlobalServer) { @@ -201,7 +213,7 @@ public void testMultipleAccounts() throws Exception { String prefix = "xpack.notification.hipchat.account." 
+ name; HipChatAccount.Profile profile = randomFrom(HipChatAccount.Profile.values()); settingsBuilder.put(prefix + ".profile", profile); - settingsBuilder.put(prefix + ".auth_token", "_token" + i); + secureSettings.setString(prefix + ".secure_auth_token", "_token" + i); if (profile == HipChatAccount.Profile.INTEGRATION) { settingsBuilder.put(prefix + ".room", "_room" + i); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccountTests.java index b85348d7810bb..df1f5d3f47294 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccountTests.java @@ -7,6 +7,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.xcontent.ToXContent; @@ -36,7 +37,9 @@ public void testSettings() throws Exception { Settings.Builder sb = Settings.builder(); String authToken = randomAlphaOfLength(50); - sb.put(IntegrationAccount.AUTH_TOKEN_SETTING, authToken); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), authToken); + sb.setSecureSettings(secureSettings); String host = HipChatServer.DEFAULT.host(); if (randomBoolean()) { @@ -90,13 +93,16 @@ public void testSettingsNoAuthToken() throws Exception { new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class)); fail("Expected SettingsException"); } catch (SettingsException e) { - 
assertThat(e.getMessage(), is("hipchat account [_name] missing required [auth_token] setting")); + assertThat(e.getMessage(), is("hipchat account [_name] missing required [secure_auth_token] secure setting")); } } public void testSettingsWithoutRoom() throws Exception { Settings.Builder sb = Settings.builder(); - sb.put(IntegrationAccount.AUTH_TOKEN_SETTING, randomAlphaOfLength(50)); + String authToken = randomAlphaOfLength(50); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), authToken); + sb.setSecureSettings(secureSettings); try { new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class)); fail("Expected SettingsException"); @@ -107,7 +113,10 @@ public void testSettingsWithoutRoom() throws Exception { public void testSettingsWithoutMultipleRooms() throws Exception { Settings.Builder sb = Settings.builder(); - sb.put(IntegrationAccount.AUTH_TOKEN_SETTING, randomAlphaOfLength(50)); + String authToken = randomAlphaOfLength(50); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), authToken); + sb.setSecureSettings(secureSettings); sb.put(IntegrationAccount.ROOM_SETTING, "_r1,_r2"); try { new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class)); @@ -121,10 +130,12 @@ public void testSend() throws Exception { String token = randomAlphaOfLength(10); HttpClient httpClient = mock(HttpClient.class); String room = "Room with Spaces"; + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), token); IntegrationAccount account = new IntegrationAccount("_name", Settings.builder() .put("host", "_host") .put("port", "443") - .put("auth_token", token) + 
.setSecureSettings(secureSettings) .put("room", room) .build(), HipChatServer.DEFAULT, httpClient, mock(Logger.class)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/UserAccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/UserAccountTests.java index 28609efd025d5..6893999776b02 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/UserAccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/UserAccountTests.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; @@ -43,7 +44,9 @@ public void testSettings() throws Exception { Settings.Builder sb = Settings.builder(); String authToken = randomAlphaOfLength(50); - sb.put(UserAccount.AUTH_TOKEN_SETTING, authToken); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(UserAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), authToken); + sb.setSecureSettings(secureSettings); String host = HipChatServer.DEFAULT.host(); if (randomBoolean()) { @@ -111,16 +114,18 @@ public void testSettingsNoAuthToken() throws Exception { new UserAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class)); fail("Expected SettingsException"); } catch (SettingsException e) { - assertThat(e.getMessage(), is("hipchat account [_name] missing required [auth_token] setting")); + assertThat(e.getMessage(), is("hipchat account [_name] missing required [secure_auth_token] secure setting")); } } public void testSend() throws Exception { HttpClient httpClient = 
mock(HttpClient.class); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), "_token"); UserAccount account = new UserAccount("_name", Settings.builder() .put("host", "_host") .put("port", "443") - .put("auth_token", "_token") + .setSecureSettings(secureSettings) .build(), HipChatServer.DEFAULT, httpClient, mock(Logger.class)); HipChatMessage.Format format = randomFrom(HipChatMessage.Format.values()); @@ -240,9 +245,11 @@ public void testSend() throws Exception { } public void testColorIsOptional() throws Exception { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), "awesome-auth-token"); Settings settings = Settings.builder() .put("user", "testuser") - .put("auth_token", "awesome-auth-token") + .setSecureSettings(secureSettings) .build(); UserAccount userAccount = createUserAccount(settings); @@ -256,9 +263,11 @@ public void testColorIsOptional() throws Exception { } public void testFormatIsOptional() throws Exception { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), "awesome-auth-token"); Settings settings = Settings.builder() .put("user", "testuser") - .put("auth_token", "awesome-auth-token") + .setSecureSettings(secureSettings) .build(); UserAccount userAccount = createUserAccount(settings); @@ -272,9 +281,11 @@ public void testFormatIsOptional() throws Exception { } public void testRoomNameIsUrlEncoded() throws Exception { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), "awesome-auth-token"); Settings settings = Settings.builder() .put("user", "testuser") - .put("auth_token", "awesome-auth-token") + .setSecureSettings(secureSettings) .build(); HipChatServer 
hipChatServer = mock(HipChatServer.class); HttpClient httpClient = mock(HttpClient.class); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/V1AccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/V1AccountTests.java index c7391afb29bb6..105965539f604 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/V1AccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/V1AccountTests.java @@ -7,6 +7,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; @@ -32,7 +33,9 @@ public void testSettings() throws Exception { Settings.Builder sb = Settings.builder(); String authToken = randomAlphaOfLength(50); - sb.put(V1Account.AUTH_TOKEN_SETTING, authToken); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(V1Account.SECURE_AUTH_TOKEN_SETTING.getKey(), authToken); + sb.setSecureSettings(secureSettings); String host = HipChatServer.DEFAULT.host(); if (randomBoolean()) { @@ -96,16 +99,19 @@ public void testSettingsNoAuthToken() throws Exception { new V1Account("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class)); fail("Expected SettingsException"); } catch (SettingsException e) { - assertThat(e.getMessage(), is("hipchat account [_name] missing required [auth_token] setting")); + assertThat(e.getMessage(), is("hipchat account [_name] missing required [secure_auth_token] secure setting")); } } public void testSend() throws Exception { HttpClient httpClient = mock(HttpClient.class); + String authToken = randomAlphaOfLength(50); + final 
MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(IntegrationAccount.SECURE_AUTH_TOKEN_SETTING.getKey(), "_token"); V1Account account = new V1Account("_name", Settings.builder() .put("host", "_host") .put("port", "443") - .put("auth_token", "_token") + .setSecureSettings(secureSettings) .build(), HipChatServer.DEFAULT, httpClient, mock(Logger.class)); HipChatMessage.Format format = randomFrom(HipChatMessage.Format.values()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java index 01ee6d399875e..997a6aa6a8dc3 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/jira/JiraAccountTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; @@ -53,29 +54,38 @@ public void init() throws Exception { public void testJiraAccountSettings() { final String url = "https://internal-jira.elastic.co:443"; + final MockSecureSettings secureSettings = new MockSecureSettings(); SettingsException e = expectThrows(SettingsException.class, () -> new JiraAccount(null, Settings.EMPTY, null)); - assertThat(e.getMessage(), containsString("invalid jira [null] account settings. missing required [url] setting")); + assertThat(e.getMessage(), containsString("invalid jira [null] account settings. 
missing required [secure_url] setting")); - Settings settings1 = Settings.builder().put("url", url).build(); + secureSettings.setString("secure_url", url); + Settings settings1 = Settings.builder().setSecureSettings(secureSettings).build(); e = expectThrows(SettingsException.class, () -> new JiraAccount("test", settings1, null)); - assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [user] setting")); + assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [secure_user] setting")); - Settings settings2 = Settings.builder().put("url", url).put("user", "").build(); + secureSettings.setString("secure_user", ""); + Settings settings2 = Settings.builder().setSecureSettings(secureSettings).build(); e = expectThrows(SettingsException.class, () -> new JiraAccount("test", settings2, null)); - assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [user] setting")); + assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [secure_user] setting")); - Settings settings3 = Settings.builder().put("url", url).put("user", "foo").build(); + secureSettings.setString("secure_user", "foo"); + Settings settings3 = Settings.builder().setSecureSettings(secureSettings).build(); e = expectThrows(SettingsException.class, () -> new JiraAccount("test", settings3, null)); - assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [password] setting")); + assertThat(e.getMessage(), containsString("invalid jira [test] account settings. 
missing required [secure_password] setting")); - Settings settings4 = Settings.builder().put("url", url).put("user", "foo").put("password", "").build(); + secureSettings.setString("secure_password", ""); + Settings settings4 = Settings.builder().setSecureSettings(secureSettings).build(); e = expectThrows(SettingsException.class, () -> new JiraAccount("test", settings4, null)); - assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [password] setting")); + assertThat(e.getMessage(), containsString("invalid jira [test] account settings. missing required [secure_password] setting")); } public void testUnsecureAccountUrl() throws Exception { - Settings settings = Settings.builder().put("url", "http://localhost").put("user", "foo").put("password", "bar").build(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(JiraAccount.SECURE_USER_SETTING.getKey(), "foo"); + secureSettings.setString(JiraAccount.SECURE_PASSWORD_SETTING.getKey(), "password"); + secureSettings.setString(JiraAccount.SECURE_URL_SETTING.getKey(), "http://localhost"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); SettingsException e = expectThrows(SettingsException.class, () -> new JiraAccount("test", settings, null)); assertThat(e.getMessage(), containsString("invalid jira [test] account settings. 
unsecure scheme [HTTP]")); @@ -128,15 +138,19 @@ public void testCreateIssue() throws Exception { } public void testCustomUrls() throws Exception { - assertCustomUrl(Settings.builder().put("url", "https://localhost/foo").build(), "/foo"); - assertCustomUrl(Settings.builder().put("url", "https://localhost/foo/").build(), "/foo/"); + assertCustomUrl("https://localhost/foo", "/foo"); + assertCustomUrl("https://localhost/foo/", "/foo/"); // this ensures we retain backwards compatibility - assertCustomUrl(Settings.builder().put("url", "https://localhost/").build(), JiraAccount.DEFAULT_PATH); - assertCustomUrl(Settings.builder().put("url", "https://localhost").build(), JiraAccount.DEFAULT_PATH); + assertCustomUrl("https://localhost/", JiraAccount.DEFAULT_PATH); + assertCustomUrl("https://localhost", JiraAccount.DEFAULT_PATH); } - private void assertCustomUrl(Settings urlSettings, String expectedPath) throws IOException { - Settings settings = Settings.builder().put(urlSettings).put("user", "foo").put("password", "bar").build(); + private void assertCustomUrl(String urlSettings, String expectedPath) throws IOException { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("secure_url", urlSettings); + secureSettings.setString("secure_user", "foo"); + secureSettings.setString("secure_password", "bar"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); HttpClient client = mock(HttpClient.class); HttpResponse response = new HttpResponse(200); @@ -153,9 +167,14 @@ private void assertCustomUrl(Settings urlSettings, String expectedPath) throws I } private void addAccountSettings(String name, Settings.Builder builder) { - builder.put("xpack.notification.jira.account." + name + "." + JiraAccount.URL_SETTING, "https://internal-jira.elastic.co:443"); - builder.put("xpack.notification.jira.account." + name + "." 
+ JiraAccount.USER_SETTING, randomAlphaOfLength(10)); - builder.put("xpack.notification.jira.account." + name + "." + JiraAccount.PASSWORD_SETTING, randomAlphaOfLength(10)); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.notification.jira.account." + name + "." + JiraAccount.SECURE_URL_SETTING.getKey(), + "https://internal-jira.elastic.co:443"); + secureSettings.setString("xpack.notification.jira.account." + name + "." + JiraAccount.SECURE_USER_SETTING.getKey(), + randomAlphaOfLength(10)); + secureSettings.setString("xpack.notification.jira.account." + name + "." + JiraAccount.SECURE_PASSWORD_SETTING.getKey(), + randomAlphaOfLength(10)); + builder.setSecureSettings(secureSettings); Map defaults = randomIssueDefaults(); for (Map.Entry setting : defaults.entrySet()) { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccountsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccountsTests.java index 1e88c69614270..22d3e2adf4176 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccountsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccountsTests.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; @@ -83,7 +84,11 @@ public void testContextIsSentCorrect() throws Exception { } private void addAccountSettings(String name, Settings.Builder builder) { - builder.put("xpack.notification.pagerduty.account." 
+ name + ".service_api_key", randomAlphaOfLength(50)); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString( + "xpack.notification.pagerduty.account." + name + "." + PagerDutyAccount.SECURE_SERVICE_API_KEY_SETTING.getKey(), + randomAlphaOfLength(50)); + builder.setSecureSettings(secureSettings); Settings defaults = SlackMessageDefaultsTests.randomSettings(); for (String setting : defaults.keySet()) { builder.copy("xpack.notification.pagerduty.message_defaults." + setting, setting, defaults); diff --git a/x-pack/qa/smoke-test-watcher-with-security/build.gradle b/x-pack/qa/smoke-test-watcher-with-security/build.gradle index f5007e5b0910b..2ff2ff9272fa5 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/build.gradle +++ b/x-pack/qa/smoke-test-watcher-with-security/build.gradle @@ -22,7 +22,7 @@ integTestCluster { setting 'xpack.notification.email.account._email.smtp.host', 'host.domain' setting 'xpack.notification.email.account._email.smtp.port', '587' setting 'xpack.notification.email.account._email.smtp.user', '_user' - setting 'xpack.notification.email.account._email.smtp.password', '_passwd' + keystoreSetting 'xpack.notification.email.account._email.smtp.secure_password', '_passwd' setting 'xpack.license.self_generated.type', 'trial' extraConfigFile 'roles.yml', 'roles.yml' setupCommand 'setupTestAdminUser', From 63e9e462029a2bc4373611056b82bc826a1e10ca Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Fri, 18 Jan 2019 19:09:26 +0200 Subject: [PATCH 71/71] OpenID Connect Realm base functionality (#37009) This commit adds * An OpenID Connect Realm definition * Necessary OpenID Connect Realm settings to support Authorization code grant and Implicit grant flows * Rest and Transport Action and Request/Response objects for initiating and completing the authentication flow * Functionality for generating OIDC Authentication Request URIs Unit tests Notably missing (to be handled in subsequent PRs): * The actual 
implementation of the authentication flows * Necessary JW{T,S,E} functionality Relates: #35339 --- .../oidc/OpenIdConnectAuthenticateAction.java | 11 ++- .../OpenIdConnectAuthenticateRequest.java | 21 +++-- .../OpenIdConnectAuthenticateResponse.java | 15 ++-- ...nIdConnectPrepareAuthenticationAction.java | 11 ++- ...IdConnectPrepareAuthenticationRequest.java | 31 ++----- ...ctPrepareAuthenticationRequestBuilder.java | 10 --- ...dConnectPrepareAuthenticationResponse.java | 44 ++++++++-- .../core/security/authc/RealmSettings.java | 2 +- .../oidc/OpenIdConnectRealmSettings.java | 10 +-- ...nsportOpenIdConnectAuthenticateAction.java | 8 +- ...nIdConnectPrepareAuthenticationAction.java | 28 +++---- .../OpenIdConnectProviderConfiguration.java | 50 ++++++++++++ .../authc/oidc/OpenIdConnectRealm.java | 80 +++++++----------- .../authc/oidc/OpenIdConnectToken.java | 14 ++-- .../authc/oidc/RelyingPartyConfiguration.java | 42 ++++++++++ ...nIdConnectPrepareAuthenticationAction.java | 10 +-- ...OpenIdConnectAuthenticateRequestTests.java | 3 +- ...nectPrepareAuthenticationRequestTests.java | 14 +--- .../authc/oidc/OpenIdConnectRealmTests.java | 81 +++---------------- 19 files changed, 244 insertions(+), 241 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RelyingPartyConfiguration.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java index a0159d81a9220..b27a71e202e55 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; /** * Action for initiating an authentication process using OpenID Connect @@ -15,11 +16,17 @@ public final class OpenIdConnectAuthenticateAction extends Action getResponseReader() { + return OpenIdConnectAuthenticateResponse::new; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java index 44d8fb44322d4..3605e182ca460 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateRequest.java @@ -18,24 +18,32 @@ public class OpenIdConnectAuthenticateRequest extends ActionRequest { /** - * The URI were the OP redirected the browser after the authentication attempt. This is passed as is from the + * The URI where the OP redirected the browser after the authentication attempt. This is passed as is from the * facilitator entity (i.e. 
Kibana) */ private String redirectUri; /** - * The state value that either we or the facilitator generated for this specific flow and that was stored at the user's session with + * The state value that we generated for this specific flow and that should be stored at the user's session with * the facilitator */ private String state; /** - * The nonce value that the facilitator generated for this specific flow and that was stored at the user's session with + * The nonce value that we generated for this specific flow and that should be stored at the user's session with * the facilitator */ private String nonce; public OpenIdConnectAuthenticateRequest() { + + } + + public OpenIdConnectAuthenticateRequest(StreamInput in) throws IOException { + super.readFrom(in); + redirectUri = in.readString(); + state = in.readString(); + nonce = in.readOptionalString(); } public String getRedirectUri() { @@ -76,11 +84,8 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - redirectUri = in.readString(); - state = in.readString(); - nonce = in.readOptionalString(); + public void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } public String toString() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java index 033d496139615..93b7c6b292ae9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java @@ -25,7 +25,12 @@ public OpenIdConnectAuthenticateResponse(String principal, String accessTokenStr 
this.expiresIn = expiresIn; } - public OpenIdConnectAuthenticateResponse() { + public OpenIdConnectAuthenticateResponse(StreamInput in) throws IOException { + super.readFrom(in); + principal = in.readString(); + accessTokenString = in.readString(); + refreshTokenString = in.readString(); + expiresIn = in.readTimeValue(); } public String getPrincipal() { @@ -45,12 +50,8 @@ public TimeValue getExpiresIn() { } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - principal = in.readString(); - accessTokenString = in.readString(); - refreshTokenString = in.readString(); - expiresIn = in.readTimeValue(); + public void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java index 0fe2b356e8313..2aa82c7286cec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationAction.java @@ -6,17 +6,24 @@ package org.elasticsearch.xpack.core.security.action.oidc; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class OpenIdConnectPrepareAuthenticationAction extends Action { public static final OpenIdConnectPrepareAuthenticationAction INSTANCE = new OpenIdConnectPrepareAuthenticationAction(); public static final String NAME = "cluster:admin/xpack/security/oidc/prepare"; - protected OpenIdConnectPrepareAuthenticationAction() { + private OpenIdConnectPrepareAuthenticationAction() { super(NAME); } + @Override public 
OpenIdConnectPrepareAuthenticationResponse newResponse() { - return new OpenIdConnectPrepareAuthenticationResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return OpenIdConnectPrepareAuthenticationResponse::new; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java index a64d690784eca..af690b606feb3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequest.java @@ -16,36 +16,26 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** - * Represents a request to prepare an OAuth 2.0 authentication request + * Represents a request to prepare an OAuth 2.0 authorization request */ public class OpenIdConnectPrepareAuthenticationRequest extends ActionRequest { private String realmName; - private String state; - private String nonce; public String getRealmName() { return realmName; } - public String getState() { - return state; - } - - public String getNonce() { - return nonce; - } - public void setRealmName(String realmName) { this.realmName = realmName; } - public void setState(String state) { - this.state = state; + public OpenIdConnectPrepareAuthenticationRequest() { } - public void setNonce(String nonce) { - this.nonce = nonce; + public OpenIdConnectPrepareAuthenticationRequest(StreamInput in) throws IOException { + super.readFrom(in); + realmName = in.readString(); } @Override @@ -61,20 +51,15 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput 
out) throws IOException { super.writeTo(out); out.writeString(realmName); - out.writeOptionalString(state); - out.writeOptionalString(nonce); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - realmName = in.readString(); - state = in.readOptionalString(); - nonce = in.readOptionalString(); + public void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } public String toString() { - return "{realmName=" + realmName + ", state=" + state + ", nonce=" + nonce + "}"; + return "{realmName=" + realmName + "}"; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java index 1637d6f690bd4..b7992345a105a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestBuilder.java @@ -22,14 +22,4 @@ public OpenIdConnectPrepareAuthenticationRequestBuilder realmName(String name) { request.setRealmName(name); return this; } - - public OpenIdConnectPrepareAuthenticationRequestBuilder state(String state) { - request.setState(state); - return this; - } - - public OpenIdConnectPrepareAuthenticationRequestBuilder nonce(String nonce) { - request.setNonce(nonce); - return this; - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index ce54be4a5f5c9..cf8bce6896882 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -8,23 +8,38 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; /** - * A response containing the authorization endpoint URL and the appropriate request parameters as URL parameters + * A response object that contains the OpenID Connect Authentication Request as a URL and the state and nonce values that were + * generated for this request. */ -public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse { +public class OpenIdConnectPrepareAuthenticationResponse extends ActionResponse implements ToXContentObject { private String authenticationRequestUrl; + /* + * The oAuth2 state parameter used for CSRF protection. + */ private String state; + /* + * String value used to associate a Client session with an ID Token, and to mitigate replay attacks. 
+ */ + private String nonce; - public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state) { + public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state, String nonce) { this.authenticationRequestUrl = authorizationEndpointUrl; this.state = state; + this.nonce = nonce; } - public OpenIdConnectPrepareAuthenticationResponse() { + public OpenIdConnectPrepareAuthenticationResponse(StreamInput in) throws IOException { + super.readFrom(in); + authenticationRequestUrl = in.readString(); + state = in.readString(); + nonce = in.readString(); } public String getAuthenticationRequestUrl() { @@ -35,19 +50,34 @@ public String getState() { return state; } + public String getNonce() { + return nonce; + } + @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - authenticationRequestUrl = in.readString(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(authenticationRequestUrl); + out.writeString(state); + out.writeString(nonce); } public String toString() { - return "{authenticationRequestUrl=" + authenticationRequestUrl + ", state=" + state + "}"; + return "{authenticationRequestUrl=" + authenticationRequestUrl + ", state=" + state + ", nonce=" + nonce + "}"; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("authentication_request_url", authenticationRequestUrl); + builder.field("state", state); + builder.field("nonce", nonce); + builder.endObject(); + return builder; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java index d66a5e2ce1f21..0c35525f1debb 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java @@ -58,7 +58,7 @@ public static Setting.AffixSetting simpleString(String realmType, String } /** - * Create a {@link SecureSetting#secureString secure string} {@link Setting} object for a realm of + * Create a {@link SecureSetting#secureString secure string} {@link Setting} object of a realm of * with the provided type and setting suffix. * * @param realmType The type of the realm, used within the setting prefix diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java index 41297702daaa0..5d51d23c3c69a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -45,17 +45,11 @@ private OpenIdConnectRealmSettings() { public static final Setting.AffixSetting> RP_REQUESTED_SCOPES = Setting.affixKeySetting( RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", key -> Setting.listSetting(key, Collections.singletonList("openid"), Function.identity(), Setting.Property.NodeScope)); - public static final Setting.AffixSetting> RP_ALLOWED_SCOPES = Setting.affixKeySetting( - RealmSettings.realmSettingPrefix(TYPE), "rp.allowed_scopes", - key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Setting.Property.NodeScope)); - public static final Setting.AffixSetting> RP_ALLOWED_SIGNATURE_ALGORITHMS = Setting.affixKeySetting( - RealmSettings.realmSettingPrefix(TYPE), "rp.allowed_signature_algorithms", - key -> Setting.listSetting(key, Collections.emptyList(), 
Function.identity(), Setting.Property.NodeScope)); public static Set> getSettings() { final Set> set = Sets.newHashSet( - OP_NAME, RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_ALLOWED_SCOPES, RP_CLIENT_SECRET, - RP_ALLOWED_SIGNATURE_ALGORITHMS, OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ISSUER); + OP_NAME, RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_CLIENT_SECRET, + OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ISSUER); set.addAll(DelegatedAuthorizationSettings.getSettings(TYPE)); set.addAll(RealmSettings.getStandardSettings(TYPE)); return set; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java index 7c0fcbf791709..4f58fa7c6f72c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.Task; @@ -27,8 +28,8 @@ import java.util.Map; -public class TransportOpenIdConnectAuthenticateAction extends HandledTransportAction { +public class TransportOpenIdConnectAuthenticateAction + extends HandledTransportAction { private final ThreadPool threadPool; private final AuthenticationService authenticationService; @@ -38,7 +39,8 @@ public class 
TransportOpenIdConnectAuthenticateAction extends HandledTransportAc public TransportOpenIdConnectAuthenticateAction(ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthenticationService authenticationService, TokenService tokenService) { - super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, OpenIdConnectAuthenticateRequest::new); + super(OpenIdConnectAuthenticateAction.NAME, transportService, actionFilters, + (Writeable.Reader) OpenIdConnectAuthenticateRequest::new); this.threadPool = threadPool; this.authenticationService = authenticationService; this.tokenService = tokenService; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java index 3c304664bfb32..5d3930c791982 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectPrepareAuthenticationAction.java @@ -10,18 +10,17 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationRequest; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; +import 
org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; -import java.util.List; -import java.util.stream.Collectors; public class TransportOpenIdConnectPrepareAuthenticationAction extends HandledTransportAction { @@ -32,33 +31,26 @@ public class TransportOpenIdConnectPrepareAuthenticationAction extends HandledTr public TransportOpenIdConnectPrepareAuthenticationAction(TransportService transportService, ActionFilters actionFilters, Realms realms) { super(OpenIdConnectPrepareAuthenticationAction.NAME, transportService, actionFilters, - OpenIdConnectPrepareAuthenticationRequest::new); + (Writeable.Reader) OpenIdConnectPrepareAuthenticationRequest::new); this.realms = realms; } @Override protected void doExecute(Task task, OpenIdConnectPrepareAuthenticationRequest request, ActionListener listener) { - List realms = this.realms.stream() - .filter(r -> r instanceof OpenIdConnectRealm) - .map(r -> (OpenIdConnectRealm) r) - .filter(r -> r.name().equals(request.getRealmName())) - .collect(Collectors.toList()); - if (realms.isEmpty()) { - listener.onFailure(new ElasticsearchSecurityException("Cannot find OIDC realm with name [{}]", request.getRealmName())); - } else if (realms.size() > 1) { - // Can't define multiple realms with the same name in configuration, but check, still. 
- listener.onFailure(new ElasticsearchSecurityException("Found multiple ([{}]) OIDC realms with name [{}]", realms.size(), - request.getRealmName())); + final Realm realm = this.realms.realm(request.getRealmName()); + if (null == realm || realm instanceof OpenIdConnectRealm == false) { + listener.onFailure( + new ElasticsearchSecurityException("Cannot find OpenID Connect realm with name [{}]", request.getRealmName())); } else { - prepareAuthenticationResponse(realms.get(0), request.getState(), request.getNonce(), listener); + prepareAuthenticationResponse((OpenIdConnectRealm) realm, listener); } } - private void prepareAuthenticationResponse(OpenIdConnectRealm realm, @Nullable String state, @Nullable String nonce, + private void prepareAuthenticationResponse(OpenIdConnectRealm realm, ActionListener listener) { try { - final OpenIdConnectPrepareAuthenticationResponse authenticationResponse = realm.buildAuthenticationRequestUri(state, nonce); + final OpenIdConnectPrepareAuthenticationResponse authenticationResponse = realm.buildAuthenticationRequestUri(); listener.onResponse(authenticationResponse); } catch (ElasticsearchException e) { listener.onFailure(e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java new file mode 100644 index 0000000000000..0bfab29e626f2 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.oidc; + +import org.elasticsearch.common.Nullable; + +import java.util.Objects; + +/** + * A Class that contains all the OpenID Connect Provider configuration + */ +public class OpenIdConnectProviderConfiguration { + private final String providerName; + private final String authorizationEndpoint; + private final String tokenEndpoint; + private final String userinfoEndpoint; + private final String issuer; + + public OpenIdConnectProviderConfiguration(String providerName, String issuer, String authorizationEndpoint, + @Nullable String tokenEndpoint, @Nullable String userinfoEndpoint) { + this.providerName = Objects.requireNonNull(providerName, "OP Name must be provided"); + this.authorizationEndpoint = Objects.requireNonNull(authorizationEndpoint, "Authorization Endpoint must be provided"); + this.tokenEndpoint = tokenEndpoint; + this.userinfoEndpoint = userinfoEndpoint; + this.issuer = Objects.requireNonNull(issuer, "OP Issuer must be provided"); + } + + public String getProviderName() { + return providerName; + } + + public String getAuthorizationEndpoint() { + return authorizationEndpoint; + } + + public String getTokenEndpoint() { + return tokenEndpoint; + } + + public String getUserinfoEndpoint() { + return userinfoEndpoint; + } + + public String getIssuer() { + return issuer; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index 3092e934feb10..0e6c35456cf9a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -5,14 +5,9 @@ */ package org.elasticsearch.xpack.security.authc.oidc; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.hash.MessageDigests; -import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -28,7 +23,7 @@ import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.security.SecureRandom; -import java.util.Collections; +import java.util.Base64; import java.util.List; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT; @@ -36,30 +31,22 @@ import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_NAME; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_USERINFO_ENDPOINT; -import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SCOPES; -import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_CLIENT_ID; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REDIRECT_URI; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_RESPONSE_TYPE; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES; -public class OpenIdConnectRealm extends Realm implements Releasable { +public class OpenIdConnectRealm extends Realm { public static final String CONTEXT_TOKEN_DATA = "_oidc_tokendata"; private static final SecureRandom 
RANDOM_INSTANCE = new SecureRandom(); - private static final Logger logger = LogManager.getLogger(OpenIdConnectRealm.class); - private final OPConfiguration opConfiguration; - private final RPConfiguration rpConfiguration; + private final OpenIdConnectProviderConfiguration opConfiguration; + private final RelyingPartyConfiguration rpConfiguration; public OpenIdConnectRealm(RealmConfig config) { super(config); - this.rpConfiguration = buildRPConfiguration(config); - this.opConfiguration = buildOPConfiguration(config); - } - - @Override - public void close() { - + this.rpConfiguration = buildRelyingPartyConfiguration(config); + this.opConfiguration = buildOpenIdConnectProviderConfiguration(config); } @Override @@ -82,27 +69,27 @@ public void lookupUser(String username, ActionListener listener) { } - private RPConfiguration buildRPConfiguration(RealmConfig config) { + private RelyingPartyConfiguration buildRelyingPartyConfiguration(RealmConfig config) { String redirectUri = require(config, RP_REDIRECT_URI); String clientId = require(config, RP_CLIENT_ID); String responseType = require(config, RP_RESPONSE_TYPE); - List requestedScopes = config.hasSetting(RP_REQUESTED_SCOPES) ? - config.getSetting(RP_REQUESTED_SCOPES) : Collections.emptyList(); - List allowedScopes = config.hasSetting(RP_ALLOWED_SCOPES) ? 
- config.getSetting(RP_ALLOWED_SCOPES) : Collections.emptyList(); - List allowedSignatureAlgorithms = requireListSetting(config, RP_ALLOWED_SIGNATURE_ALGORITHMS); + if (responseType.equals("id_token") == false && responseType.equals("code") == false) { + throw new SettingsException("The configuration setting [" + RealmSettings.getFullSettingKey(config, RP_RESPONSE_TYPE) + + "] value can only be code or id_token"); + } + List requestedScopes = config.getSetting(RP_REQUESTED_SCOPES); - return new RPConfiguration(clientId, redirectUri, responseType, allowedSignatureAlgorithms, requestedScopes, allowedScopes); + return new RelyingPartyConfiguration(clientId, redirectUri, responseType, requestedScopes); } - private OPConfiguration buildOPConfiguration(RealmConfig config) { + private OpenIdConnectProviderConfiguration buildOpenIdConnectProviderConfiguration(RealmConfig config) { String providerName = require(config, OP_NAME); String authorizationEndpoint = require(config, OP_AUTHORIZATION_ENDPOINT); String issuer = require(config, OP_ISSUER); String tokenEndpoint = config.getSetting(OP_TOKEN_ENDPOINT, () -> null); String userinfoEndpoint = config.getSetting(OP_USERINFO_ENDPOINT, () -> null); - return new OPConfiguration(providerName, issuer, authorizationEndpoint, tokenEndpoint, userinfoEndpoint); + return new OpenIdConnectProviderConfiguration(providerName, issuer, authorizationEndpoint, tokenEndpoint, userinfoEndpoint); } static String require(RealmConfig config, Setting.AffixSetting setting) { @@ -114,30 +101,17 @@ static String require(RealmConfig config, Setting.AffixSetting setting) return value; } - static List requireListSetting(RealmConfig config, Setting.AffixSetting> setting) { - final List value = config.getSetting(setting); - if (value.isEmpty()) { - throw new SettingsException("The configuration setting [" + RealmSettings.getFullSettingKey(config, setting) - + "] is required"); - } - return value; - } - /** - * Creates the URI for an OIDC Authentication 
Request from the realm configuration using URI Query String Serialization and possibly - * generates a state parameter. It then returns the URI and state encapsulated in a {@link OpenIdConnectPrepareAuthenticationResponse} + * Creates the URI for an OIDC Authentication Request from the realm configuration using URI Query String Serialization and + * generates a state parameter and a nonce. It then returns the URI, state and nonce encapsulated in a + * {@link OpenIdConnectPrepareAuthenticationResponse} * - * @param state The oAuth2 state parameter used for CSRF protection. If the facilitator doesn't supply one, we generate one ourselves - * @param nonce String value used to associate a Client session with an ID Token, and to mitigate replay attacks. If the facilitator - * doesn't supply one, we don't set one for the authentication request * @return an {@link OpenIdConnectPrepareAuthenticationResponse} */ - public OpenIdConnectPrepareAuthenticationResponse buildAuthenticationRequestUri(@Nullable String state, @Nullable String nonce) - throws ElasticsearchException { + public OpenIdConnectPrepareAuthenticationResponse buildAuthenticationRequestUri() throws ElasticsearchException { try { - if (Strings.hasText(state) == false) { - state = createNonceValue(); - } + final String state = createNonceValue(); + final String nonce = createNonceValue(); StringBuilder builder = new StringBuilder(); builder.append(opConfiguration.getAuthorizationEndpoint()); addParameter(builder, "response_type", rpConfiguration.getResponseType(), true); @@ -148,9 +122,9 @@ public OpenIdConnectPrepareAuthenticationResponse buildAuthenticationRequestUri( addParameter(builder, "nonce", nonce); } addParameter(builder, "redirect_uri", rpConfiguration.getRedirectUri()); - return new OpenIdConnectPrepareAuthenticationResponse(builder.toString(), state); + return new OpenIdConnectPrepareAuthenticationResponse(builder.toString(), state, nonce); } catch (UnsupportedEncodingException e) { - throw new 
ElasticsearchException("Cannot build OIDC Authentication Request", e); + throw new ElasticsearchException("Cannot build OpenID Connect Authentication Request", e); } } @@ -166,13 +140,15 @@ private void addParameter(StringBuilder builder, String parameter, String value) } /** - * Creates a cryptographically secure alphanumeric string to be used as a nonce + * Creates a cryptographically secure alphanumeric string to be used as a nonce or state. It adheres to the + * specification's requirements by using 180 bits for the random value. + * The random string is encoded in a URL safe manner. * * @return an alphanumeric string */ private static String createNonceValue() { - final byte[] randomBytes = new byte[16]; + final byte[] randomBytes = new byte[20]; RANDOM_INSTANCE.nextBytes(randomBytes); - return MessageDigests.toHexString(randomBytes); + return Base64.getUrlEncoder().withoutPadding().encodeToString(randomBytes); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java index 2fca308fdc4e3..9fa04090fec63 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectToken.java @@ -21,12 +21,12 @@ public class OpenIdConnectToken implements AuthenticationToken { private String nonce; /** - * @param redirectUri The URI were the OP redirected the browser after the authentication event at the OP. This is passed as is from the - * facilitator entity (i.e. Kibana), so it is URL Encoded. - * @param state The state value that either we or the facilitator generated for this specific flow and that was stored - * at the user's session with the facilitator. 
- * @param nonce The nonce value that the facilitator generated for this specific flow and that was stored at the user's - * session with the facilitator. + * @param redirectUri The URI where the OP redirected the browser after the authentication event at the OP. This is passed as is from + * the facilitator entity (i.e. Kibana), so it is URL Encoded. + * @param state The state value that we generated for this specific flow and should be stored at the user's session with the + * facilitator. + * @param nonce The nonce value that we generated for this specific flow and should be stored at the user's session with the + * facilitator. */ public OpenIdConnectToken(String redirectUri, String state, String nonce) { this.redirectUri = redirectUri; @@ -36,7 +36,7 @@ public OpenIdConnectToken(String redirectUri, String state, String nonce) { @Override public String principal() { - return ""; + return ""; } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RelyingPartyConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RelyingPartyConfiguration.java new file mode 100644 index 0000000000000..516f787d0efeb --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/RelyingPartyConfiguration.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.oidc; + +import java.util.List; +import java.util.Objects; + +/** + * A Class that contains all the OpenID Connect Relying Party configuration + */ +public class RelyingPartyConfiguration { + private final String clientId; + private final String redirectUri; + private final String responseType; + private final List requestedScopes; + + public RelyingPartyConfiguration(String clientId, String redirectUri, String responseType, List requestedScopes) { + this.clientId = Objects.requireNonNull(clientId, "clientId must be provided"); + this.redirectUri = Objects.requireNonNull(redirectUri, "redirectUri must be provided"); + this.responseType = Objects.requireNonNull(responseType, "responseType must be provided"); + this.requestedScopes = Objects.requireNonNull(requestedScopes, "requestedScopes must be provided"); + } + + public String getClientId() { + return clientId; + } + + public String getRedirectUri() { + return redirectUri; + } + + public String getResponseType() { + return responseType; + } + + public List getRequestedScopes() { + return requestedScopes; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java index 0ff8cb8027ad6..a8775271a879a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oidc/RestOpenIdConnectPrepareAuthenticationAction.java @@ -31,13 +31,11 @@ */ public class RestOpenIdConnectPrepareAuthenticationAction extends OpenIdConnectBaseRestHandler { - static final ObjectParser PARSER = new
ObjectParser<>("oidc_prepare_authentication", OpenIdConnectPrepareAuthenticationRequest::new); static { PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setRealmName, new ParseField("realm")); - PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setState, new ParseField("state")); - PARSER.declareString(OpenIdConnectPrepareAuthenticationRequest::setNonce, new ParseField("nonce")); } public RestOpenIdConnectPrepareAuthenticationAction(Settings settings, RestController controller, XPackLicenseState licenseState) { @@ -56,11 +54,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien public RestResponse buildResponse(OpenIdConnectPrepareAuthenticationResponse response, XContentBuilder builder) throws Exception { logger.trace("OIDC Prepare Authentication Response: " + response); - builder.startObject(); - builder.field("authentication_request_url", response.getAuthenticationRequestUrl()); - builder.field("state", response.getState()); - builder.endObject(); - return new BytesRestResponse(RestStatus.OK, builder); + return new BytesRestResponse(RestStatus.OK, response.toXContent(builder, request)); } }); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java index 0d8142fc6fdf0..fe3e5c5dcc5f1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectAuthenticateRequestTests.java @@ -27,8 +27,7 @@ public void testSerialization() throws IOException { final BytesStreamOutput out = new BytesStreamOutput(); request.writeTo(out); - final OpenIdConnectAuthenticateRequest unserialized = new OpenIdConnectAuthenticateRequest(); - 
unserialized.readFrom(out.bytes().streamInput()); + final OpenIdConnectAuthenticateRequest unserialized = new OpenIdConnectAuthenticateRequest(out.bytes().streamInput()); assertThat(unserialized.getRedirectUri(), equalTo(redirectUri)); assertThat(unserialized.getState(), equalTo(state)); assertThat(unserialized.getNonce(), equalTo(nonce)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java index 3080cbcbeae16..bfff933e2c7ad 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/OpenIdConnectPrepareAuthenticationRequestTests.java @@ -19,27 +19,17 @@ public class OpenIdConnectPrepareAuthenticationRequestTests extends ESTestCase { public void testSerialization() throws IOException { final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest(); - final String nonce = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); - final String state = randomBoolean() ? 
null : randomAlphaOfLengthBetween(8, 12); - request.setState(state); - request.setNonce(nonce); request.setRealmName("oidc-realm1"); final BytesStreamOutput out = new BytesStreamOutput(); request.writeTo(out); - final OpenIdConnectPrepareAuthenticationRequest unserialized = new OpenIdConnectPrepareAuthenticationRequest(); - unserialized.readFrom(out.bytes().streamInput()); + final OpenIdConnectPrepareAuthenticationRequest unserialized = + new OpenIdConnectPrepareAuthenticationRequest(out.bytes().streamInput()); assertThat(unserialized.getRealmName(), equalTo("oidc-realm1")); - assertThat(unserialized.getState(), equalTo(state)); - assertThat(unserialized.getNonce(), equalTo(nonce)); } public void testValidation() { - final String nonce = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); - final String state = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12); final OpenIdConnectPrepareAuthenticationRequest request = new OpenIdConnectPrepareAuthenticationRequest(); - request.setState(state); - request.setNonce(nonce); final ActionRequestValidationException validation = request.validate(); assertNotNull(validation); assertThat(validation.validationErrors().size(), equalTo(1)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index edb8444d92083..1b8cbea8dde53 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -44,12 +44,11 @@ public void testIncorrectResponseTypeThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "hybrid"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); - assertThat(exception.getMessage(), Matchers.containsString("Invalid response type provided")); + assertThat(exception.getMessage(), Matchers.containsString("value can only be code or id_token")); } public void testMissingAuthorizationEndpointThrowsError() { @@ -58,7 +57,6 @@ public void testMissingAuthorizationEndpointThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -73,7 +71,6 @@ public void testMissingIssuerThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, 
OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -88,7 +85,6 @@ public void testMissingNameTypeThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -103,7 +99,6 @@ public void testMissingRedirectUriThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); @@ -112,29 +107,13 @@ public void testMissingRedirectUriThrowsError() { Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI))); } - public void testMissingAllowedAlgorithms() { - final Settings.Builder settingsBuilder = Settings.builder() - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") - 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - SettingsException exception = expectThrows(SettingsException.class, () -> { - new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - }); - assertThat(exception.getMessage(), - Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS))); - } - public void testMissingClientIdThrowsError() { final Settings.Builder settingsBuilder = Settings.builder() .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512"); + .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); SettingsException exception = expectThrows(SettingsException.class, () -> { new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); }); @@ -149,58 +128,18 @@ public void testBuilidingAuthenticationRequest() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), Arrays.asList("openid", "scope1", "scope2")); final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - final String nonce = randomAlphaOfLength(12); - final String state = randomAlphaOfLength(12); - final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(state, nonce); + final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(); + final String state = response.getState(); + final String nonce = response.getNonce(); assertThat(response.getAuthenticationRequestUrl(), equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2&client_id=rp-my&state=" + state + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); - assertThat(response.getState(), equalTo(state)); } - public void testBuilidingAuthenticationRequestWithoutState() { - final Settings.Builder settingsBuilder = Settings.builder() - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") - .put(getFullSettingKey(REALM_NAME, 
OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), - Arrays.asList("openid", "scope1", "scope2")); - final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - final String nonce = randomAlphaOfLength(12); - final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, nonce); - final String generatedState = response.getState(); - assertThat(response.getAuthenticationRequestUrl(), - equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2&client_id=rp-my&state=" - + generatedState + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); - } - - public void testBuilidingAuthenticationRequestWithoutStateAndNonce() { - final Settings.Builder settingsBuilder = Settings.builder() - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), - Arrays.asList("openid", "scope1", "scope2")); - final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null); - final String generatedState = 
response.getState(); - assertThat(response.getAuthenticationRequestUrl(), - equalTo("https://op.example.com/login?response_type=code&scope=openid+scope1+scope2&client_id=rp-my&state=" - + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); - } public void testBuilidingAuthenticationRequestWithDefaultScope() { final Settings.Builder settingsBuilder = Settings.builder() @@ -209,13 +148,13 @@ public void testBuilidingAuthenticationRequestWithDefaultScope() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_ALLOWED_SIGNATURE_ALGORITHMS), "HS256", "HS512") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build())); - final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null); - final String generatedState = response.getState(); + final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(); + final String state = response.getState(); + final String nonce = response.getNonce(); assertThat(response.getAuthenticationRequestUrl(), equalTo("https://op.example.com/login?response_type=code&scope=openid" - + "&client_id=rp-my&state=" + generatedState + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); + + "&client_id=rp-my&state=" + state + "&nonce=" + nonce + "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb")); } private RealmConfig buildConfig(Settings realmSettings) {