i
remoteExc = null;
}
if (id < 0) {
- if (connectionRegistryCall != null) {
- LOG.debug("process connection registry call");
- finishCall(responseHeader, in, connectionRegistryCall);
- connectionRegistryCall = null;
+ LOG.debug("process preamble call response with response type {}",
+ preambleCall != null
+ ? preambleCall.responseDefaultType.getDescriptorForType().getName()
+ : "null");
+ if (preambleCall == null) {
+ // fall through so later we will skip this response
+ LOG.warn("Got a negative call id {} but there is no preamble call", id);
+ } else {
+ if (remoteExc != null) {
+ preambleCall.setException(remoteExc);
+ } else {
+ finishCall(responseHeader, in, preambleCall);
+ }
return;
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SecurityNotEnabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SecurityNotEnabledException.java
new file mode 100644
index 000000000000..207188de8c6e
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SecurityNotEnabledException.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Will be thrown when the server receives a security preamble call asking for the server principal
+ * but security is not enabled for this server.
+ *
+ * This exception is not thrown to the upper layer, so it is marked as IA.Private.
+ */
+@InterfaceAudience.Private
+public class SecurityNotEnabledException extends HBaseIOException {
+
+ private static final long serialVersionUID = -3682812966232247662L;
+
+}
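As an aside, a minimal sketch (not part of this patch) of the client-side decision this exception enables: if the security preamble call fails with SecurityNotEnabledException, the client can drop down to simple authentication instead of failing the connection. The enum and method names below are illustrative only.

  enum SaslDecision { USE_SIMPLE, FAIL }

  // choose how to continue after the security preamble call fails
  static SaslDecision onSecurityPreambleFailure(Throwable error) {
    if (error instanceof SecurityNotEnabledException) {
      return SaslDecision.USE_SIMPLE; // server runs without security, just use simple auth
    }
    return SaslDecision.FAIL;         // genuine failure, surface to the caller
  }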
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java
index 87b2287a6014..4e6f2eab4781 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java
@@ -45,38 +45,38 @@ public abstract class AbstractHBaseSaslRpcClient {
/**
* Create a HBaseSaslRpcClient for an authentication method
- * @param conf the configuration object
- * @param provider the authentication provider
- * @param token token to use if needed by the authentication method
- * @param serverAddr the address of the hbase service
- * @param securityInfo the security details for the remote hbase service
- * @param fallbackAllowed does the client allow fallback to simple authentication
+ * @param conf the configuration object
+ * @param provider the authentication provider
+ * @param token token to use if needed by the authentication method
+ * @param serverAddr the address of the hbase service
+ * @param servicePrincipal the service principal to use if needed by the authentication method
+ * @param fallbackAllowed does the client allow fallback to simple authentication
*/
protected AbstractHBaseSaslRpcClient(Configuration conf,
SaslClientAuthenticationProvider provider, Token<? extends TokenIdentifier> token,
- InetAddress serverAddr, SecurityInfo securityInfo, boolean fallbackAllowed) throws IOException {
- this(conf, provider, token, serverAddr, securityInfo, fallbackAllowed, "authentication");
+ InetAddress serverAddr, String servicePrincipal, boolean fallbackAllowed) throws IOException {
+ this(conf, provider, token, serverAddr, servicePrincipal, fallbackAllowed, "authentication");
}
/**
* Create a HBaseSaslRpcClient for an authentication method
- * @param conf configuration object
- * @param provider the authentication provider
- * @param token token to use if needed by the authentication method
- * @param serverAddr the address of the hbase service
- * @param securityInfo the security details for the remote hbase service
- * @param fallbackAllowed does the client allow fallback to simple authentication
- * @param rpcProtection the protection level ("authentication", "integrity" or "privacy")
+ * @param conf configuration object
+ * @param provider the authentication provider
+ * @param token token to use if needed by the authentication method
+ * @param serverAddr the address of the hbase service
+ * @param servicePrincipal the service principal to use if needed by the authentication method
+ * @param fallbackAllowed does the client allow fallback to simple authentication
+ * @param rpcProtection the protection level ("authentication", "integrity" or "privacy")
*/
protected AbstractHBaseSaslRpcClient(Configuration conf,
SaslClientAuthenticationProvider provider, Token<? extends TokenIdentifier> token,
- InetAddress serverAddr, SecurityInfo securityInfo, boolean fallbackAllowed,
- String rpcProtection) throws IOException {
+ InetAddress serverAddr, String servicePrincipal, boolean fallbackAllowed, String rpcProtection)
+ throws IOException {
this.fallbackAllowed = fallbackAllowed;
saslProps = SaslUtil.initSaslProperties(rpcProtection);
saslClient =
- provider.createClient(conf, serverAddr, securityInfo, token, fallbackAllowed, saslProps);
+ provider.createClient(conf, serverAddr, servicePrincipal, token, fallbackAllowed, saslProps);
if (saslClient == null) {
throw new IOException(
"Authentication provider " + provider.getClass() + " returned a null SaslClient");
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
index ace1c38ab22a..ebf0a7f875fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
@@ -63,15 +63,15 @@ public class HBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {
private boolean initStreamForCrypto;
public HBaseSaslRpcClient(Configuration conf, SaslClientAuthenticationProvider provider,
- Token<? extends TokenIdentifier> token, InetAddress serverAddr, SecurityInfo securityInfo,
+ Token<? extends TokenIdentifier> token, InetAddress serverAddr, String servicePrincipal,
boolean fallbackAllowed) throws IOException {
- super(conf, provider, token, serverAddr, securityInfo, fallbackAllowed);
+ super(conf, provider, token, serverAddr, servicePrincipal, fallbackAllowed);
}
public HBaseSaslRpcClient(Configuration conf, SaslClientAuthenticationProvider provider,
- Token<? extends TokenIdentifier> token, InetAddress serverAddr, SecurityInfo securityInfo,
+ Token<? extends TokenIdentifier> token, InetAddress serverAddr, String servicePrincipal,
boolean fallbackAllowed, String rpcProtection, boolean initStreamForCrypto) throws IOException {
- super(conf, provider, token, serverAddr, securityInfo, fallbackAllowed, rpcProtection);
+ super(conf, provider, token, serverAddr, servicePrincipal, fallbackAllowed, rpcProtection);
this.initStreamForCrypto = initStreamForCrypto;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
index fe5481a10b25..47d380d71046 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
@@ -40,9 +40,9 @@ public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {
private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClient.class);
public NettyHBaseSaslRpcClient(Configuration conf, SaslClientAuthenticationProvider provider,
- Token<? extends TokenIdentifier> token, InetAddress serverAddr, SecurityInfo securityInfo,
+ Token<? extends TokenIdentifier> token, InetAddress serverAddr, String serverPrincipal,
boolean fallbackAllowed, String rpcProtection) throws IOException {
- super(conf, provider, token, serverAddr, securityInfo, fallbackAllowed, rpcProtection);
+ super(conf, provider, token, serverAddr, serverPrincipal, fallbackAllowed, rpcProtection);
}
public void setupSaslHandler(ChannelPipeline p, String addAfter) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
index cc71355d4297..567b5675b710 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
@@ -68,14 +68,14 @@ public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler<
*/
public NettyHBaseSaslRpcClientHandler(Promise<Boolean> saslPromise, UserGroupInformation ugi,
SaslClientAuthenticationProvider provider, Token<? extends TokenIdentifier> token,
- InetAddress serverAddr, SecurityInfo securityInfo, boolean fallbackAllowed, Configuration conf)
+ InetAddress serverAddr, String serverPrincipal, boolean fallbackAllowed, Configuration conf)
throws IOException {
this.saslPromise = saslPromise;
this.ugi = ugi;
this.conf = conf;
this.provider = provider;
this.saslRpcClient = new NettyHBaseSaslRpcClient(conf, provider, token, serverAddr,
- securityInfo, fallbackAllowed, conf.get("hbase.rpc.protection",
+ serverPrincipal, fallbackAllowed, conf.get("hbase.rpc.protection",
SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
index 2e16d5646953..f330883a811b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
@@ -17,10 +17,14 @@
*/
package org.apache.hadoop.hbase.security;
+import java.util.Arrays;
+import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind;
import org.apache.hadoop.hbase.shaded.protobuf.generated.BootstrapNodeProtos;
@@ -51,7 +55,8 @@ public class SecurityInfo {
infos.put(MasterProtos.HbckService.getDescriptor().getName(),
new SecurityInfo(SecurityConstants.MASTER_KRB_PRINCIPAL, Kind.HBASE_AUTH_TOKEN));
infos.put(RegistryProtos.ClientMetaService.getDescriptor().getName(),
- new SecurityInfo(SecurityConstants.MASTER_KRB_PRINCIPAL, Kind.HBASE_AUTH_TOKEN));
+ new SecurityInfo(Kind.HBASE_AUTH_TOKEN, SecurityConstants.MASTER_KRB_PRINCIPAL,
+ SecurityConstants.REGIONSERVER_KRB_PRINCIPAL));
infos.put(BootstrapNodeProtos.BootstrapNodeService.getDescriptor().getName(),
new SecurityInfo(SecurityConstants.REGIONSERVER_KRB_PRINCIPAL, Kind.HBASE_AUTH_TOKEN));
infos.put(LockServiceProtos.LockService.getDescriptor().getName(),
@@ -75,16 +80,33 @@ public static SecurityInfo getInfo(String serviceName) {
return infos.get(serviceName);
}
- private final String serverPrincipal;
+ private final List<String> serverPrincipals;
private final Kind tokenKind;
public SecurityInfo(String serverPrincipal, Kind tokenKind) {
- this.serverPrincipal = serverPrincipal;
+ this(tokenKind, serverPrincipal);
+ }
+
+ public SecurityInfo(Kind tokenKind, String... serverPrincipal) {
+ Preconditions.checkArgument(serverPrincipal.length > 0);
this.tokenKind = tokenKind;
+ this.serverPrincipals = Arrays.asList(serverPrincipal);
}
+ /**
+ * Since 2.5.8 and 2.6.0, will be removed in 3.0.0. Use {@link #getServerPrincipals()} instead.
+ *
+ * Although this class is IA.Private, we leak this class in
+ * {@code SaslClientAuthenticationProvider}, so we need to align with the deprecation cycle of that
+ * class.
+ */
+ @Deprecated
public String getServerPrincipal() {
- return serverPrincipal;
+ return serverPrincipals.get(0);
+ }
+
+ public List<String> getServerPrincipals() {
+ return serverPrincipals;
}
public Kind getTokenKind() {
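A minimal usage sketch (not part of this patch) of how the new varargs constructor and getServerPrincipals() compose: each argument is a configuration key naming a candidate Kerberos principal, kept in order, and the deprecated getServerPrincipal() still returns the first key for old callers. The helper name below is illustrative only.

  static List<String> clientMetaPrincipalKeys() {
    SecurityInfo info = new SecurityInfo(Kind.HBASE_AUTH_TOKEN,
      SecurityConstants.MASTER_KRB_PRINCIPAL, SecurityConstants.REGIONSERVER_KRB_PRINCIPAL);
    // info.getServerPrincipal() would return only MASTER_KRB_PRINCIPAL, the first key
    return info.getServerPrincipals();
  }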
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/DigestSaslClientAuthenticationProvider.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/DigestSaslClientAuthenticationProvider.java
index 480e724599bd..65893c1a75ca 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/DigestSaslClientAuthenticationProvider.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/DigestSaslClientAuthenticationProvider.java
@@ -31,7 +31,6 @@
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -46,9 +45,9 @@ public class DigestSaslClientAuthenticationProvider extends DigestSaslAuthentica
implements SaslClientAuthenticationProvider {
@Override
- public SaslClient createClient(Configuration conf, InetAddress serverAddr,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
- Map<String, String> saslProps) throws IOException {
+ public SaslClient createClient(Configuration conf, InetAddress serverAddr, String serverPrincipal,
+ Token<? extends TokenIdentifier> token, boolean fallbackAllowed, Map<String, String> saslProps)
+ throws IOException {
return Sasl.createSaslClient(new String[] { getSaslAuthMethod().getSaslMechanism() }, null,
null, SaslUtil.SASL_DEFAULT_REALM, saslProps, new DigestSaslClientCallbackHandler(token));
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/GssSaslClientAuthenticationProvider.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/GssSaslClientAuthenticationProvider.java
index 218fd13b60c1..77e92b35bd8c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/GssSaslClientAuthenticationProvider.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/GssSaslClientAuthenticationProvider.java
@@ -24,10 +24,7 @@
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityConstants;
-import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -43,46 +40,10 @@ public class GssSaslClientAuthenticationProvider extends GssSaslAuthenticationPr
private static final Logger LOG =
LoggerFactory.getLogger(GssSaslClientAuthenticationProvider.class);
- private static boolean useCanonicalHostname(Configuration conf) {
- return !conf.getBoolean(
- SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS,
- SecurityConstants.DEFAULT_UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS);
- }
-
- public static String getHostnameForServerPrincipal(Configuration conf, InetAddress addr) {
- final String hostname;
-
- if (useCanonicalHostname(conf)) {
- hostname = addr.getCanonicalHostName();
- if (hostname.equals(addr.getHostAddress())) {
- LOG.warn("Canonical hostname for SASL principal is the same with IP address: " + hostname
- + ", " + addr.getHostName() + ". Check DNS configuration or consider "
- + SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS + "=true");
- }
- } else {
- hostname = addr.getHostName();
- }
-
- return hostname.toLowerCase();
- }
-
- String getServerPrincipal(Configuration conf, SecurityInfo securityInfo, InetAddress server)
- throws IOException {
- String hostname = getHostnameForServerPrincipal(conf, server);
-
- String serverKey = securityInfo.getServerPrincipal();
- if (serverKey == null) {
- throw new IllegalArgumentException(
- "Can't obtain server Kerberos config key from SecurityInfo");
- }
- return SecurityUtil.getServerPrincipal(conf.get(serverKey), hostname);
- }
-
@Override
- public SaslClient createClient(Configuration conf, InetAddress serverAddr,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
- Map<String, String> saslProps) throws IOException {
- String serverPrincipal = getServerPrincipal(conf, securityInfo, serverAddr);
+ public SaslClient createClient(Configuration conf, InetAddress serverAddr, String serverPrincipal,
+ Token<? extends TokenIdentifier> token, boolean fallbackAllowed, Map<String, String> saslProps)
+ throws IOException {
LOG.debug("Setting up Kerberos RPC to server={}", serverPrincipal);
String[] names = SaslUtil.splitKerberosName(serverPrincipal);
if (names.length != 3) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProvider.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProvider.java
index bbc5ddac91aa..4e23247ca764 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProvider.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProvider.java
@@ -31,6 +31,7 @@
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
/**
@@ -45,11 +46,33 @@
public interface SaslClientAuthenticationProvider extends SaslAuthenticationProvider {
/**
- * Creates the SASL client instance for this auth'n method.
+ * Creates the SASL client instance for this authentication method.
+ * @deprecated Since 2.5.8 and 2.6.0. Our own code no longer calls this method;
+ * customized authentication methods should implement
+ * {@link #createClient(Configuration, InetAddress, String, Token, boolean, Map)}
+ * instead. Will be removed in 4.0.0.
*/
- SaslClient createClient(Configuration conf, InetAddress serverAddr, SecurityInfo securityInfo,
- Token<? extends TokenIdentifier> token, boolean fallbackAllowed, Map<String, String> saslProps)
- throws IOException;
+ @Deprecated
+ default SaslClient createClient(Configuration conf, InetAddress serverAddr,
+ SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+ Map<String, String> saslProps) throws IOException {
+ throw new UnsupportedOperationException("should not be used any more");
+ }
+
+ /**
+ * Create the SASL client instance for this authentication method.
+ *
+ * The default implementation creates a fake {@link SecurityInfo} and calls the above method, to
+ * keep compatibility with old customized authentication methods.
+ */
+ default SaslClient createClient(Configuration conf, InetAddress serverAddr,
+ String serverPrincipal, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+ Map<String, String> saslProps) throws IOException {
+ String principalKey = "hbase.fake.kerberos.principal";
+ conf.set(principalKey, serverPrincipal);
+ return createClient(conf, serverAddr, new SecurityInfo(principalKey, Kind.HBASE_AUTH_TOKEN),
+ token, fallbackAllowed, saslProps);
+ }
/**
* Constructs a {@link UserInformation} from the given {@link UserGroupInformation}
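A sketch (not part of this patch) of what a customized provider migrating to the new hook could look like; only the overridden method is shown, and the mechanism name plus the null callback handler are placeholders. The point is that the RPC layer now hands over a resolved server principal, so the provider no longer consults SecurityInfo.

  @Override
  public SaslClient createClient(Configuration conf, InetAddress serverAddr, String serverPrincipal,
    Token<? extends TokenIdentifier> token, boolean fallbackAllowed, Map<String, String> saslProps)
    throws IOException {
    // serverPrincipal arrives pre-resolved, e.g. "service/host@REALM"
    String[] names = SaslUtil.splitKerberosName(serverPrincipal);
    return Sasl.createSaslClient(new String[] { "EXAMPLE-MECHANISM" }, null, names[0], names[1],
      saslProps, null);
  }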
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SimpleSaslClientAuthenticationProvider.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SimpleSaslClientAuthenticationProvider.java
index 6fff703689c9..70e469003c87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SimpleSaslClientAuthenticationProvider.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SimpleSaslClientAuthenticationProvider.java
@@ -22,7 +22,6 @@
import java.util.Map;
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
@@ -37,7 +36,7 @@ public class SimpleSaslClientAuthenticationProvider extends SimpleSaslAuthentica
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddress,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+ String serverPrincipal, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
Map<String, String> saslProps) throws IOException {
return null;
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 7b42ba224fac..6b1e7c338329 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -53,10 +53,8 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.junit.ClassRule;
-import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,9 +78,6 @@ public class TestHBaseSaslRpcClient {
private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);
- @Rule
- public ExpectedException exception = ExpectedException.none();
-
@Test
public void testSaslClientUsesGivenRpcProtection() throws Exception {
Token<? extends TokenIdentifier> token =
@@ -90,8 +85,7 @@ public void testSaslClientUsesGivenRpcProtection() throws Exception {
DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider();
for (SaslUtil.QualityOfProtection qop : SaslUtil.QualityOfProtection.values()) {
String negotiatedQop = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider, token,
- Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false, qop.name(),
- false) {
+ Mockito.mock(InetAddress.class), "", false, qop.name(), false) {
public String getQop() {
return saslProps.get(Sasl.QOP);
}
@@ -192,14 +186,14 @@ private boolean assertIOExceptionWhenGetStreamsBeforeConnectCall(String principa
DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider() {
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddress,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+ String serverPrincipal, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
Map<String, String> saslProps) {
return Mockito.mock(SaslClient.class);
}
};
HBaseSaslRpcClient rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,
- createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),
- Mockito.mock(SecurityInfo.class), false);
+ createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class), "",
+ false);
try {
rpcClient.getInputStream();
@@ -224,14 +218,14 @@ private boolean assertIOExceptionThenSaslClientIsNull(String principal, String p
new DigestSaslClientAuthenticationProvider() {
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddress,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token,
- boolean fallbackAllowed, Map<String, String> saslProps) {
+ String serverPrincipal, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+ Map<String, String> saslProps) {
return null;
}
};
new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,
- createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),
- Mockito.mock(SecurityInfo.class), false);
+ createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class), "",
+ false);
return false;
} catch (IOException ex) {
return true;
@@ -254,8 +248,8 @@ private boolean assertSuccessCreationDigestPrincipal(String principal, String pa
try {
rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(),
new DigestSaslClientAuthenticationProvider(),
- createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),
- Mockito.mock(SecurityInfo.class), false);
+ createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class), "",
+ false);
} catch (Exception ex) {
LOG.error(ex.getMessage(), ex);
}
@@ -275,7 +269,7 @@ private boolean assertSuccessCreationSimple() {
private HBaseSaslRpcClient createSaslRpcClientForKerberos() throws IOException {
return new HBaseSaslRpcClient(HBaseConfiguration.create(),
new GssSaslClientAuthenticationProvider(), createTokenMock(), Mockito.mock(InetAddress.class),
- Mockito.mock(SecurityInfo.class), false);
+ "", false);
}
private Token<? extends TokenIdentifier> createTokenMockWithCredentials(String principal,
@@ -291,7 +285,7 @@ private Token extends TokenIdentifier> createTokenMockWithCredentials(String p
private HBaseSaslRpcClient createSaslRpcClientSimple() throws IOException {
return new HBaseSaslRpcClient(HBaseConfiguration.create(),
new SimpleSaslClientAuthenticationProvider(), createTokenMock(),
- Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);
+ Mockito.mock(InetAddress.class), "", false);
}
@SuppressWarnings("unchecked")
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslClientAuthenticationProvider.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslClientAuthenticationProvider.java
index d0930a0f3148..3b83d7dda637 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslClientAuthenticationProvider.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslClientAuthenticationProvider.java
@@ -31,7 +31,6 @@
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProvider;
import org.apache.hadoop.hbase.util.Bytes;
@@ -46,9 +45,9 @@ public class ShadeSaslClientAuthenticationProvider extends ShadeSaslAuthenticati
implements SaslClientAuthenticationProvider {
@Override
- public SaslClient createClient(Configuration conf, InetAddress serverAddr,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
- Map<String, String> saslProps) throws IOException {
+ public SaslClient createClient(Configuration conf, InetAddress serverAddr, String serverPrincipal,
+ Token<? extends TokenIdentifier> token, boolean fallbackAllowed, Map<String, String> saslProps)
+ throws IOException {
return Sasl.createSaslClient(new String[] { getSaslAuthMethod().getSaslMechanism() }, null,
null, SaslUtil.SASL_DEFAULT_REALM, saslProps, new ShadeSaslClientCallbackHandler(token));
}
diff --git a/hbase-protocol-shaded/src/main/protobuf/rpc/RPC.proto b/hbase-protocol-shaded/src/main/protobuf/rpc/RPC.proto
index e992e681fbff..3e44f8e16fa6 100644
--- a/hbase-protocol-shaded/src/main/protobuf/rpc/RPC.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/rpc/RPC.proto
@@ -159,3 +159,7 @@ message ResponseHeader {
// If present, then an encoded data block follows.
optional CellBlockMeta cell_block_meta = 3;
}
+
+message SecurityPreamableResponse {
+ required string server_principal = 1;
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 0876a1fd55f4..a84d132a0132 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -67,6 +67,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.gson.Gson;
import org.apache.hbase.thirdparty.com.google.protobuf.BlockingService;
import org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor;
@@ -117,6 +118,7 @@ public abstract class RpcServer implements RpcServerInterface, ConfigurationObse
LoggerFactory.getLogger("SecurityLogger." + Server.class.getName());
protected SecretManager<TokenIdentifier> secretManager;
protected final Map<String, String> saslProps;
+ protected final String serverPrincipal;
protected ServiceAuthorizationManager authManager;
@@ -211,7 +213,7 @@ public abstract class RpcServer implements RpcServerInterface, ConfigurationObse
protected final RpcScheduler scheduler;
- protected UserProvider userProvider;
+ protected final UserProvider userProvider;
protected final ByteBuffAllocator bbAllocator;
@@ -300,8 +302,11 @@ public RpcServer(final Server server, final String name,
if (isSecurityEnabled) {
saslProps = SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
+ serverPrincipal = Preconditions.checkNotNull(userProvider.getCurrentUserName(),
+ "can not get current user name when security is enabled");
} else {
saslProps = Collections.emptyMap();
+ serverPrincipal = HConstants.EMPTY_STRING;
}
this.isOnlineLogProviderEnabled = getIsOnlineLogProviderEnabled(conf);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
index be97ad582c37..31f46f30c382 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
@@ -88,6 +88,7 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.SecurityPreamableResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetConnectionRegistryResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo;
@@ -695,6 +696,13 @@ private void doBadPreambleHandling(String msg, Exception e) throws IOException {
doRespond(getErrorResponse(msg, e));
}
+ private void doPreambleResponse(Message resp) throws IOException {
+ ResponseHeader header = ResponseHeader.newBuilder().setCallId(-1).build();
+ ByteBuffer buf = ServerCall.createHeaderAndMessageBytes(resp, header, 0, null);
+ BufferChain bufChain = new BufferChain(buf);
+ doRespond(() -> bufChain);
+ }
+
private boolean doConnectionRegistryResponse() throws IOException {
if (!(rpcServer.server instanceof ConnectionRegistryEndpoint)) {
// should be in tests or some scenarios where we should not reach here
@@ -710,13 +718,22 @@ private boolean doConnectionRegistryResponse() throws IOException {
}
GetConnectionRegistryResponse resp =
GetConnectionRegistryResponse.newBuilder().setClusterId(clusterId).build();
- ResponseHeader header = ResponseHeader.newBuilder().setCallId(-1).build();
- ByteBuffer buf = ServerCall.createHeaderAndMessageBytes(resp, header, 0, null);
- BufferChain bufChain = new BufferChain(buf);
- doRespond(() -> bufChain);
+ doPreambleResponse(resp);
return true;
}
+ private void doSecurityPreambleResponse() throws IOException {
+ if (rpcServer.isSecurityEnabled) {
+ SecurityPreamableResponse resp = SecurityPreamableResponse.newBuilder()
+ .setServerPrincipal(rpcServer.serverPrincipal).build();
+ doPreambleResponse(resp);
+ } else {
+ // security is not enabled, so no principal is needed when connecting; throw a special exception
+ // to let the client know it should just use simple authentication
+ doRespond(getErrorResponse("security is not enabled", new SecurityNotEnabledException()));
+ }
+ }
+
protected final void callCleanupIfNeeded() {
if (callCleanup != null) {
callCleanup.run();
@@ -738,6 +755,13 @@ protected final PreambleResponse processPreamble(ByteBuffer preambleBuffer) thro
) {
return PreambleResponse.CLOSE;
}
+ if (
+ ByteBufferUtils.equals(preambleBuffer, preambleBuffer.position(), 6,
+ RpcClient.SECURITY_PREAMBLE_HEADER, 0, 6)
+ ) {
+ doSecurityPreambleResponse();
+ return PreambleResponse.CONTINUE;
+ }
if (!ByteBufferUtils.equals(preambleBuffer, preambleBuffer.position(), 4, RPC_HEADER, 0, 4)) {
doBadPreambleHandling(
"Expected HEADER=" + Bytes.toStringBinary(RPC_HEADER) + " but received HEADER="
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java
index 9e90a7a31339..1b28c19b4306 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java
@@ -145,7 +145,7 @@ private int readPreamble() throws IOException {
return count;
case CONTINUE:
// wait for the next preamble header
- preambleBuffer.reset();
+ preambleBuffer.clear();
return count;
case CLOSE:
return -1;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestMultipleServerPrincipalsIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestMultipleServerPrincipalsIPC.java
new file mode 100644
index 000000000000..237f1cb40259
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestMultipleServerPrincipalsIPC.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import javax.security.sasl.SaslException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.security.SecurityInfo;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SecurityTests;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.apache.hbase.thirdparty.com.google.common.io.Closeables;
+import org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+import org.apache.hbase.thirdparty.io.netty.handler.codec.DecoderException;
+
+import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos;
+import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind;
+
+/**
+ * Tests for HBASE-28321, where we have multiple server principal candidates for an RPC service.
+ *
+ * Placed here because we need to access some package-private classes in this package.
+ */
+@RunWith(Parameterized.class)
+@Category({ SecurityTests.class, MediumTests.class })
+public class TestMultipleServerPrincipalsIPC {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestMultipleServerPrincipalsIPC.class);
+
+ private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
+
+ private static final File KEYTAB_FILE =
+ new File(TEST_UTIL.getDataTestDir("keytab").toUri().getPath());
+
+ private static MiniKdc KDC;
+ private static String HOST = "localhost";
+ private static String SERVER_PRINCIPAL;
+ private static String SERVER_PRINCIPAL2;
+ private static String CLIENT_PRINCIPAL;
+
+ @Parameter(0)
+ public Class<? extends RpcServer> rpcServerImpl;
+
+ @Parameter(1)
+ public Class<? extends RpcClient> rpcClientImpl;
+
+ private Configuration clientConf;
+ private Configuration serverConf;
+ private UserGroupInformation clientUGI;
+ private UserGroupInformation serverUGI;
+ private RpcServer rpcServer;
+ private RpcClient rpcClient;
+
+ @Parameters(name = "{index}: rpcServerImpl={0}, rpcClientImpl={1}")
+ public static List<Object[]> params() {
+ List<Object[]> params = new ArrayList<>();
+ List<Class<? extends RpcServer>> rpcServerImpls =
+ Arrays.asList(NettyRpcServer.class, SimpleRpcServer.class);
+ List<Class<? extends RpcClient>> rpcClientImpls =
+ Arrays.asList(NettyRpcClient.class, BlockingRpcClient.class);
+ for (Class<? extends RpcServer> rpcServerImpl : rpcServerImpls) {
+ for (Class<? extends RpcClient> rpcClientImpl : rpcClientImpls) {
+ params.add(new Object[] { rpcServerImpl, rpcClientImpl });
+ }
+ }
+ return params;
+ }
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ KDC = TEST_UTIL.setupMiniKdc(KEYTAB_FILE);
+ SERVER_PRINCIPAL = "server/" + HOST + "@" + KDC.getRealm();
+ SERVER_PRINCIPAL2 = "server2/" + HOST + "@" + KDC.getRealm();
+ CLIENT_PRINCIPAL = "client";
+ KDC.createPrincipal(KEYTAB_FILE, CLIENT_PRINCIPAL, SERVER_PRINCIPAL, SERVER_PRINCIPAL2);
+ setSecuredConfiguration(TEST_UTIL.getConfiguration());
+ TEST_UTIL.getConfiguration().setInt("hbase.security.relogin.maxbackoff", 1);
+ TEST_UTIL.getConfiguration().setInt("hbase.security.relogin.maxretries", 0);
+ TEST_UTIL.getConfiguration().setInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, 10);
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() {
+ if (KDC != null) {
+ KDC.stop();
+ }
+ }
+
+ private static void setSecuredConfiguration(Configuration conf) {
+ conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+ conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
+ conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true);
+ }
+
+ private void loginAndStartRpcServer(String principal, int port) throws Exception {
+ UserGroupInformation.setConfiguration(serverConf);
+ serverUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal,
+ KEYTAB_FILE.getCanonicalPath());
+ rpcServer = serverUGI.doAs((PrivilegedExceptionAction<
+ RpcServer>) () -> RpcServerFactory.createRpcServer(null, getClass().getSimpleName(),
+ Lists.newArrayList(
+ new RpcServer.BlockingServiceAndInterface(TestProtobufRpcServiceImpl.SERVICE, null)),
+ new InetSocketAddress(HOST, port), serverConf, new FifoRpcScheduler(serverConf, 1)));
+ rpcServer.start();
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ clientConf = new Configuration(TEST_UTIL.getConfiguration());
+ clientConf.setClass(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcClientImpl,
+ RpcClient.class);
+ String serverPrincipalConfigName = "hbase.test.multiple.principal.first";
+ String serverPrincipalConfigName2 = "hbase.test.multiple.principal.second";
+ clientConf.set(serverPrincipalConfigName, SERVER_PRINCIPAL);
+ clientConf.set(serverPrincipalConfigName2, SERVER_PRINCIPAL2);
+ serverConf = new Configuration(TEST_UTIL.getConfiguration());
+ serverConf.setClass(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY, rpcServerImpl,
+ RpcServer.class);
+ SecurityInfo securityInfo = new SecurityInfo(Kind.HBASE_AUTH_TOKEN, serverPrincipalConfigName2,
+ serverPrincipalConfigName);
+ SecurityInfo.addInfo(TestProtobufRpcProto.getDescriptor().getName(), securityInfo);
+
+ UserGroupInformation.setConfiguration(clientConf);
+ clientUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(CLIENT_PRINCIPAL,
+ KEYTAB_FILE.getCanonicalPath());
+ loginAndStartRpcServer(SERVER_PRINCIPAL, 0);
+ rpcClient = clientUGI.doAs((PrivilegedExceptionAction<RpcClient>) () -> RpcClientFactory
+ .createClient(clientConf, HConstants.DEFAULT_CLUSTER_ID.toString()));
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ Closeables.close(rpcClient, true);
+ rpcServer.stop();
+ }
+
+ private String echo(String msg) throws Exception {
+ return clientUGI.doAs((PrivilegedExceptionAction<String>) () -> {
+ BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(
+ ServerName.valueOf(HOST, rpcServer.getListenerAddress().getPort(), -1), User.getCurrent(),
+ 10000);
+ TestProtobufRpcProto.BlockingInterface stub = TestProtobufRpcProto.newBlockingStub(channel);
+ return stub.echo(null, TestProtos.EchoRequestProto.newBuilder().setMessage(msg).build())
+ .getMessage();
+ });
+ }
+
+ @Test
+ public void testEcho() throws Exception {
+ String msg = "Hello World";
+ assertEquals(msg, echo(msg));
+ }
+
+ @Test
+ public void testMaliciousServer() throws Exception {
+ // reset the server principals so the principal returned by server does not match
+ SecurityInfo securityInfo =
+ SecurityInfo.getInfo(TestProtobufRpcProto.getDescriptor().getName());
+ for (int i = 0; i < securityInfo.getServerPrincipals().size(); i++) {
+ clientConf.set(securityInfo.getServerPrincipals().get(i),
+ "valid_server_" + i + "/" + HOST + "@" + KDC.getRealm());
+ }
+ UndeclaredThrowableException error =
+ assertThrows(UndeclaredThrowableException.class, () -> echo("whatever"));
+ assertThat(error.getCause(), instanceOf(ServiceException.class));
+ assertThat(error.getCause().getCause(), instanceOf(SaslException.class));
+ }
+
+ @Test
+ public void testRememberLastSucceededServerPrincipal() throws Exception {
+ // after this call we will remember the last succeeded server principal
+ assertEquals("a", echo("a"));
+ // shut down the connection, but do not remove it from the pool
+ RpcConnection conn =
+ Iterables.getOnlyElement(((AbstractRpcClient<?>) rpcClient).getConnections().values());
+ conn.shutdown();
+ // recreate rpc server with server principal2
+ int port = rpcServer.getListenerAddress().getPort();
+ rpcServer.stop();
+ serverUGI.logoutUserFromKeytab();
+ loginAndStartRpcServer(SERVER_PRINCIPAL2, port);
+ // this time we will still use the remembered server principal, so we will get a sasl exception
+ UndeclaredThrowableException error =
+ assertThrows(UndeclaredThrowableException.class, () -> echo("a"));
+ assertThat(error.getCause(), instanceOf(ServiceException.class));
+ // created by IPCUtil.wrap, to prepend the server address
+ assertThat(error.getCause().getCause(), instanceOf(IOException.class));
+ // wrapped by IPCUtil.toIOE
+ assertThat(error.getCause().getCause().getCause(), instanceOf(IOException.class));
+ Throwable cause = error.getCause().getCause().getCause().getCause();
+ // for the netty rpc client it is DecoderException, for the blocking rpc client it is already
+ // RemoteException
+ assertThat(cause,
+ either(instanceOf(DecoderException.class)).or(instanceOf(RemoteException.class)));
+ RemoteException rme;
+ if (!(cause instanceof RemoteException)) {
+ assertThat(cause.getCause(), instanceOf(RemoteException.class));
+ rme = (RemoteException) cause.getCause();
+ } else {
+ rme = (RemoteException) cause;
+ }
+ assertEquals(SaslException.class.getName(), rme.getClassName());
+ // the above failure will clear the remembered server principal, so this time we will get the
+ // correct one. We use retry here just because a failure of sasl negotiation will trigger a
+ // relogin and it may take some time, and for netty based implementation the relogin is async
+ TEST_UTIL.waitFor(10000, () -> {
+ try {
+ echo("a");
+ } catch (UndeclaredThrowableException e) {
+ Throwable t = e.getCause().getCause();
+ assertThat(t, instanceOf(IOException.class));
+ if (!(t instanceof FailedServerException)) {
+ // for netty rpc client
+ assertThat(e.getCause().getMessage(),
+ containsString(RpcConnectionConstants.RELOGIN_IS_IN_PROGRESS));
+ }
+ return false;
+ }
+ return true;
+ });
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcSkipInitialSaslHandshake.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcSkipInitialSaslHandshake.java
index bc791754a12e..345514396d6b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcSkipInitialSaslHandshake.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcSkipInitialSaslHandshake.java
@@ -28,6 +28,7 @@
import java.io.File;
import java.net.InetSocketAddress;
+import java.util.Collections;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -125,8 +126,8 @@ public void setUpTest() throws Exception {
@Test
public void test() throws Exception {
SecurityInfo securityInfoMock = Mockito.mock(SecurityInfo.class);
- Mockito.when(securityInfoMock.getServerPrincipal())
- .thenReturn(HBaseKerberosUtils.KRB_PRINCIPAL);
+ Mockito.when(securityInfoMock.getServerPrincipals())
+ .thenReturn(Collections.singletonList(HBaseKerberosUtils.KRB_PRINCIPAL));
SecurityInfo.addInfo("TestProtobufRpcProto", securityInfoMock);
final AtomicReference<NettyServerRpcConnection> conn = new AtomicReference<>(null);
@@ -152,7 +153,6 @@ protected NettyServerRpcConnection createNettyServerRpcConnection(Channel channe
.getMessage();
assertTrue("test".equals(response));
assertFalse(conn.get().useSasl);
-
} finally {
rpcServer.stop();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSecurityRpcSentBytesMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSecurityRpcSentBytesMetrics.java
index b5e46b5c7cf5..a74477bf28c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSecurityRpcSentBytesMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSecurityRpcSentBytesMetrics.java
@@ -27,6 +27,7 @@
import java.io.File;
import java.net.InetSocketAddress;
+import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -122,8 +123,8 @@ public void setUpTest() throws Exception {
@Test
public void test() throws Exception {
SecurityInfo securityInfoMock = Mockito.mock(SecurityInfo.class);
- Mockito.when(securityInfoMock.getServerPrincipal())
- .thenReturn(HBaseKerberosUtils.KRB_PRINCIPAL);
+ Mockito.when(securityInfoMock.getServerPrincipals())
+ .thenReturn(Collections.singletonList(HBaseKerberosUtils.KRB_PRINCIPAL));
SecurityInfo.addInfo("TestProtobufRpcProto", securityInfoMock);
NettyRpcServer rpcServer = new NettyRpcServer(null, getClass().getSimpleName(),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
index 998896c94685..31e01a98ad69 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
@@ -104,7 +104,7 @@ protected static void initKDCAndConf() throws Exception {
TEST_UTIL.getConfiguration().setInt("hbase.security.relogin.maxbackoff", 100);
}
- protected static void stopKDC() throws InterruptedException {
+ protected static void stopKDC() {
if (KDC != null) {
KDC.stop();
}
@@ -192,8 +192,8 @@ public static class CanonicalHostnameTestingAuthenticationProviderSelector
return new SaslClientAuthenticationProvider() {
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddr,
- SecurityInfo securityInfo, Token<? extends TokenIdentifier> token,
- boolean fallbackAllowed, Map<String, String> saslProps) throws IOException {
+ String serverPrincipal, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+ Map<String, String> saslProps) throws IOException {
final String s = conf.get(CANONICAL_HOST_NAME_KEY);
if (s != null) {
try {
@@ -206,7 +206,7 @@ public SaslClient createClient(Configuration conf, InetAddress serverAddr,
}
}
- return delegate.createClient(conf, serverAddr, securityInfo, token, fallbackAllowed,
+ return delegate.createClient(conf, serverAddr, serverPrincipal, token, fallbackAllowed,
saslProps);
}
@@ -385,8 +385,8 @@ private void setCryptoAES(String clientCryptoAES, String serverCryptoAES) {
*/
private void callRpcService(User serverUser, User clientUser) throws Exception {
SecurityInfo securityInfoMock = Mockito.mock(SecurityInfo.class);
- Mockito.when(securityInfoMock.getServerPrincipal())
- .thenReturn(HBaseKerberosUtils.KRB_PRINCIPAL);
+ Mockito.when(securityInfoMock.getServerPrincipals())
+ .thenReturn(Collections.singletonList(HBaseKerberosUtils.KRB_PRINCIPAL));
SecurityInfo.addInfo("TestProtobufRpcProto", securityInfoMock);
InetSocketAddress isa = new InetSocketAddress(HOST, 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestMultipleServerPrincipalsFallbackToSimple.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestMultipleServerPrincipalsFallbackToSimple.java
new file mode 100644
index 000000000000..6f1cc148204a
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestMultipleServerPrincipalsFallbackToSimple.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.security;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.ipc.BlockingRpcClient;
+import org.apache.hadoop.hbase.ipc.FallbackDisallowedException;
+import org.apache.hadoop.hbase.ipc.FifoRpcScheduler;
+import org.apache.hadoop.hbase.ipc.NettyRpcClient;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcClientFactory;
+import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.ipc.RpcServerFactory;
+import org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SecurityTests;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.apache.hbase.thirdparty.com.google.common.io.Closeables;
+import org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel;
+
+import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos;
+import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind;
+
+/**
+ * Test a secure client connecting to a non-secure server, where we have multiple server principal
+ * candidates for an RPC service. See HBASE-28321.
+ */
+@RunWith(Parameterized.class)
+@Category({ SecurityTests.class, MediumTests.class })
+public class TestMultipleServerPrincipalsFallbackToSimple {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestMultipleServerPrincipalsFallbackToSimple.class);
+
+ private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
+
+ private static final File KEYTAB_FILE =
+ new File(TEST_UTIL.getDataTestDir("keytab").toUri().getPath());
+
+ private static MiniKdc KDC;
+ private static String HOST = "localhost";
+ private static String SERVER_PRINCIPAL;
+ private static String SERVER_PRINCIPAL2;
+ private static String CLIENT_PRINCIPAL;
+
+ @Parameter
+ public Class<? extends RpcClient> rpcClientImpl;
+
+ private Configuration clientConf;
+ private UserGroupInformation clientUGI;
+ private RpcServer rpcServer;
+ private RpcClient rpcClient;
+
+ @Parameters(name = "{index}: rpcClientImpl={0}")
+ public static List<Object[]> params() {
+ return Arrays.asList(new Object[] { NettyRpcClient.class },
+ new Object[] { BlockingRpcClient.class });
+ }
+
+ private static void setSecuredConfiguration(Configuration conf) {
+ conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+ conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
+ conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true);
+ }
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ KDC = TEST_UTIL.setupMiniKdc(KEYTAB_FILE);
+ SERVER_PRINCIPAL = "server/" + HOST;
+ SERVER_PRINCIPAL2 = "server2/" + HOST;
+ CLIENT_PRINCIPAL = "client";
+ KDC.createPrincipal(KEYTAB_FILE, CLIENT_PRINCIPAL, SERVER_PRINCIPAL, SERVER_PRINCIPAL2);
+ TEST_UTIL.getConfiguration().setInt("hbase.security.relogin.maxbackoff", 1);
+ TEST_UTIL.getConfiguration().setInt("hbase.security.relogin.maxretries", 0);
+ TEST_UTIL.getConfiguration().setInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, 10);
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ clientConf = new Configuration(TEST_UTIL.getConfiguration());
+ setSecuredConfiguration(clientConf);
+ clientConf.setClass(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcClientImpl,
+ RpcClient.class);
+ String serverPrincipalConfigName = "hbase.test.multiple.principal.first";
+ String serverPrincipalConfigName2 = "hbase.test.multiple.principal.second";
+ clientConf.set(serverPrincipalConfigName, "server/localhost@" + KDC.getRealm());
+ clientConf.set(serverPrincipalConfigName2, "server2/localhost@" + KDC.getRealm());
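+    // Register a SecurityInfo that carries both server principal config keys, so the client has
+    // two candidate server principals to evaluate for this RPC service.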
+ SecurityInfo securityInfo = new SecurityInfo(Kind.HBASE_AUTH_TOKEN, serverPrincipalConfigName2,
+ serverPrincipalConfigName);
+ SecurityInfo.addInfo(TestProtobufRpcProto.getDescriptor().getName(), securityInfo);
+
+ UserGroupInformation.setConfiguration(clientConf);
+ clientUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(CLIENT_PRINCIPAL,
+ KEYTAB_FILE.getCanonicalPath());
+
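+    // The RPC server is created with TEST_UTIL's base configuration, which has no Kerberos
+    // settings, so the server only speaks simple authentication while the client is configured
+    // for Kerberos.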
+ rpcServer = RpcServerFactory.createRpcServer(null, getClass().getSimpleName(),
+ Lists.newArrayList(
+ new RpcServer.BlockingServiceAndInterface(TestProtobufRpcServiceImpl.SERVICE, null)),
+ new InetSocketAddress(HOST, 0), TEST_UTIL.getConfiguration(),
+ new FifoRpcScheduler(TEST_UTIL.getConfiguration(), 1));
+ rpcServer.start();
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ Closeables.close(rpcClient, true);
+ rpcServer.stop();
+ }
+
+ private RpcClient createClient() throws Exception {
+    return clientUGI.doAs((PrivilegedExceptionAction<RpcClient>) () -> RpcClientFactory
+ .createClient(clientConf, HConstants.DEFAULT_CLUSTER_ID.toString()));
+ }
+
+ private String echo(String msg) throws Exception {
+    return clientUGI.doAs((PrivilegedExceptionAction<String>) () -> {
+ BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(
+ ServerName.valueOf(HOST, rpcServer.getListenerAddress().getPort(), -1), User.getCurrent(),
+ 10000);
+ TestProtobufRpcProto.BlockingInterface stub = TestProtobufRpcProto.newBlockingStub(channel);
+ return stub.echo(null, TestProtos.EchoRequestProto.newBuilder().setMessage(msg).build())
+ .getMessage();
+ });
+ }
+
+ @Test
+ public void testAllowFallbackToSimple() throws Exception {
+ clientConf.setBoolean(RpcClient.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY, true);
+ rpcClient = createClient();
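+    // With fallback allowed, the secure client downgrades to simple auth against the non-secure
+    // server and the call succeeds.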
+ assertEquals("allow", echo("allow"));
+ }
+
+ @Test
+ public void testDisallowFallbackToSimple() throws Exception {
+ clientConf.setBoolean(RpcClient.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY, false);
+ rpcClient = createClient();
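+    // The call runs inside UserGroupInformation.doAs, so the failure surfaces as an
+    // UndeclaredThrowableException; unwrap the cause chain to reach the
+    // FallbackDisallowedException thrown when fallback to simple auth is not allowed.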
+ UndeclaredThrowableException error =
+ assertThrows(UndeclaredThrowableException.class, () -> echo("disallow"));
+ Throwable cause = error.getCause().getCause().getCause();
+ assertThat(cause, instanceOf(FallbackDisallowedException.class));
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPCRejectPlainText.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPCRejectPlainText.java
index a6984fcdf3a8..ea9b6948011d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPCRejectPlainText.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPCRejectPlainText.java
@@ -21,6 +21,7 @@
import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.setSecuredConfiguration;
import java.io.File;
+import java.util.Collections;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl;
@@ -66,8 +67,8 @@ public static void setUpBeforeClass() throws Exception {
UGI = loginKerberosPrincipal(KEYTAB_FILE.getCanonicalPath(), PRINCIPAL);
setSecuredConfiguration(util.getConfiguration());
SecurityInfo securityInfoMock = Mockito.mock(SecurityInfo.class);
- Mockito.when(securityInfoMock.getServerPrincipal())
- .thenReturn(HBaseKerberosUtils.KRB_PRINCIPAL);
+ Mockito.when(securityInfoMock.getServerPrincipals())
+ .thenReturn(Collections.singletonList(HBaseKerberosUtils.KRB_PRINCIPAL));
SecurityInfo.addInfo("TestProtobufRpcProto", securityInfoMock);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/CustomSaslAuthenticationProviderTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/CustomSaslAuthenticationProviderTestBase.java
index feba17364cc4..66b65ba03f04 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/CustomSaslAuthenticationProviderTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/CustomSaslAuthenticationProviderTestBase.java
@@ -75,7 +75,6 @@
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.SecureTestCluster;
import org.apache.hadoop.hbase.security.token.TokenProvider;
@@ -202,7 +201,7 @@ public static class InMemoryClientProvider extends AbstractSaslClientAuthenticat
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddr,
-    SecurityInfo securityInfo, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
+    String serverPrincipal, Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
Map<String, String> saslProps) throws IOException {
return Sasl.createSaslClient(new String[] { MECHANISM }, null, null,
SaslUtil.SASL_DEFAULT_REALM, saslProps, new InMemoryClientProviderCallbackHandler(token));
From a3456907e111aff70c4c7f493365a80db0bb7d1a Mon Sep 17 00:00:00 2001
From: Duo Zhang
Date: Tue, 20 Feb 2024 22:02:50 +0800
Subject: [PATCH 2/2] deprecation
---
.../java/org/apache/hadoop/hbase/security/SecurityInfo.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
index f330883a811b..a33f49573dee 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
@@ -94,11 +94,11 @@ public SecurityInfo(Kind tokenKind, String... serverPrincipal) {
}
/**
- * Since 2.5.8 and 2.6.0, will be removed in 3.0.0. Use {@link #getServerPrincipals()} instead.
- *
* Although this class is IA.Private, we leak this class in
* {@code SaslClientAuthenticationProvider}, so need to align with the deprecation cycle for that
* class.
+ * @deprecated Since 2.5.8 and 2.6.0, will be removed in 4.0.0. Use {@link #getServerPrincipals()}
+ * instead.
*/
@Deprecated
public String getServerPrincipal() {