diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/AuthenticationFilterInitializer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/AuthenticationFilterInitializer.java
new file mode 100644
index 000000000000..f3a10d6a54e3
--- /dev/null
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/AuthenticationFilterInitializer.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.lib;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.http.FilterContainer;
+import org.apache.hadoop.hbase.http.FilterInitializer;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * This class is copied from Hadoop. It initializes the hadoop-auth AuthenticationFilter, which
+ * provides support for Kerberos HTTP SPNEGO authentication.
+ *
+ * <p>It enables anonymous access, simple/pseudo authentication and Kerberos HTTP SPNEGO
+ * authentication for HBase web UI endpoints.
+ *
+ * <p>Refer to the {@code core-default.xml} file, after the comment 'HTTP Authentication', for
+ * details on the configuration options. All related configuration properties have
+ * 'hadoop.http.authentication.' as their prefix.
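+ *
+ * <p>For illustration, a Kerberos SPNEGO setup might use properties like the following (sample
+ * values only):
+ *
+ * <pre>
+ * hadoop.http.authentication.type=kerberos
+ * hadoop.http.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
+ * hadoop.http.authentication.kerberos.keytab=/etc/security/keytabs/spnego.service.keytab
+ * </pre>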
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
+public class AuthenticationFilterInitializer extends FilterInitializer {
+
+ static final String PREFIX = "hadoop.http.authentication.";
+
+ /**
+ * Initializes hadoop-auth AuthenticationFilter.
+ *
+ * <p>Propagates all Hadoop configuration properties prefixed with "hadoop.http.authentication."
+ * to the hadoop-auth AuthenticationFilter configuration.
+ * @param container The filter container
+ * @param conf Configuration for run-time parameters
+ */
+ @Override
+ public void initFilter(FilterContainer container, Configuration conf) {
+ Map<String, String> filterConfig = getFilterConfigMap(conf, PREFIX);
+
+ container.addFilter("authentication", AuthenticationFilter.class.getName(), filterConfig);
+ }
+
+ public static Map<String, String> getFilterConfigMap(Configuration conf, String prefix) {
+ Map<String, String> filterConfig = new HashMap<>();
+
+ // setting the cookie path to root '/' so it is used for all resources.
+ filterConfig.put(AuthenticationFilter.COOKIE_PATH, "/");
+ Map<String, String> propsWithPrefix = conf.getPropsWithPrefix(prefix);
+
+ for (Map.Entry<String, String> entry : propsWithPrefix.entrySet()) {
+ filterConfig.put(entry.getKey(), entry.getValue());
+ }
+
+ // Resolve _HOST into bind address
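+ // e.g. HTTP/_HOST@EXAMPLE.COM becomes HTTP/<bind host>@EXAMPLE.COM (realm shown is illustrative)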
+ String bindAddress = conf.get(HttpServer.BIND_ADDRESS);
+ String principal = filterConfig.get(KerberosAuthenticationHandler.PRINCIPAL);
+ if (principal != null) {
+ try {
+ principal = SecurityUtil.getServerPrincipal(principal, bindAddress);
+ } catch (IOException ex) {
+ throw new RuntimeException("Could not resolve Kerberos principal name: " + ex.toString(),
+ ex);
+ }
+ filterConfig.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
+ }
+ return filterConfig;
+ }
+
+}
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/LdapConstants.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/LdapConstants.java
new file mode 100644
index 000000000000..a73aaa810c3d
--- /dev/null
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/LdapConstants.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+/**
+ * This class defines the constants used by the LDAP integration tests.
+ */
+public final class LdapConstants {
+
+ /**
+ * Private constructor: this class only defines constants and is not expected to be
+ * instantiated.
+ */
+ private LdapConstants() {
+ }
+
+ public static final String LDAP_BASE_DN = "dc=example,dc=com";
+ public static final String LDAP_SERVER_ADDR = "localhost";
+
+}
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestLdapHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestLdapHttpServer.java
new file mode 100644
index 000000000000..8bb48d50753e
--- /dev/null
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestLdapHttpServer.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.annotations.ApplyLdifs;
+import org.apache.directory.server.core.annotations.ContextEntry;
+import org.apache.directory.server.core.annotations.CreateDS;
+import org.apache.directory.server.core.annotations.CreatePartition;
+import org.apache.directory.server.core.integ.CreateLdapServerRule;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test class for LDAP authentication on the HttpServer.
+ */
+@Category({ MiscTests.class, SmallTests.class })
+@CreateLdapServer(
+ transports = { @CreateTransport(protocol = "LDAP", address = LdapConstants.LDAP_SERVER_ADDR), })
+@CreateDS(allowAnonAccess = true,
+ partitions = { @CreatePartition(name = "Test_Partition", suffix = LdapConstants.LDAP_BASE_DN,
+ contextEntry = @ContextEntry(entryLdif = "dn: " + LdapConstants.LDAP_BASE_DN + " \n"
+ + "dc: example\n" + "objectClass: top\n" + "objectClass: domain\n\n")) })
+@ApplyLdifs({ "dn: uid=bjones," + LdapConstants.LDAP_BASE_DN, "cn: Bob Jones", "sn: Jones",
+ "objectClass: inetOrgPerson", "uid: bjones", "userPassword: p@ssw0rd" })
+public class TestLdapHttpServer extends HttpServerFunctionalTest {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestLdapHttpServer.class);
+ @ClassRule
+ public static CreateLdapServerRule serverRule = new CreateLdapServerRule();
+
+ private static final Logger LOG = LoggerFactory.getLogger(TestLdapHttpServer.class);
+
+ private static HttpServer server;
+ private static URL baseUrl;
+
+ @BeforeClass
+ public static void setupServer() throws Exception {
+ Configuration conf = new Configuration();
+ buildLdapConfiguration(conf);
+ server = createTestServer(conf);
+ server.addUnprivilegedServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
+ server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
+ server.start();
+ baseUrl = getServerURL(server);
+
+ LOG.info("HTTP server started: " + baseUrl);
+ }
+
+ @AfterClass
+ public static void stopServer() throws Exception {
+ try {
+ if (null != server) {
+ server.stop();
+ }
+ } catch (Exception e) {
+ LOG.info("Failed to stop info server", e);
+ }
+ }
+
+ private static Configuration buildLdapConfiguration(Configuration conf) {
+
+ conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
+
+ // Enable LDAP (pre-req)
+ conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "ldap");
+ conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
+ "org.apache.hadoop.hbase.http.lib.AuthenticationFilterInitializer");
+ conf.set("hadoop.http.authentication.type", "ldap");
+ conf.set("hadoop.http.authentication.ldap.providerurl", String.format("ldap://%s:%s",
+ LdapConstants.LDAP_SERVER_ADDR, serverRule.getLdapServer().getPort()));
+ conf.set("hadoop.http.authentication.ldap.enablestarttls", "false");
+ conf.set("hadoop.http.authentication.ldap.basedn", LdapConstants.LDAP_BASE_DN);
+ return conf;
+ }
+
+ @Test
+ public void testUnauthorizedClientsDisallowed() throws IOException {
+ URL url = new URL(getServerURL(server), "/echo?a=b");
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+ }
+
+ @Test
+ public void testAllowedClient() throws IOException {
+ URL url = new URL(getServerURL(server), "/echo?a=b");
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
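+ // Basic auth carries base64("user:password"); a line length of 0 disables chunking so the
+ // encoded value stays on a single header line.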
+ final Base64 base64 = new Base64(0);
+ String userCredentials = "bjones:p@ssw0rd";
+ String basicAuth = "Basic " + base64.encodeToString(userCredentials.getBytes());
+ conn.setRequestProperty("Authorization", basicAuth);
+ assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+ }
+
+ @Test
+ public void testWrongAuthClientsDisallowed() throws IOException {
+ URL url = new URL(getServerURL(server), "/echo?a=b");
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ final Base64 base64 = new Base64(0);
+ String userCredentials = "bjones:password";
+ String basicAuth = "Basic " + base64.encodeToString(userCredentials.getBytes());
+ conn.setRequestProperty("Authorization", basicAuth);
+ assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+ }
+
+}
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestAuthenticationFilterInitializer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestAuthenticationFilterInitializer.java
new file mode 100644
index 000000000000..68c48f282737
--- /dev/null
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/lib/TestAuthenticationFilterInitializer.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http.lib;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.mockito.ArgumentMatchers.any;
+
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.http.FilterContainer;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+@Category({ MiscTests.class, SmallTests.class })
+public class TestAuthenticationFilterInitializer {
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestAuthenticationFilterInitializer.class);
+
+ @Test
+ public void testConfiguration() throws Exception {
+ Configuration conf = new Configuration();
+ conf.set("hadoop.http.authentication.foo", "bar");
+
+ conf.set(HttpServer.BIND_ADDRESS, "barhost");
+
+ FilterContainer container = Mockito.mock(FilterContainer.class);
+ Mockito.doAnswer(new Answer() {
+ @Override
+ public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+ Object[] args = invocationOnMock.getArguments();
+
+ assertEquals("authentication", args[0]);
+
+ assertEquals(AuthenticationFilter.class.getName(), args[1]);
+
+ Map<String, String> conf = (Map<String, String>) args[2];
+ assertEquals("/", conf.get("cookie.path"));
+
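+ // The expected values below are the hadoop.http.authentication.* defaults from hadoop-common's
+ // core-default.xml, with _HOST in the Kerberos principal resolved to the configured bind host.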
+ assertEquals("simple", conf.get("type"));
+ assertEquals("36000", conf.get("token.validity"));
+ assertNull(conf.get("cookie.domain"));
+ assertEquals("true", conf.get("simple.anonymous.allowed"));
+ assertEquals("HTTP/barhost@LOCALHOST", conf.get("kerberos.principal"));
+ assertEquals(System.getProperty("user.home") + "/hadoop.keytab",
+ conf.get("kerberos.keytab"));
+ assertEquals("bar", conf.get("foo"));
+
+ return null;
+ }
+ }).when(container).addFilter(any(), any(), any());
+
+ new AuthenticationFilterInitializer().initFilter(container, conf);
+ }
+
+}
diff --git a/pom.xml b/pom.xml
index c88a72c8fe04..3e951d669095 100644
--- a/pom.xml
+++ b/pom.xml
@@ -725,6 +725,9 @@
none
1.2.0
+
+ 2.0.0.AM26
+ 2.0.0