From fde6425513e954742a32de620f3dc3058feb361d Mon Sep 17 00:00:00 2001
From: prasad-acit
Date: Fri, 8 Apr 2022 16:29:20 +0530
Subject: [PATCH] HADOOP-17843. Support IPv6 with IP for internal & external
 communication.

---
 .../java/org/apache/hadoop/net/NetUtils.java  | 27 +++++++++++++++++++
 .../org/apache/hadoop/net/TestNetUtils.java   | 16 +++++++++++
 .../java/org/apache/hadoop/hdfs/DFSUtil.java  |  4 +++
 .../org/apache/hadoop/hdfs/TestDFSUtil.java   |  7 +++++
 .../apache/hadoop/yarn/webapp/WebApps.java    | 11 +++-----
 .../yarn/server/webproxy/WebAppProxy.java     | 12 ++++-----
 6 files changed, 63 insertions(+), 14 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index 19cdbc073672c..cbacddabd1378 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -46,6 +46,7 @@
 import javax.net.SocketFactory;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
@@ -1218,4 +1219,30 @@ public static InetAddress bindToLocalAddress(InetAddress localAddr, boolean
     }
     return null;
   }
+
+  /**
+   * Parse the given address into an (IP address, port number) pair.
+   * @param bindAddress IP address, optionally with a trailing ":port"
+   * @return pair of IP address (IPv6 is bracketed) and port (0 if absent)
+   */
+  public static Pair<String, Integer> parseAddress2IpAndPort(
+      String bindAddress) {
+    String[] parts = org.apache.hadoop.util.StringUtils
+        .split(bindAddress, ':');
+    Pair<String, Integer> pair = null;
+    // More than one ':' means the address is IPv6; last part is the port
+    if (parts.length > 2) {
+      String target = bindAddress;
+      int i = target.lastIndexOf(":");
+      String ipAddress = '[' + target.substring(0, i) + ']';
+      pair = Pair.of(ipAddress, Integer.parseInt(parts[parts.length - 1]));
+    } else if (parts.length == 2) {
+      // Given address is an IPv4 address with a port
+      pair = Pair.of(parts[0], Integer.parseInt(parts[1]));
+    } else {
+      // No port specified, consider port number as 0
+      pair = Pair.of(bindAddress, 0);
+    }
+    return pair;
+  }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
index e602e66a771c9..4f3aca67068cd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
@@ -39,6 +39,7 @@
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.KerberosAuthException;
@@ -854,4 +855,19 @@ public void testCreateSocketAddressWithIPV6() throws Throwable {
     assertEquals(ipv6Address, addr.getHostName());
     assertEquals(12345, addr.getPort());
   }
+
+  @Test
+  public void testParseAddress2IpAndPort() {
+    String ip = "10.10.10.10";
+    Pair<String, Integer> pair = NetUtils.parseAddress2IpAndPort(ip);
+    assertEquals(ip, pair.getLeft());
+    assertEquals(0, pair.getRight().intValue());
+    pair = NetUtils.parseAddress2IpAndPort(ip + ":8080");
+    assertEquals(ip, pair.getLeft());
+    assertEquals(8080, pair.getRight().intValue());
+    ip = "10:10:10:10:10:10:10:10";
+    pair = NetUtils.parseAddress2IpAndPort(ip + ":8080");
+    assertEquals('[' + ip + ']', pair.getLeft());
+    assertEquals(8080, pair.getRight().intValue());
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 9efb6449739cf..1bb46f80e7125 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -1030,6 +1030,10 @@ public static URI getInfoServer(InetSocketAddress namenodeAddr,
       authority = substituteForWildcardAddress(authority,
           namenodeAddr.getHostName());
     }
+    if (!authority.startsWith("[")
+        && StringUtils.countMatches(authority, ":") > 2) {
+      authority = NetUtils.normalizeV6Address(authority);
+    }
     return URI.create(scheme + "://" + authority);
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index 43bd44e2bbb8e..38186c9f4e9a6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -513,6 +513,13 @@ public void testGetInfoServer() throws IOException, URISyntaxException {
     assertEquals(
         URI.create("http://localhost:" + DFS_NAMENODE_HTTP_PORT_DEFAULT),
         httpAddress);
+
+    //Verify IPv6 Address
+    httpAddress = DFSUtil.getInfoServer(new InetSocketAddress(
+        "::0", 8020), conf, "http");
+    assertEquals(
+        URI.create("http://[0:0:0:0:0:0:0:0]:" + DFS_NAMENODE_HTTP_PORT_DEFAULT),
+        httpAddress);
   }
 
   @Test
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
index a088f4bf39f12..46b30f75fc895 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
@@ -32,12 +32,13 @@
 
 import javax.servlet.http.HttpServlet;
 
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
@@ -118,12 +119,8 @@ static class ServletStruct {
     }
 
     public Builder at(String bindAddress) {
-      String[] parts = StringUtils.split(bindAddress, ':');
-      if (parts.length == 2) {
-        int port = Integer.parseInt(parts[1]);
-        return at(parts[0], port, port == 0);
-      }
-      return at(bindAddress, 0, true);
+      Pair<String, Integer> pair = NetUtils.parseAddress2IpAndPort(bindAddress);
+      return at(pair.getLeft(), pair.getRight(), pair.getRight() == 0);
     }
 
     public Builder at(int port) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
index eaf93605b2c25..1b0f200fde56c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
@@ -22,11 +22,12 @@
 import java.net.URI;
 
 import com.google.common.net.HostAndPort;
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
@@ -79,12 +80,9 @@ protected void serviceInit(Configuration conf) throws Exception {
           " is not set so the proxy will not run.");
     }
 
-    String[] parts = StringUtils.split(bindAddress, ':');
-    port = 0;
-    if (parts.length == 2) {
-      bindAddress = parts[0];
-      port = Integer.parseInt(parts[1]);
-    }
+    Pair<String, Integer> pair = NetUtils.parseAddress2IpAndPort(bindAddress);
+    bindAddress = pair.getLeft();
+    port = pair.getRight();
 
     String bindHost = conf.getTrimmed(YarnConfiguration.PROXY_BIND_HOST, null);
     if (bindHost != null) {