32 changes: 32 additions & 0 deletions hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -343,6 +343,38 @@
<excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Sets rather than Guava provided Sets</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Sets</bannedImport>
<bannedImport>com.google.common.collect.Sets</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
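For context, the rule above binds the enforcer's enforce goal to the process-sources phase, so any source file in the module that imports either Guava Sets variant fails the build early (the restrict-imports-enforcer-rule version is taken from the ${restrict-imports.enforcer.version} property, presumably defined in a parent pom). A minimal sketch of the migration the rule pushes toward — the class name and element values here are hypothetical, and it assumes hadoop-common's Sets mirrors the Guava factory methods, as the unchanged call sites later in this diff imply:

// import org.apache.hadoop.thirdparty.com.google.common.collect.Sets; // banned
// import com.google.common.collect.Sets;                              // banned
import org.apache.hadoop.util.Sets; // allowed replacement from hadoop-common

import java.util.Set;

public class SetsMigrationSketch {
  public static void main(String[] args) {
    // The hadoop-common Sets utility mirrors the Guava factory methods,
    // so existing call sites compile unchanged after swapping the import.
    Set<String> nameservices = Sets.newHashSet("ns1", "ns2");
    System.out.println(nameservices);
  }
}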

@@ -23,9 +23,9 @@
import java.util.List;
import java.util.Set;

import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.apache.hadoop.security.GroupMappingServiceProvider;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.util.Sets;

public class DummyGroupMapping implements GroupMappingServiceProvider {

32 changes: 32 additions & 0 deletions hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
@@ -305,6 +305,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Sets rather than Guava provided Sets</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Sets</bannedImport>
<bannedImport>com.google.common.collect.Sets</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
@@ -30,6 +30,7 @@
import org.apache.hadoop.hdfs.protocolPB.RouterPolicyProvider;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.util.Sets;

import org.junit.BeforeClass;
import org.junit.Rule;
@@ -41,8 +42,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

/**
* Test suite covering RouterPolicyProvider. We expect that it contains a
* security policy definition for every RPC protocol used in HDFS. The test
@@ -18,7 +18,6 @@

package org.apache.hadoop.hdfs.server.federation.router;

import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +39,7 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.util.Sets;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
32 changes: 32 additions & 0 deletions hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -437,6 +437,38 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<dependencies>
<dependency>
<groupId>de.skuzzle.enforcer</groupId>
<artifactId>restrict-imports-enforcer-rule</artifactId>
<version>${restrict-imports.enforcer.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>banned-illegal-imports</id>
<phase>process-sources</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use hadoop-common provided Sets rather than Guava provided Sets</reason>
<bannedImports>
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Sets</bannedImport>
<bannedImport>com.google.common.collect.Sets</bannedImport>
</bannedImports>
</restrictImports>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

@@ -73,6 +73,7 @@
import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -107,7 +108,6 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.apache.hadoop.thirdparty.protobuf.BlockingService;

@InterfaceAudience.Private
@@ -23,11 +23,6 @@

import org.apache.hadoop.classification.InterfaceAudience;

import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

/**
* LayoutFlags represent features which the FSImage and edit logs can either
* support or not, independently of layout version.
@@ -19,7 +19,6 @@

import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
@@ -40,6 +39,7 @@
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -22,15 +22,12 @@
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BLOCKPLACEMENTPOLICY_EXCLUDE_SLOW_NODES_ENABLED_DEFAULT;
import static org.apache.hadoop.util.Time.monotonicNow;

import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;

import org.apache.hadoop.fs.StorageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -59,7 +56,10 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.util.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@@ -20,7 +20,6 @@
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.StorageType;
@@ -33,6 +32,7 @@
import org.apache.hadoop.hdfs.server.protocol.*;
import org.apache.hadoop.hdfs.server.protocol.BlockECReconstructionCommand.BlockECReconstructionInfo;
import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo.BlockStatus;
import org.apache.hadoop.util.Sets;

import org.slf4j.Logger;

@@ -28,12 +28,12 @@
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Sets;

import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.slf4j.Logger;

/**
@@ -177,8 +177,8 @@ private void doRefreshNamenodes(
throws IOException {
assert Thread.holdsLock(refreshNamenodesLock);

Set<String> toRefresh = Sets.newLinkedHashSet();
Set<String> toAdd = Sets.newLinkedHashSet();
Set<String> toRefresh = new LinkedHashSet<>();
Set<String> toAdd = new LinkedHashSet<>();
Set<String> toRemove;

synchronized (this) {
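Not every call site keeps a utility class: where the factory method added nothing, the patch swaps it for the plain constructor, as in doRefreshNamenodes above. A small self-contained sketch of the equivalent before/after (the added values are hypothetical):

import java.util.LinkedHashSet;
import java.util.Set;

public class LinkedHashSetSketch {
  public static void main(String[] args) {
    // Before: Set<String> toRefresh = Sets.newLinkedHashSet();
    // After (this patch): the diamond operator makes the factory redundant.
    Set<String> toRefresh = new LinkedHashSet<>();
    toRefresh.add("ns1");
    toRefresh.add("ns2");
    System.out.println(toRefresh); // insertion order preserved: [ns1, ns2]
  }
}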
@@ -20,7 +20,6 @@

import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.FutureCallback;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;
@@ -37,6 +36,7 @@
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi.VolumeCheckContext;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.util.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -119,12 +119,12 @@
import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
import org.apache.hadoop.util.InstrumentedReadWriteLock;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;

import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -40,10 +40,11 @@
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.util.Sets;

import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

/**
* Manages a collection of Journals. None of the methods are synchronized, it is
@@ -40,7 +40,6 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ComparisonChain;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

/**
* The NNStorageRetentionManager is responsible for inspecting the storage
@@ -192,7 +191,7 @@ private long getImageTxIdToRetain(
return 0L;
}

TreeSet<Long> imageTxIds = Sets.newTreeSet(Collections.reverseOrder());
TreeSet<Long> imageTxIds = new TreeSet<>(Collections.reverseOrder());
Contributor Author: That's correct. Hence, I have also created follow-up Jira HADOOP-17726 so that empty sets can be created directly via constructors.

for (FSImageFile image : images) {
imageTxIds.add(image.getCheckpointTxId());
}
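The comparator-taking overload gets the same treatment: new TreeSet<>(Collections.reverseOrder()) is a drop-in replacement for Sets.newTreeSet(Collections.reverseOrder()), as in getImageTxIdToRetain above. A minimal sketch of the descending-order behavior that method relies on (the transaction ids are hypothetical):

import java.util.Collections;
import java.util.TreeSet;

public class ReverseOrderSketch {
  public static void main(String[] args) {
    // Descending comparator: first() yields the highest transaction id.
    TreeSet<Long> imageTxIds = new TreeSet<>(Collections.reverseOrder());
    Collections.addAll(imageTxIds, 10L, 42L, 7L);
    System.out.println(imageTxIds.first()); // 42
  }
}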
@@ -21,7 +21,6 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

import java.util.Set;
import org.apache.commons.logging.Log;
@@ -93,6 +92,7 @@
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.JvmPauseMonitor;
import org.apache.hadoop.util.ServicePlugin;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.GcTimeMonitor;
@@ -86,6 +86,7 @@
import org.apache.hadoop.hdfs.server.namenode.ImageServlet;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.util.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -148,7 +149,6 @@
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

/**
* This class creates a single-process DFS cluster for junit testing.
@@ -25,7 +25,6 @@
import java.util.*;

import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
@@ -48,6 +47,7 @@
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.hadoop.util.Sets;
import org.junit.Assert;
import static org.junit.Assert.fail;
import org.junit.Test;
@@ -80,12 +80,11 @@
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.util.Sets;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;

public class TestDFSUtil {

static final String NS1_NN_ADDR = "ns1-nn.example.com:8020";