Skip to content

Commit de4aeae

Browse files
committed
HBASE-27148 Move minimum hadoop 3 support version to 3.2.3 (apache#4561)
Signed-off-by: Xin Sun <[email protected]> (cherry picked from commit 41972cb)
1 parent d79d812 commit de4aeae

File tree

10 files changed

+134
-276
lines changed

10 files changed

+134
-276
lines changed

hbase-http/pom.xml

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -367,11 +367,6 @@
367367
<groupId>org.apache.hadoop</groupId>
368368
<artifactId>hadoop-auth</artifactId>
369369
</dependency>
370-
<dependency>
371-
<groupId>org.codehaus.jackson</groupId>
372-
<artifactId>jackson-core-asl</artifactId>
373-
<version>1.9.13</version>
374-
</dependency>
375370
</dependencies>
376371
<build>
377372
<plugins>

hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitWALManager.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
import java.util.Arrays;
2727
import java.util.Collections;
2828
import java.util.List;
29-
import java.util.Optional;
3029
import java.util.stream.Collectors;
3130
import org.apache.hadoop.conf.Configuration;
3231
import org.apache.hadoop.fs.FileStatus;
@@ -160,7 +159,7 @@ public ServerName acquireSplitWALWorker(Procedure<?> procedure)
160159
/**
161160
* After the worker finished the split WAL task, it will release the worker, and wake up all the
162161
* suspend procedures in the ProcedureEvent
163-
* @param worker worker which is about to release
162+
* @param worker worker which is about to release
164163
*/
165164
public void releaseSplitWALWorker(ServerName worker) {
166165
LOG.debug("Release split WAL worker={}", worker);

hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
import java.util.Iterator;
2727
import java.util.List;
2828
import java.util.Map;
29-
import java.util.Optional;
3029
import java.util.Set;
3130
import java.util.concurrent.ConcurrentHashMap;
3231
import java.util.concurrent.Executors;

hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java

Lines changed: 63 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -144,6 +144,8 @@
144144
import org.apache.hadoop.hdfs.DFSClient;
145145
import org.apache.hadoop.hdfs.DistributedFileSystem;
146146
import org.apache.hadoop.hdfs.MiniDFSCluster;
147+
import org.apache.hadoop.hdfs.server.datanode.DataNode;
148+
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
147149
import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
148150
import org.apache.hadoop.mapred.JobConf;
149151
import org.apache.hadoop.mapred.MiniMRCluster;
@@ -196,6 +198,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
196198
public static final boolean PRESPLIT_TEST_TABLE = true;
197199

198200
private MiniDFSCluster dfsCluster = null;
201+
private FsDatasetAsyncDiskServiceFixer dfsClusterFixer = null;
199202

200203
private volatile HBaseCluster hbaseCluster = null;
201204
private MiniMRCluster mrCluster = null;
@@ -574,6 +577,56 @@ public boolean cleanupDataTestDirOnTestFS(String subdirName) throws IOException
574577
return getTestFileSystem().delete(cpath, true);
575578
}
576579

580+
// Workaround to avoid IllegalThreadStateException
581+
// See HBASE-27148 for more details
582+
private static final class FsDatasetAsyncDiskServiceFixer extends Thread {
583+
584+
private volatile boolean stopped = false;
585+
586+
private final MiniDFSCluster cluster;
587+
588+
FsDatasetAsyncDiskServiceFixer(MiniDFSCluster cluster) {
589+
super("FsDatasetAsyncDiskServiceFixer");
590+
setDaemon(true);
591+
this.cluster = cluster;
592+
}
593+
594+
@Override
595+
public void run() {
596+
while (!stopped) {
597+
try {
598+
Thread.sleep(30000);
599+
} catch (InterruptedException e) {
600+
Thread.currentThread().interrupt();
601+
continue;
602+
}
603+
// we could add new datanodes during tests, so here we will check every 30 seconds, as the
604+
// timeout of the thread pool executor is 60 seconds by default.
605+
try {
606+
for (DataNode dn : cluster.getDataNodes()) {
607+
FsDatasetSpi<?> dataset = dn.getFSDataset();
608+
Field service = dataset.getClass().getDeclaredField("asyncDiskService");
609+
service.setAccessible(true);
610+
Object asyncDiskService = service.get(dataset);
611+
Field group = asyncDiskService.getClass().getDeclaredField("threadGroup");
612+
group.setAccessible(true);
613+
ThreadGroup threadGroup = (ThreadGroup) group.get(asyncDiskService);
614+
if (threadGroup.isDaemon()) {
615+
threadGroup.setDaemon(false);
616+
}
617+
}
618+
} catch (Exception e) {
619+
LOG.warn("failed to reset thread pool timeout for FsDatasetAsyncDiskService", e);
620+
}
621+
}
622+
}
623+
624+
void shutdown() {
625+
stopped = true;
626+
interrupt();
627+
}
628+
}
629+
577630
/**
578631
* Start a minidfscluster.
579632
* @param servers How many DNs to start.
* @see #shutdownMiniDFSCluster()
@@ -632,7 +685,8 @@ public MiniDFSCluster startMiniDFSCluster(int servers, final String racks[], Str
632685

633686
this.dfsCluster =
634687
new MiniDFSCluster(0, this.conf, servers, true, true, true, null, racks, hosts, null);
635-
688+
this.dfsClusterFixer = new FsDatasetAsyncDiskServiceFixer(dfsCluster);
689+
this.dfsClusterFixer.start();
636690
// Set this just-started cluster as our filesystem.
637691
setFs();
638692

@@ -656,6 +710,8 @@ public MiniDFSCluster startMiniDFSClusterForTestWAL(int namenodePort) throws IOE
656710
"ERROR");
657711
dfsCluster =
658712
new MiniDFSCluster(namenodePort, conf, 5, false, true, true, null, null, null, null);
713+
this.dfsClusterFixer = new FsDatasetAsyncDiskServiceFixer(dfsCluster);
714+
this.dfsClusterFixer.start();
659715
return dfsCluster;
660716
}
661717

@@ -778,6 +834,12 @@ public void shutdownMiniDFSCluster() throws IOException {
778834
// The below throws an exception per dn, AsynchronousCloseException.
779835
this.dfsCluster.shutdown();
780836
dfsCluster = null;
837+
// It is possible that the dfs cluster is set through setDFSCluster method, where we will not
838+
// have a fixer
839+
if (dfsClusterFixer != null) {
840+
this.dfsClusterFixer.shutdown();
841+
dfsClusterFixer = null;
842+
}
781843
dataTestDirOnTestFS = null;
782844
CommonFSUtils.setFsDefault(this.conf, new Path("file:///"));
783845
}

hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml

Lines changed: 0 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -82,38 +82,6 @@
8282
<artifactId>hadoop-common</artifactId>
8383
<scope>provided</scope>
8484
</dependency>
85-
<dependency>
86-
<groupId>org.codehaus.jackson</groupId>
87-
<artifactId>jackson-jaxrs</artifactId>
88-
<version>1.9.13</version>
89-
<scope>provided</scope>
90-
<exclusions>
91-
<exclusion>
92-
<groupId>org.codehaus.jackson</groupId>
93-
<artifactId>jackson-mapper-asl</artifactId>
94-
</exclusion>
95-
<exclusion>
96-
<groupId>org.codehaus.jackson</groupId>
97-
<artifactId>jackson-core-asl</artifactId>
98-
</exclusion>
99-
</exclusions>
100-
</dependency>
101-
<dependency>
102-
<groupId>org.codehaus.jackson</groupId>
103-
<artifactId>jackson-xc</artifactId>
104-
<version>1.9.13</version>
105-
<scope>provided</scope>
106-
<exclusions>
107-
<exclusion>
108-
<groupId>org.codehaus.jackson</groupId>
109-
<artifactId>jackson-mapper-asl</artifactId>
110-
</exclusion>
111-
<exclusion>
112-
<groupId>org.codehaus.jackson</groupId>
113-
<artifactId>jackson-core-asl</artifactId>
114-
</exclusion>
115-
</exclusions>
116-
</dependency>
11785
</dependencies>
11886
</profile>
11987

hbase-shaded/hbase-shaded-mapreduce/pom.xml

Lines changed: 0 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,6 @@
233233
<dependency>
234234
<groupId>org.apache.hadoop</groupId>
235235
<artifactId>hadoop-hdfs</artifactId>
236-
<version>${hadoop-two.version}</version>
237236
<scope>provided</scope>
238237
<exclusions>
239238
<exclusion>
@@ -269,38 +268,6 @@
269268
</exclusion>
270269
</exclusions>
271270
</dependency>
272-
<dependency>
273-
<groupId>org.codehaus.jackson</groupId>
274-
<artifactId>jackson-jaxrs</artifactId>
275-
<version>1.9.13</version>
276-
<scope>provided</scope>
277-
<exclusions>
278-
<exclusion>
279-
<groupId>org.codehaus.jackson</groupId>
280-
<artifactId>jackson-mapper-asl</artifactId>
281-
</exclusion>
282-
<exclusion>
283-
<groupId>org.codehaus.jackson</groupId>
284-
<artifactId>jackson-core-asl</artifactId>
285-
</exclusion>
286-
</exclusions>
287-
</dependency>
288-
<dependency>
289-
<groupId>org.codehaus.jackson</groupId>
290-
<artifactId>jackson-xc</artifactId>
291-
<version>1.9.13</version>
292-
<scope>provided</scope>
293-
<exclusions>
294-
<exclusion>
295-
<groupId>org.codehaus.jackson</groupId>
296-
<artifactId>jackson-mapper-asl</artifactId>
297-
</exclusion>
298-
<exclusion>
299-
<groupId>org.codehaus.jackson</groupId>
300-
<artifactId>jackson-core-asl</artifactId>
301-
</exclusion>
302-
</exclusions>
303-
</dependency>
304271
<dependency>
305272
<groupId>org.apache.hadoop</groupId>
306273
<artifactId>hadoop-auth</artifactId>
@@ -344,52 +311,6 @@
344311
<groupId>org.apache.hadoop</groupId>
345312
<artifactId>hadoop-mapreduce-client-core</artifactId>
346313
<scope>provided</scope>
347-
<exclusions>
348-
<exclusion>
349-
<groupId>com.google.guava</groupId>
350-
<artifactId>guava</artifactId>
351-
</exclusion>
352-
<exclusion>
353-
<groupId>javax.xml.bind</groupId>
354-
<artifactId>jaxb-api</artifactId>
355-
</exclusion>
356-
<exclusion>
357-
<groupId>javax.ws.rs</groupId>
358-
<artifactId>jsr311-api</artifactId>
359-
</exclusion>
360-
</exclusions>
361-
</dependency>
362-
<dependency>
363-
<groupId>org.codehaus.jackson</groupId>
364-
<artifactId>jackson-jaxrs</artifactId>
365-
<version>1.9.13</version>
366-
<scope>provided</scope>
367-
<exclusions>
368-
<exclusion>
369-
<groupId>org.codehaus.jackson</groupId>
370-
<artifactId>jackson-mapper-asl</artifactId>
371-
</exclusion>
372-
<exclusion>
373-
<groupId>org.codehaus.jackson</groupId>
374-
<artifactId>jackson-core-asl</artifactId>
375-
</exclusion>
376-
</exclusions>
377-
</dependency>
378-
<dependency>
379-
<groupId>org.codehaus.jackson</groupId>
380-
<artifactId>jackson-xc</artifactId>
381-
<version>1.9.13</version>
382-
<scope>provided</scope>
383-
<exclusions>
384-
<exclusion>
385-
<groupId>org.codehaus.jackson</groupId>
386-
<artifactId>jackson-mapper-asl</artifactId>
387-
</exclusion>
388-
<exclusion>
389-
<groupId>org.codehaus.jackson</groupId>
390-
<artifactId>jackson-core-asl</artifactId>
391-
</exclusion>
392-
</exclusions>
393314
</dependency>
394315
</dependencies>
395316
</profile>

hbase-shaded/hbase-shaded-testing-util-tester/pom.xml

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -84,12 +84,6 @@
8484
<version>${project.version}</version>
8585
<scope>test</scope>
8686
</dependency>
87-
<dependency>
88-
<groupId>org.codehaus.jackson</groupId>
89-
<artifactId>jackson-mapper-asl</artifactId>
90-
<version>1.9.13</version>
91-
<scope>test</scope>
92-
</dependency>
9387
</dependencies>
9488

9589
</project>

hbase-shaded/hbase-shaded-testing-util/pom.xml

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -98,12 +98,6 @@
9898
<type>test-jar</type>
9999
<scope>compile</scope>
100100
</dependency>
101-
<dependency>
102-
<groupId>org.codehaus.jackson</groupId>
103-
<artifactId>jackson-jaxrs</artifactId>
104-
<version>1.9.13</version>
105-
<scope>compile</scope>
106-
</dependency>
107101
<dependency>
108102
<groupId>org.apache.hbase</groupId>
109103
<artifactId>hbase-testing-util</artifactId>

hbase-testing-util/pom.xml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -126,6 +126,15 @@
126126
<groupId>org.apache.hbase</groupId>
127127
<artifactId>${compat.module}</artifactId>
128128
<type>test-jar</type>
129+
</dependency>
130+
<dependency>
131+
<groupId>org.mockito</groupId>
132+
<artifactId>mockito-core</artifactId>
133+
<scope>compile</scope>
134+
</dependency>
135+
<dependency>
136+
<groupId>com.github.stephenc.findbugs</groupId>
137+
<artifactId>findbugs-annotations</artifactId>
129138
<scope>compile</scope>
130139
</dependency>
131140
<dependency>

0 commit comments

Comments (0)