
Commit 1483af2

HBASE-22867 The ForkJoinPool in CleanerChore will spawn thousands of threads in our cluster with thousands table
1 parent a59f7d4 commit 1483af2
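
The patch replaces the per-chore ForkJoinPool (whose thread count can grow far past its target parallelism when directory tasks block waiting on their sub-tasks) with a fixed-size ThreadPoolExecutor plus a CompletableFuture-based traversal that never blocks a pool thread on a child result. Below is a minimal, self-contained sketch of that pattern, assuming plain java.io.File and hypothetical names (AsyncDirCleaner, traverseAndDelete); it is not the HBase FileSystem or CleanerChore API, just an illustration of the technique.

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Bounded-pool, non-blocking directory traversal: each directory is handled by one task,
// sub-directories are submitted as further tasks, and results are combined with allOf so
// no pool thread ever blocks waiting on a child future.
public class AsyncDirCleaner {
  private final ExecutorService pool;

  AsyncDirCleaner(int threads) {
    this.pool = Executors.newFixedThreadPool(threads);
  }

  // Completes with true once every file and sub-directory under dir has been deleted.
  CompletableFuture<Boolean> traverseAndDelete(File dir, boolean root) {
    CompletableFuture<Boolean> result = new CompletableFuture<>();
    pool.execute(() -> {
      try {
        File[] children = dir.listFiles();
        List<CompletableFuture<Boolean>> subFutures = new ArrayList<>();
        boolean filesDeleted = true;
        for (File child : children == null ? new File[0] : children) {
          if (child.isDirectory()) {
            subFutures.add(traverseAndDelete(child, false)); // recurse asynchronously
          } else {
            filesDeleted &= child.delete();
          }
        }
        boolean allFilesDeleted = filesDeleted;
        // Register a callback instead of calling join() on the worker thread.
        CompletableFuture.allOf(subFutures.toArray(new CompletableFuture[0]))
            .whenComplete((v, e) -> {
              if (e != null) {
                result.completeExceptionally(e);
                return;
              }
              boolean deleted = allFilesDeleted
                  && subFutures.stream().allMatch(CompletableFuture::join);
              if (deleted && !root) {
                deleted = dir.delete(); // only remove the dir once it is empty
              }
              result.complete(deleted);
            });
      } catch (Exception e) {
        result.completeExceptionally(e);
      }
    });
    return result;
  }

  public static void main(String[] args) throws Exception {
    AsyncDirCleaner cleaner = new AsyncDirCleaner(4);
    System.out.println(cleaner.traverseAndDelete(new File("/tmp/cleaner-demo"), true).get());
    cleaner.pool.shutdown();
  }
}

Only the caller's get() blocks, and only on the calling thread; the pool threads list, delete, and register callbacks, so the thread count stays at the configured size regardless of how many directories exist.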

2 files changed: 112 additions & 128 deletions

hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java

Lines changed: 96 additions & 120 deletions
@@ -18,32 +18,32 @@
 package org.apache.hadoop.hbase.master.cleaner;
 
 import java.io.IOException;
-import java.util.Collections;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.RecursiveTask;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.Stoppable;
-import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.FutureUtils;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-import org.apache.hbase.thirdparty.com.google.common.base.Predicate;
 import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
 import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
@@ -211,11 +211,16 @@ private void preRunCleaner() {
     cleanersChain.forEach(FileCleanerDelegate::preClean);
   }
 
-  public Boolean runCleaner() {
+  public boolean runCleaner() {
     preRunCleaner();
-    CleanerTask task = new CleanerTask(this.oldFileDir, true);
-    pool.execute(task);
-    return task.join();
+    try {
+      CompletableFuture<Boolean> future = new CompletableFuture<>();
+      pool.execute(() -> traverseAndDelete(oldFileDir, true, future));
+      return future.get();
+    } catch (Exception e) {
+      LOG.info("Failed to traverse and delete the dir: {}", oldFileDir, e);
+      return false;
+    }
   }
 
   /**
@@ -380,126 +385,97 @@ public boolean setEnabled(final boolean enabled) {
   }
 
   private interface Action<T> {
-    T act() throws IOException;
+    T act() throws Exception;
   }
 
   /**
-   * Attemps to clean up a directory, its subdirectories, and files. Return value is true if
-   * everything was deleted. false on partial / total failures.
+   * Attempts to clean up a directory(its subdirectories, and files) in a
+   * {@link java.util.concurrent.ThreadPoolExecutor} concurrently. We can get the final result by
+   * calling result.get().
    */
-  private final class CleanerTask extends RecursiveTask<Boolean> {
-
-    private static final long serialVersionUID = -5444212174088754172L;
-
-    private final Path dir;
-    private final boolean root;
-
-    CleanerTask(final FileStatus dir, final boolean root) {
-      this(dir.getPath(), root);
-    }
-
-    CleanerTask(final Path dir, final boolean root) {
-      this.dir = dir;
-      this.root = root;
-    }
-
-    @Override
-    protected Boolean compute() {
-      LOG.trace("Cleaning under {}", dir);
-      List<FileStatus> subDirs;
-      List<FileStatus> files;
-      try {
-        // if dir doesn't exist, we'll get null back for both of these
-        // which will fall through to succeeding.
-        subDirs = getFilteredStatus(FileStatus::isDirectory);
-        files = getFilteredStatus(FileStatus::isFile);
-      } catch (IOException ioe) {
-        LOG.warn("failed to get FileStatus for contents of '{}'", dir, ioe);
-        return false;
-      }
-
-      boolean allFilesDeleted = true;
-      if (!files.isEmpty()) {
-        allFilesDeleted = deleteAction(() -> checkAndDeleteFiles(files), "files");
-      }
-
-      boolean allSubdirsDeleted = true;
+  private void traverseAndDelete(Path dir, boolean root, CompletableFuture<Boolean> result) {
+    try {
+      // Step.1: List all files under the given directory.
+      List<FileStatus> allPaths = Arrays.asList(fs.listStatus(dir));
+      List<FileStatus> subDirs =
+          allPaths.stream().filter(FileStatus::isDirectory).collect(Collectors.toList());
+      List<FileStatus> files =
+          allPaths.stream().filter(FileStatus::isFile).collect(Collectors.toList());
+
+      // Step.2: Try to delete all the deletable files.
+      boolean allFilesDeleted =
+          files.isEmpty() || deleteAction(() -> checkAndDeleteFiles(files), "files", dir);
+
+      // Step.3: Start to traverse and delete the sub-directories.
+      List<CompletableFuture<Boolean>> futures = new ArrayList<>();
       if (!subDirs.isEmpty()) {
-        List<CleanerTask> tasks = Lists.newArrayListWithCapacity(subDirs.size());
         sortByConsumedSpace(subDirs);
-        for (FileStatus subdir : subDirs) {
-          CleanerTask task = new CleanerTask(subdir, false);
-          tasks.add(task);
-          task.fork();
-        }
-        allSubdirsDeleted = deleteAction(() -> getCleanResult(tasks), "subdirs");
+        // Submit the request of sub-directory deletion.
+        subDirs.forEach(subDir -> {
+          CompletableFuture<Boolean> subFuture = new CompletableFuture<>();
+          pool.execute(() -> traverseAndDelete(subDir.getPath(), false, subFuture));
+          futures.add(subFuture);
+        });
       }
 
-      boolean result = allFilesDeleted && allSubdirsDeleted && isEmptyDirDeletable(dir);
-      // if and only if files and subdirs under current dir are deleted successfully, and the empty
-      // directory can be deleted, and it is not the root dir then task will try to delete it.
-      if (result && !root) {
-        result &= deleteAction(() -> fs.delete(dir, false), "dir");
-      }
-      return result;
-    }
-
-    /**
-     * Get FileStatus with filter.
-     * @param function a filter function
-     * @return filtered FileStatus or empty list if dir doesn't exist
-     * @throws IOException if there's an error other than dir not existing
-     */
-    private List<FileStatus> getFilteredStatus(Predicate<FileStatus> function) throws IOException {
-      return Optional.ofNullable(FSUtils.listStatusWithStatusFilter(fs, dir,
-        status -> function.test(status))).orElseGet(Collections::emptyList);
-    }
-
-    /**
-     * Perform a delete on a specified type.
-     * @param deletion a delete
-     * @param type possible values are 'files', 'subdirs', 'dirs'
-     * @return true if it deleted successfully, false otherwise
-     */
-    private boolean deleteAction(Action<Boolean> deletion, String type) {
-      boolean deleted;
-      try {
-        LOG.trace("Start deleting {} under {}", type, dir);
-        deleted = deletion.act();
-      } catch (PathIsNotEmptyDirectoryException exception) {
-        // N.B. HDFS throws this exception when we try to delete a non-empty directory, but
-        // LocalFileSystem throws a bare IOException. So some test code will get the verbose
-        // message below.
-        LOG.debug("Couldn't delete '{}' yet because it isn't empty. Probably transient. " +
-            "exception details at TRACE.", dir);
-        LOG.trace("Couldn't delete '{}' yet because it isn't empty w/exception.", dir, exception);
-        deleted = false;
-      } catch (IOException ioe) {
-        LOG.info("Could not delete {} under {}. might be transient; we'll retry. if it keeps " +
-            "happening, use following exception when asking on mailing list.",
-            type, dir, ioe);
-        deleted = false;
-      }
-      LOG.trace("Finish deleting {} under {}, deleted=", type, dir, deleted);
-      return deleted;
+      // Step.4: Once all sub-files & sub-directories are deleted, then can try to delete the
+      // current directory asynchronously.
+      FutureUtils.addListener(
+        CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()])),
+        (voidObj, e) -> {
+          if (e != null) {
+            result.completeExceptionally(e);
+            return;
+          }
+          try {
+            boolean allSubDirsDeleted = futures.stream().allMatch(CompletableFuture::join);
+            boolean deleted = allFilesDeleted && allSubDirsDeleted && isEmptyDirDeletable(dir);
+            if (deleted && !root) {
+              // If and only if files and sub-dirs under current dir are deleted successfully, and
+              // the empty directory can be deleted, and it is not the root dir then task will
+              // try to delete it.
+              deleted = deleteAction(() -> fs.delete(dir, false), "dir", dir);
+            }
+            result.complete(deleted);
+          } catch (Exception ie) {
+            // Must handle the inner exception here, otherwise the result may get stuck if one
+            // sub-directory get some failure.
+            result.completeExceptionally(ie);
+          }
+        });
+    } catch (Exception e) {
+      LOG.warn("Failed to traverse and delete the path: {}", dir, e);
+      result.completeExceptionally(e);
     }
+  }
 
-    /**
-     * Get cleaner results of subdirs.
-     * @param tasks subdirs cleaner tasks
-     * @return true if all subdirs deleted successfully, false for patial/all failures
-     * @throws IOException something happen during computation
-     */
-    private boolean getCleanResult(List<CleanerTask> tasks) throws IOException {
-      boolean cleaned = true;
-      try {
-        for (CleanerTask task : tasks) {
-          cleaned &= task.get();
-        }
-      } catch (InterruptedException | ExecutionException e) {
-        throw new IOException(e);
-      }
-      return cleaned;
+  /**
+   * Perform a delete on a specified type.
+   * @param deletion a delete
+   * @param type possible values are 'files', 'subdirs', 'dirs'
+   * @return true if it deleted successfully, false otherwise
+   */
+  private boolean deleteAction(Action<Boolean> deletion, String type, Path dir) {
+    boolean deleted;
+    try {
+      LOG.trace("Start deleting {} under {}", type, dir);
+      deleted = deletion.act();
+    } catch (PathIsNotEmptyDirectoryException exception) {
+      // N.B. HDFS throws this exception when we try to delete a non-empty directory, but
+      // LocalFileSystem throws a bare IOException. So some test code will get the verbose
+      // message below.
+      LOG.debug("Couldn't delete '{}' yet because it isn't empty w/exception.", dir, exception);
+      deleted = false;
+    } catch (IOException ioe) {
+      LOG.info("Could not delete {} under {}. might be transient; we'll retry. if it keeps "
+          + "happening, use following exception when asking on mailing list.",
+        type, dir, ioe);
+      deleted = false;
+    } catch (Exception e) {
+      LOG.info("unexpected exception: ", e);
+      deleted = false;
     }
+    LOG.trace("Finish deleting {} under {}, deleted=", type, dir, deleted);
+    return deleted;
   }
 }
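
The non-blocking part of the rewrite is Step.4: instead of calling get()/join() on each sub-directory task from a worker thread, the combining logic is registered as a callback on CompletableFuture.allOf and only runs after every child future has finished, so join() inside the callback returns immediately. A small standalone illustration of that ordering follows, using plain JDK whenComplete rather than HBase's FutureUtils.addListener; the class name and futures here are purely for demonstration.

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

public class AllOfOrdering {
  public static void main(String[] args) throws Exception {
    CompletableFuture<Boolean> child1 = new CompletableFuture<>();
    CompletableFuture<Boolean> child2 = new CompletableFuture<>();
    List<CompletableFuture<Boolean>> children = List.of(child1, child2);

    CompletableFuture<Boolean> parent = new CompletableFuture<>();
    // Registering the callback does not block the current thread; it runs only once
    // every child future has completed (normally or exceptionally).
    CompletableFuture.allOf(children.toArray(new CompletableFuture[0]))
        .whenComplete((v, e) -> {
          if (e != null) {
            parent.completeExceptionally(e);
          } else {
            // All children are done, so join() returns immediately here.
            parent.complete(children.stream().allMatch(CompletableFuture::join));
          }
        });

    child1.complete(true);
    child2.complete(true);
    System.out.println(parent.get(1, TimeUnit.SECONDS)); // prints: true
  }
}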

hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/DirScanPool.java

Lines changed: 16 additions & 8 deletions
@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.hbase.master.cleaner;
 
-import java.util.concurrent.ForkJoinPool;
-import java.util.concurrent.ForkJoinTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.DaemonThreadFactory;
 import org.apache.hadoop.hbase.conf.ConfigurationObserver;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -32,7 +35,7 @@
 public class DirScanPool implements ConfigurationObserver {
   private static final Logger LOG = LoggerFactory.getLogger(DirScanPool.class);
   private volatile int size;
-  private ForkJoinPool pool;
+  private final ThreadPoolExecutor pool;
   private int cleanerLatch;
   private boolean reconfigNotification;
 
@@ -42,11 +45,16 @@ public DirScanPool(Configuration conf) {
     // poolSize may be 0 or 0.0 from a careless configuration,
     // double check to make sure.
     size = size == 0 ? CleanerChore.calculatePoolSize(CleanerChore.DEFAULT_CHORE_POOL_SIZE) : size;
-    pool = new ForkJoinPool(size);
+    pool = initializePool(size);
     LOG.info("Cleaner pool size is {}", size);
     cleanerLatch = 0;
   }
 
+  private static ThreadPoolExecutor initializePool(int size) {
+    return new ThreadPoolExecutor(size, size, 500, TimeUnit.SECONDS, new LinkedBlockingQueue<>(),
+      new DaemonThreadFactory("dir-scan-pool"));
+  }
+
   /**
    * Checks if pool can be updated. If so, mark for update later.
    * @param conf configuration
@@ -73,8 +81,8 @@ synchronized void latchCountDown() {
     notifyAll();
   }
 
-  synchronized void execute(ForkJoinTask<?> task) {
-    pool.execute(task);
+  synchronized void execute(Runnable runnable) {
+    pool.execute(runnable);
   }
 
   public synchronized void shutdownNow() {
@@ -100,8 +108,8 @@ synchronized void tryUpdatePoolSize(long timeout) {
       }
     }
     shutdownNow();
-    LOG.info("Update chore's pool size from {} to {}", pool.getParallelism(), size);
-    pool = new ForkJoinPool(size);
+    LOG.info("Update chore's pool size from {} to {}", pool.getPoolSize(), size);
+    pool.setCorePoolSize(size);
   }
 
   public int getSize() {
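
DirScanPool now keeps one fixed-size ThreadPoolExecutor with daemon worker threads and adjusts it in place via setCorePoolSize instead of tearing down and rebuilding a ForkJoinPool. A rough standalone equivalent is sketched below under stated assumptions: the class name, thread name, and the inline lambda thread factory are placeholders for HBase's DaemonThreadFactory, and the two-step resize is only there to keep core <= max at every step so neither setter rejects the new value.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

// Sketch of a bounded scan pool: core == max, so the thread count never exceeds the
// configured size and extra work simply queues up instead of spawning new threads.
public class BoundedScanPool {
  private final ThreadPoolExecutor pool;

  BoundedScanPool(int size) {
    // The 500-second keep-alive mirrors the patch; it mostly matters for threads above
    // the core size, which this configuration never creates.
    pool = new ThreadPoolExecutor(size, size, 500, TimeUnit.SECONDS,
        new LinkedBlockingQueue<>(), runnable -> {
          Thread t = new Thread(runnable, "dir-scan-pool-demo");
          t.setDaemon(true); // daemon worker threads, as in the HBase pool
          return t;
        });
  }

  void execute(Runnable task) {
    pool.execute(task);
  }

  void resize(int newSize) {
    // Adjust the live executor instead of recreating it; order the two calls so that
    // core <= max holds throughout.
    if (newSize > pool.getMaximumPoolSize()) {
      pool.setMaximumPoolSize(newSize);
      pool.setCorePoolSize(newSize);
    } else {
      pool.setCorePoolSize(newSize);
      pool.setMaximumPoolSize(newSize);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    BoundedScanPool scanPool = new BoundedScanPool(2);
    scanPool.execute(() -> System.out.println("scanning on " + Thread.currentThread().getName()));
    scanPool.resize(4);
    scanPool.pool.shutdown();
    scanPool.pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}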
