Commit 2408066

Author: Inigo Goiri (committed)

HDFS-14908. LeaseManager should check parent-child relationship when filter open files. Contributed by Jinglun.

1 parent 578bd10 · commit 2408066
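
Why the change: the open-files filter previously matched candidate paths with a plain String.startsWith, so a filter of /base also matched files under a sibling directory such as /base-open. The fix routes the comparison through DFSUtil.isParentEntry, which only accepts a path that equals the filter or continues it at a path-separator boundary. Below is a minimal sketch of that parent-check semantics; it is an illustrative reimplementation (the root-path handling is an assumption), not the verbatim DFSUtil source.

// Illustrative sketch approximating org.apache.hadoop.hdfs.DFSUtil.isParentEntry.
public final class ParentEntryCheck {

  /** True iff {@code path} equals {@code parent} or lies under it at a '/' boundary. */
  static boolean isParentEntry(String path, String parent) {
    if (!path.startsWith(parent)) {
      return false;
    }
    if (path.length() == parent.length()) {
      return true; // the path is the parent itself
    }
    // Assumed handling: "/" is everything's parent; otherwise the character
    // after the prefix must be a separator, so "/base-open" is not under "/base".
    return "/".equals(parent) || path.charAt(parent.length()) == '/';
  }

  public static void main(String[] args) {
    // The bug this commit fixes: startsWith alone is too permissive.
    System.out.println("/base-open/f".startsWith("/base"));     // true  (false positive)
    System.out.println(isParentEntry("/base-open/f", "/base")); // false (correct)
    System.out.println(isParentEntry("/base/f", "/base"));      // true
    System.out.println(isParentEntry("/base/f", "/"));          // true
  }
}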

File tree: 4 files changed, +71 −9 lines changed

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

Lines changed: 4 additions & 3 deletions
@@ -1826,16 +1826,17 @@ BatchedListEntries<OpenFileEntry> listOpenFiles(long prevId,
     checkSuperuserPrivilege();
     checkOperation(OperationCategory.READ);
     BatchedListEntries<OpenFileEntry> batchedListEntries;
+    String normalizedPath = new Path(path).toString(); // normalize path.
     try {
       readLock();
       try {
         checkOperation(OperationCategory.READ);
         if (openFilesTypes.contains(OpenFilesType.ALL_OPEN_FILES)) {
           batchedListEntries = leaseManager.getUnderConstructionFiles(prevId,
-              path);
+              normalizedPath);
         } else {
           if (openFilesTypes.contains(OpenFilesType.BLOCKING_DECOMMISSION)) {
-            batchedListEntries = getFilesBlockingDecom(prevId, path);
+            batchedListEntries = getFilesBlockingDecom(prevId, normalizedPath);
           } else {
             throw new IllegalArgumentException("Unknown OpenFileType: "
                 + openFilesTypes);
@@ -1874,7 +1875,7 @@ public BatchedListEntries<OpenFileEntry> getFilesBlockingDecom(long prevId,
 
       String fullPathName = inodeFile.getFullPathName();
       if (org.apache.commons.lang3.StringUtils.isEmpty(path)
-          || fullPathName.startsWith(path)) {
+          || DFSUtil.isParentEntry(fullPathName, path)) {
         openFileEntries.add(new OpenFileEntry(inodeFile.getId(),
             inodeFile.getFullPathName(),
             inodeFile.getFileUnderConstructionFeature().getClientName(),
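
The added normalizedPath line is what makes the trailing-slash case in the new test pass: org.apache.hadoop.fs.Path normalizes its string form, so a user-supplied filter such as "/base/" compares as "/base" against the full path names returned by getFullPathName(). A small standalone illustration (assumes only the hadoop-common Path class on the classpath):

import org.apache.hadoop.fs.Path;

public class PathNormalizeDemo {
  public static void main(String[] args) {
    // Path strips trailing and redundant separators in its string form.
    System.out.println(new Path("/base/"));   // prints /base
    System.out.println(new Path("/base//f")); // prints /base/f
  }
}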

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java

Lines changed: 3 additions & 1 deletion
@@ -42,6 +42,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.OpenFilesIterator;
@@ -315,7 +316,8 @@ public BatchedListEntries<OpenFileEntry> getUnderConstructionFiles(
     }
 
     fullPathName = inodeFile.getFullPathName();
-    if (StringUtils.isEmpty(path) || fullPathName.startsWith(path)) {
+    if (StringUtils.isEmpty(path) ||
+        DFSUtil.isParentEntry(fullPathName, path)) {
       openFileEntries.add(new OpenFileEntry(inodeFile.getId(), fullPathName,
           inodeFile.getFileUnderConstructionFeature().getClientName(),
           inodeFile.getFileUnderConstructionFeature().getClientMachine()));

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java

Lines changed: 20 additions & 1 deletion
@@ -2392,13 +2392,32 @@ public static void verifyDelete(FsShell shell, FileSystem fs, Path path,
     }
   }
 
+  /**
+   * Create open files under root path.
+   * @param fs the filesystem.
+   * @param filePrefix the prefix of the files.
+   * @param numFilesToCreate the number of files to create.
+   */
   public static Map<Path, FSDataOutputStream> createOpenFiles(FileSystem fs,
       String filePrefix, int numFilesToCreate) throws IOException {
+    return createOpenFiles(fs, new Path("/"), filePrefix, numFilesToCreate);
+  }
+
+  /**
+   * Create open files.
+   * @param fs the filesystem.
+   * @param baseDir the base path of the files.
+   * @param filePrefix the prefix of the files.
+   * @param numFilesToCreate the number of files to create.
+   */
+  public static Map<Path, FSDataOutputStream> createOpenFiles(FileSystem fs,
+      Path baseDir, String filePrefix, int numFilesToCreate)
+      throws IOException {
     final Map<Path, FSDataOutputStream> filesCreated = new HashMap<>();
     final byte[] buffer = new byte[(int) (1024 * 1.75)];
     final Random rand = new Random(0xFEED0BACL);
     for (int i = 0; i < numFilesToCreate; i++) {
-      Path file = new Path("/" + filePrefix + "-" + i);
+      Path file = new Path(baseDir, filePrefix + "-" + i);
       FSDataOutputStream stm = fs.create(file, true, 1024, (short) 1, 1024);
       rand.nextBytes(buffer);
       stm.write(buffer);
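
The new overload lets tests leak open files under an arbitrary directory instead of only under the root; the original prefix-only variant now just delegates with new Path("/"). A hedged usage sketch mirroring the test below (fs is assumed to be a DistributedFileSystem from a running MiniDFSCluster, as in the existing tests):

// Create one file under /base whose output stream stays open (lease held).
Map<Path, FSDataOutputStream> open =
    DFSTestUtil.createOpenFiles(fs, new Path("/base"), "open-1", 1);
// ... exercise listOpenFiles with a "/base" filter here ...
DFSTestUtil.closeOpenFiles(open, open.size()); // close streams, releasing leases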

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java

Lines changed: 44 additions & 4 deletions
@@ -157,13 +157,22 @@ private void verifyOpenFiles(Map<Path, FSDataOutputStream> openFiles,
         remainingFiles.size() == 0);
   }
 
+  /**
+   * Verify all open files.
+   */
   private void verifyOpenFiles(Map<Path, FSDataOutputStream> openFiles)
       throws IOException {
-    verifyOpenFiles(openFiles, EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
-        OpenFilesIterator.FILTER_PATH_DEFAULT);
+    verifyOpenFiles(openFiles, OpenFilesIterator.FILTER_PATH_DEFAULT);
+  }
+
+  /**
+   * Verify open files with specified filter path.
+   */
+  private void verifyOpenFiles(Map<Path, FSDataOutputStream> openFiles,
+      String path) throws IOException {
+    verifyOpenFiles(openFiles, EnumSet.of(OpenFilesType.ALL_OPEN_FILES), path);
     verifyOpenFiles(new HashMap<>(),
-        EnumSet.of(OpenFilesType.BLOCKING_DECOMMISSION),
-        OpenFilesIterator.FILTER_PATH_DEFAULT);
+        EnumSet.of(OpenFilesType.BLOCKING_DECOMMISSION), path);
   }
 
   private Set<Path> createFiles(FileSystem fileSystem, String fileNamePrefix,
@@ -255,4 +264,35 @@ public void run() {
       }
     }
   }
+
+  @Test(timeout = 120000)
+  public void testListOpenFilesWithFilterPath() throws IOException {
+    HashMap<Path, FSDataOutputStream> openFiles = new HashMap<>();
+    createFiles(fs, "closed", 10);
+    verifyOpenFiles(openFiles, OpenFilesIterator.FILTER_PATH_DEFAULT);
+
+    BatchedEntries<OpenFileEntry> openFileEntryBatchedEntries = nnRpc
+        .listOpenFiles(0, EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
+            OpenFilesIterator.FILTER_PATH_DEFAULT);
+    assertTrue("Open files list should be empty!",
+        openFileEntryBatchedEntries.size() == 0);
+    BatchedEntries<OpenFileEntry> openFilesBlockingDecomEntries = nnRpc
+        .listOpenFiles(0, EnumSet.of(OpenFilesType.BLOCKING_DECOMMISSION),
+            OpenFilesIterator.FILTER_PATH_DEFAULT);
+    assertTrue("Open files list blocking decommission should be empty!",
+        openFilesBlockingDecomEntries.size() == 0);
+
+    openFiles.putAll(
+        DFSTestUtil.createOpenFiles(fs, new Path("/base"), "open-1", 1));
+    Map<Path, FSDataOutputStream> baseOpen =
+        DFSTestUtil.createOpenFiles(fs, new Path("/base-open"), "open-1", 1);
+    verifyOpenFiles(openFiles, "/base");
+    verifyOpenFiles(openFiles, "/base/");
+
+    openFiles.putAll(baseOpen);
+    while (openFiles.size() > 0) {
+      DFSTestUtil.closeOpenFiles(openFiles, 1);
+      verifyOpenFiles(openFiles, OpenFilesIterator.FILTER_PATH_DEFAULT);
+    }
+  }
 }
