From ee06d41fcfb891420452e90046be4c224a1ab1b4 Mon Sep 17 00:00:00 2001
From: gf13871
Date: Thu, 25 Nov 2021 10:34:44 +0800
Subject: [PATCH 1/3] 'triggerrebuild'

From 3276544a8d8f0cf050c9d60c74e222316f8235a0 Mon Sep 17 00:00:00 2001
From: gf13871
Date: Mon, 11 Apr 2022 19:08:00 +0800
Subject: [PATCH 2/3] 'fixdecodeerror'

---
 .../org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index 8bddc6741a1aa..3181588890a6a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -2946,7 +2946,7 @@ void fromXml(Stanza st) throws InvalidXmlException {
       this.newLength = Long.parseLong(st.getValue("NEWLENGTH"));
       this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
       if (st.hasChildren("BLOCK"))
-        this.truncateBlock = FSEditLogOp.blockFromXml(st);
+        this.truncateBlock = FSEditLogOp.blockFromXml(st.getChildren("BLOCK").get(0));
     }
 
     @Override

From 4dc415002d7088553846237fb90d0aeb257619b2 Mon Sep 17 00:00:00 2001
From: guophilipse
Date: Sun, 24 Apr 2022 23:54:52 +0800
Subject: [PATCH 3/3] addtestcase

---
 .../namenode/OfflineEditsViewerHelper.java    | 21 +++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java
index e7f51ce2e9407..124af239ac07a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java
@@ -22,6 +22,10 @@
 import java.io.IOException;
 import java.util.Iterator;
 
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.AppendTestUtil;
+import org.apache.hadoop.hdfs.protocol.Block;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -132,6 +136,23 @@ private CheckpointSignature runOperations() throws IOException {
     DFSTestUtil.runOperations(cluster, dfs, cluster.getConfiguration(0),
         dfs.getDefaultBlockSize(), 0);
 
+    String client = "client";
+    String clientMachine = "clientMachine";
+    String src = "/test/testTruncate";
+    Path srcPath = new Path(src);
+    byte[] contents = AppendTestUtil.initBuffer(512);
+    FSDataOutputStream out = dfs.create(srcPath, true, 4, (short)3,
+        dfs.getDefaultBlockSize());
+    out.write(contents, 0, 511);
+    out.close();
+
+    INodesInPath iip = cluster.getNamesystem().getFSDirectory().getINodesInPath(src, FSDirectory.DirOp.WRITE);
+    cluster.getNamesystem().writeLock();
+    Block truncateBlock = FSDirTruncateOp.prepareFileForTruncate(cluster.getNamesystem(), iip,
+        client, clientMachine, 1, null);
+    cluster.getNamesystem().getEditLog().logTruncate(src, client, clientMachine, truncateBlock.getNumBytes()-1, Time.now(), truncateBlock);
+    cluster.getNamesystem().writeUnlock();
+
     // OP_ROLLING_UPGRADE_START
     cluster.getNamesystem().getEditLog().logStartRollingUpgrade(Time.now());
     // OP_ROLLING_UPGRADE_FINALIZE
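
Context for the second patch, separate from the patches themselves: fromXml for the truncate op used to hand the whole op stanza to blockFromXml, but the block fields live under the <BLOCK> child of the record, so decoding edits XML that carried a truncate block failed (hence 'fixdecodeerror'). The self-contained sketch below uses plain JDK DOM parsing, not the Hadoop XMLUtils.Stanza API, to show that structure. The element names BLOCK_ID, NUM_BYTES and GENSTAMP are assumed to match what blockFromXml reads, and all values are invented for illustration.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

// Sketch of an OP_TRUNCATE record shaped like offline-edits-viewer XML output.
public class TruncateRecordSketch {
  public static void main(String[] args) throws Exception {
    String data =
        "<DATA>"
            + "<SRC>/test/testTruncate</SRC>"
            + "<CLIENTNAME>client</CLIENTNAME>"
            + "<CLIENTMACHINE>clientMachine</CLIENTMACHINE>"
            + "<NEWLENGTH>510</NEWLENGTH>"
            + "<TIMESTAMP>1650000000000</TIMESTAMP>"
            + "<BLOCK>"
            + "<BLOCK_ID>1073741825</BLOCK_ID>"
            + "<NUM_BYTES>511</NUM_BYTES>"
            + "<GENSTAMP>1001</GENSTAMP>"
            + "</BLOCK>"
            + "</DATA>";
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
        .parse(new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8)));
    Element root = doc.getDocumentElement();

    // NEWLENGTH and TIMESTAMP are direct children of the op record, so reading
    // them off the record works (mirrors st.getValue("NEWLENGTH") in the patch).
    long newLength = Long.parseLong(
        root.getElementsByTagName("NEWLENGTH").item(0).getTextContent());

    // The block fields are not direct values of the record; they exist only
    // inside the <BLOCK> child. Handing the op stanza itself to blockFromXml is
    // the decode error the second patch fixes: the <BLOCK> child must be used.
    Element block = (Element) root.getElementsByTagName("BLOCK").item(0);
    long numBytes = Long.parseLong(
        block.getElementsByTagName("NUM_BYTES").item(0).getTextContent());

    System.out.println("newLength=" + newLength
        + ", truncateBlock numBytes=" + numBytes);
  }
}

Run with a stock JDK this prints both values. In the real code path, asking the op stanza directly for the block fields would presumably fail with InvalidXmlException because those keys are only present on the <BLOCK> child stanza, which is what the third patch's logTruncate test case exercises through the offline edits viewer.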