23 commits
3c51579  Merge pull request #1 from apache/trunk (GuoPhilipse, Nov 29, 2020)
a449d04  Merge pull request #2 from apache/trunk (GuoPhilipse, Dec 12, 2020)
d4f968b  Merge pull request #3 from apache/trunk (GuoPhilipse, Apr 7, 2021)
d522abf  Merge pull request #4 from apache/trunk (GuoPhilipse, Apr 21, 2021)
de633b8  Merge pull request #5 from apache/trunk (GuoPhilipse, Oct 16, 2021)
66ce06b  Merge pull request #6 from apache/trunk (GuoPhilipse, Nov 8, 2021)
bea061f  Merge pull request #7 from apache/trunk (GuoPhilipse, Nov 19, 2021)
ee06d41  'triggerrebuild' (tclxgf13871, Nov 25, 2021)
9375f21  Merge pull request #8 from apache/trunk (GuoPhilipse, Dec 7, 2021)
8721aa8  Merge branch 'trunk' of github.com:GuoPhilipse/hadoop into trunk (tclxgf13871, Dec 8, 2021)
56a7aea  Merge pull request #9 from apache/trunk (GuoPhilipse, Dec 10, 2021)
9d39e29  Merge branch 'trunk' of github.com:GuoPhilipse/hadoop into trunk (tclxgf13871, Dec 13, 2021)
989f31c  Merge pull request #10 from apache/trunk (GuoPhilipse, Dec 30, 2021)
d028aae  Merge branch 'trunk' of github.com:GuoPhilipse/hadoop into trunk (tclxgf13871, Dec 30, 2021)
60f347a  Merge pull request #11 from apache/trunk (GuoPhilipse, Jan 19, 2022)
296d306  Merge branch 'trunk' of github.com:GuoPhilipse/hadoop into trunk (tclxgf13871, Jan 19, 2022)
99a3513  Merge pull request #12 from apache/trunk (GuoPhilipse, Feb 9, 2022)
0797b25  Merge pull request #13 from apache/trunk (GuoPhilipse, Mar 21, 2022)
1881045  Merge branch 'trunk' of github.com:GuoPhilipse/hadoop into trunk (tclxgf13871, Mar 21, 2022)
6f29c61  Merge pull request #14 from apache/trunk (GuoPhilipse, Apr 11, 2022)
0e68cc6  Merge branch 'trunk' of github.com:GuoPhilipse/hadoop into trunk (tclxgf13871, Apr 11, 2022)
3276544  'fixdecodeerror' (tclxgf13871, Apr 11, 2022)
4dc4150  addtestcase (GuoPhilipse, Apr 24, 2022)
@@ -2946,7 +2946,7 @@ void fromXml(Stanza st) throws InvalidXmlException {
     this.newLength = Long.parseLong(st.getValue("NEWLENGTH"));
     this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
     if (st.hasChildren("BLOCK"))
-      this.truncateBlock = FSEditLogOp.blockFromXml(st);
+      this.truncateBlock = FSEditLogOp.blockFromXml(st.getChildren("BLOCK").get(0));
Contributor (review comment): Good catch here. I think it is better to fix it at org.apache.hadoop.hdfs.server.namenode.FSEditLogOp#blockFromXml. Thanks.

Contributor (review comment): After another review, I think it is proper now. Do you mind adding a new unit test to cover this case?

Member Author (reply): Thanks @Hexiaoqiao for your review, will add a test later.
}

@Override
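For context on why the one-line fix works: blockFromXml parses the block fields out of whatever stanza it receives. A minimal sketch of that helper, paraphrased from how it is used here rather than copied verbatim from the Hadoop source, assuming the usual BLOCK_ID/NUM_BYTES/GENSTAMP layout of the edits XML:

// Paraphrased sketch of FSEditLogOp#blockFromXml. It reads the block fields
// directly from the stanza it is given, so the caller must pass the BLOCK
// child stanza; with the parent op stanza (the old code), getValue("BLOCK_ID")
// fails because that key lives one level down, inside <BLOCK>.
public static Block blockFromXml(Stanza st) throws InvalidXmlException {
  long blockId = Long.parseLong(st.getValue("BLOCK_ID"));
  long numBytes = Long.parseLong(st.getValue("NUM_BYTES"));
  long genStamp = Long.parseLong(st.getValue("GENSTAMP"));
  return new Block(blockId, numBytes, genStamp);
}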
@@ -22,6 +22,10 @@
 import java.io.IOException;
 import java.util.Iterator;

+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.AppendTestUtil;
+import org.apache.hadoop.hdfs.protocol.Block;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -132,6 +136,23 @@ private CheckpointSignature runOperations() throws IOException {
     DFSTestUtil.runOperations(cluster, dfs, cluster.getConfiguration(0),
         dfs.getDefaultBlockSize(), 0);

+    String client = "client";
+    String clientMachine = "clientMachine";
+    String src = "/test/testTruncate";
+    Path srcPath = new Path(src);
+    byte[] contents = AppendTestUtil.initBuffer(512);
+    FSDataOutputStream out = dfs.create(srcPath, true, 4, (short) 3,
+        dfs.getDefaultBlockSize());
+    out.write(contents, 0, 511);
+    out.close();
+
+    INodesInPath iip = cluster.getNamesystem().getFSDirectory().getINodesInPath(src, FSDirectory.DirOp.WRITE);
+    cluster.getNamesystem().writeLock();
+    Block truncateBlock = FSDirTruncateOp.prepareFileForTruncate(cluster.getNamesystem(), iip,
+        client, clientMachine, 1, null);
+    cluster.getNamesystem().getEditLog().logTruncate(src, client, clientMachine, truncateBlock.getNumBytes() - 1, Time.now(), truncateBlock);
+    cluster.getNamesystem().writeUnlock();
+
     // OP_ROLLING_UPGRADE_START
     cluster.getNamesystem().getEditLog().logStartRollingUpgrade(Time.now());
     // OP_ROLLING_UPGRADE_FINALIZE
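The added block makes runOperations() emit an OP_TRUNCATE record that carries a truncateBlock, so the generated edits file now contains a TRUNCATE op with a BLOCK child element. The surrounding test round-trips that file through the XML processor, which is exactly the decode path the fix touches. A hedged sketch of that round trip, with placeholder file names (the real test drives OfflineEditsViewer through its own helpers):

// Hypothetical round-trip: binary edits -> XML -> binary. File names are
// placeholders, and the flags/visitor arguments follow OfflineEditsViewer#go
// as best recalled, so treat the exact signature as an assumption.
OfflineEditsViewer oev = new OfflineEditsViewer();
OfflineEditsViewer.Flags flags = new OfflineEditsViewer.Flags();
oev.go("edits", "edits.xml", "xml", flags, null);
// Re-parsing the XML hits TruncateOp#fromXml, which now passes the BLOCK
// child stanza to blockFromXml instead of the whole TRUNCATE stanza.
oev.go("edits.xml", "edits.roundtrip", "binary", flags, null);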