Commit f531a4a

Author: Chen Liang (committed)

HDFS-15191. EOF when reading legacy buffer in BlockTokenIdentifier. Contributed by Steven Rand.

1 parent: cbe71ea

2 files changed: +67 -36 lines changed

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java

Lines changed: 29 additions & 22 deletions
@@ -248,31 +248,34 @@ void readFieldsLegacy(DataInput in) throws IOException {
       modes.add(WritableUtils.readEnum(in, AccessMode.class));
     }
 
-    length = WritableUtils.readVInt(in);
-    StorageType[] readStorageTypes = new StorageType[length];
-    for (int i = 0; i < length; i++) {
-      readStorageTypes[i] = WritableUtils.readEnum(in, StorageType.class);
-    }
-    storageTypes = readStorageTypes;
-
-    length = WritableUtils.readVInt(in);
-    String[] readStorageIds = new String[length];
-    for (int i = 0; i < length; i++) {
-      readStorageIds[i] = WritableUtils.readString(in);
-    }
-    storageIds = readStorageIds;
+    try {
+      length = WritableUtils.readVInt(in);
+      StorageType[] readStorageTypes = new StorageType[length];
+      for (int i = 0; i < length; i++) {
+        readStorageTypes[i] = WritableUtils.readEnum(in, StorageType.class);
+      }
+      storageTypes = readStorageTypes;
 
-    useProto = false;
+      length = WritableUtils.readVInt(in);
+      String[] readStorageIds = new String[length];
+      for (int i = 0; i < length; i++) {
+        readStorageIds[i] = WritableUtils.readString(in);
+      }
+      storageIds = readStorageIds;
 
-    try {
       int handshakeMsgLen = WritableUtils.readVInt(in);
       if (handshakeMsgLen != 0) {
         handshakeMsg = new byte[handshakeMsgLen];
         in.readFully(handshakeMsg);
       }
     } catch (EOFException eof) {
-
+      // If the NameNode is on a version before HDFS-6708 and HDFS-9807, then
+      // the block token won't have storage types or storage IDs. For backward
+      // compatibility, swallow the EOF that we get when we try to read those
+      // fields. Same for the handshake secret field from HDFS-14611.
     }
+
+    useProto = false;
   }
 
   @VisibleForTesting
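The hunk above widens the scope of the existing try/catch so that every field added after the original legacy format (storage types, storage IDs, and the handshake secret) is treated as optional on read. For illustration only, here is a minimal, self-contained sketch of that pattern; the LegacyRecord class and its requiredField/optionalField members are hypothetical stand-ins and are not part of BlockTokenIdentifier:

// Sketch of the read-side pattern: trailing fields are read inside a
// try/catch so that an EOFException from an older, shorter buffer means
// "fields absent" rather than a deserialization failure.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;

public class LegacyRecord {
  private long requiredField;     // always present in the serialized form
  private String optionalField;   // added later; old writers omit it

  void readFields(DataInput in) throws IOException {
    requiredField = in.readLong(); // a missing required field is still an error
    try {
      optionalField = in.readUTF();
    } catch (EOFException eof) {
      // Buffer was written by an older version that predates optionalField;
      // leave it null instead of failing the whole read.
    }
  }

  public static void main(String[] args) throws IOException {
    // Simulate an "old" writer that only knows about the required field.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(baos)) {
      out.writeLong(42L);
    }

    LegacyRecord record = new LegacyRecord();
    record.readFields(new DataInputStream(
        new ByteArrayInputStream(baos.toByteArray())));
    System.out.println(record.requiredField);  // 42
    System.out.println(record.optionalField);  // null, no EOFException escapes
  }
}

Swallowing the EOFException is safe here because the absent fields are genuinely optional for older writers; any other IOException still propagates.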
@@ -332,13 +335,17 @@ void writeLegacy(DataOutput out) throws IOException {
     for (AccessMode aMode : modes) {
       WritableUtils.writeEnum(out, aMode);
     }
-    WritableUtils.writeVInt(out, storageTypes.length);
-    for (StorageType type: storageTypes){
-      WritableUtils.writeEnum(out, type);
+    if (storageTypes != null) {
+      WritableUtils.writeVInt(out, storageTypes.length);
+      for (StorageType type : storageTypes) {
+        WritableUtils.writeEnum(out, type);
+      }
     }
-    WritableUtils.writeVInt(out, storageIds.length);
-    for (String id: storageIds) {
-      WritableUtils.writeString(out, id);
+    if (storageIds != null) {
+      WritableUtils.writeVInt(out, storageIds.length);
+      for (String id : storageIds) {
+        WritableUtils.writeString(out, id);
+      }
     }
     if (handshakeMsg != null && handshakeMsg.length > 0) {
       WritableUtils.writeVInt(out, handshakeMsg.length);
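The writeLegacy change is the mirror image of the read path: when the storage arrays are null, nothing is written for them, which produces exactly the shorter buffer that the reader above now tolerates. A sketch of that null-guarded write, again with hypothetical names rather than the real BlockTokenIdentifier fields:

// Write-side counterpart of the earlier sketch: optional data is only
// emitted when non-null, so a record built without it serializes to a
// shorter buffer and the reader's EOF handling fills in the gap.
import java.io.DataOutput;
import java.io.IOException;

final class LegacyRecordWriter {
  static void writeFields(DataOutput out, long requiredField,
      String[] optionalIds) throws IOException {
    out.writeLong(requiredField);
    if (optionalIds != null) {           // mirrors the storageIds null check
      out.writeInt(optionalIds.length);  // length prefix, then each element
      for (String id : optionalIds) {
        out.writeUTF(id);
      }
    }
    // When optionalIds is null nothing is written for it; an old-format
    // reader simply stops here, and a new reader catches the EOF instead.
  }
}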

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java

Lines changed: 38 additions & 14 deletions
@@ -42,6 +42,7 @@
 import java.util.Set;
 
 import org.mockito.Mockito;
+import org.apache.commons.lang3.reflect.FieldUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -617,6 +618,24 @@ public void testEmptyLegacyBlockTokenBytesIsLegacy() throws IOException {
     readToken.readFields(dib);
   }
 
+  /**
+   * If the NameNode predates HDFS-6708 and HDFS-9807, then the LocatedBlocks
+   * that it returns to the client will have block tokens that don't include
+   * the storage types or storage IDs. Simulate this by setting the storage
+   * type and storage ID to null to test backwards compatibility.
+   */
+  @Test
+  public void testLegacyBlockTokenWithoutStorages() throws IOException,
+      IllegalAccessException {
+    BlockTokenIdentifier identifier = new BlockTokenIdentifier("user",
+        "blockpool", 123,
+        EnumSet.allOf(BlockTokenIdentifier.AccessMode.class), null, null,
+        false);
+    FieldUtils.writeField(identifier, "storageTypes", null, true);
+    FieldUtils.writeField(identifier, "storageIds", null, true);
+    testCraftedBlockTokenIdentifier(identifier, false, false, false);
+  }
+
   @Test
   public void testProtobufBlockTokenBytesIsProtobuf() throws IOException {
     final boolean useProto = true;
@@ -662,13 +681,17 @@ public void testProtobufBlockTokenBytesIsProtobuf() throws IOException {
     assertEquals(protobufToken, readToken);
   }
 
-  private void testCraftedProtobufBlockTokenIdentifier(
+  private void testCraftedBlockTokenIdentifier(
       BlockTokenIdentifier identifier, boolean expectIOE,
-      boolean expectRTE) throws IOException {
+      boolean expectRTE, boolean isProtobuf) throws IOException {
     DataOutputBuffer dob = new DataOutputBuffer(4096);
     DataInputBuffer dib = new DataInputBuffer();
 
-    identifier.writeProtobuf(dob);
+    if (isProtobuf) {
+      identifier.writeProtobuf(dob);
+    } else {
+      identifier.writeLegacy(dob);
+    }
     byte[] identBytes = Arrays.copyOf(dob.getData(), dob.getLength());
 
     BlockTokenIdentifier legacyToken = new BlockTokenIdentifier();
@@ -691,22 +714,23 @@ private void testCraftedProtobufBlockTokenIdentifier(
       invalidLegacyMessage = true;
     }
 
-    assertTrue(invalidLegacyMessage);
-
-    dib.reset(identBytes, identBytes.length);
-    protobufToken.readFieldsProtobuf(dib);
+    if (isProtobuf) {
+      assertTrue(invalidLegacyMessage);
 
-    dib.reset(identBytes, identBytes.length);
-    readToken.readFieldsProtobuf(dib);
-    assertEquals(protobufToken, readToken);
-    assertEquals(identifier, readToken);
+      dib.reset(identBytes, identBytes.length);
+      protobufToken.readFieldsProtobuf(dib);
+      dib.reset(identBytes, identBytes.length);
+      readToken.readFields(dib);
+      assertEquals(identifier, readToken);
+      assertEquals(protobufToken, readToken);
+    }
   }
 
   @Test
   public void testEmptyProtobufBlockTokenBytesIsProtobuf() throws IOException {
     // Empty BlockTokenIdentifiers throw IOException
     BlockTokenIdentifier identifier = new BlockTokenIdentifier();
-    testCraftedProtobufBlockTokenIdentifier(identifier, true, false);
+    testCraftedBlockTokenIdentifier(identifier, true, false, true);
   }
 
   @Test
@@ -727,10 +751,10 @@ public void testCraftedProtobufBlockTokenBytesIsProtobuf() throws
     datetime = ((datetime / 1000) * 1000); // strip milliseconds.
     datetime = datetime + 71; // 2017-02-09 00:12:35,071+0100
     identifier.setExpiryDate(datetime);
-    testCraftedProtobufBlockTokenIdentifier(identifier, false, true);
+    testCraftedBlockTokenIdentifier(identifier, false, true, true);
     datetime += 1; // 2017-02-09 00:12:35,072+0100
     identifier.setExpiryDate(datetime);
-    testCraftedProtobufBlockTokenIdentifier(identifier, true, false);
+    testCraftedBlockTokenIdentifier(identifier, true, false, true);
   }
 
   private BlockTokenIdentifier writeAndReadBlockToken(
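The new testLegacyBlockTokenWithoutStorages case forces the storage fields to null through reflection, since BlockTokenIdentifier exposes no setter for them. For reference, a small standalone sketch of that mechanism, commons-lang3's FieldUtils.writeField with forceAccess set to true; the Holder class here is a hypothetical stand-in for the token identifier:

// Sketch of the reflection trick the test uses: write a private field
// directly so serialization code sees the same state an old NameNode
// would have produced.
import org.apache.commons.lang3.reflect.FieldUtils;

public class FieldUtilsSketch {
  static class Holder {
    private String[] storageIds = {"disk-1", "disk-2"};
  }

  public static void main(String[] args) throws IllegalAccessException {
    Holder holder = new Holder();
    // forceAccess=true allows writing a private field without a setter.
    FieldUtils.writeField(holder, "storageIds", null, true);
    System.out.println(holder.storageIds == null); // true
  }
}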
