1818package org .apache .hadoop .hbase .io .hfile ;
1919
2020import static org .junit .Assert .assertArrayEquals ;
21+ import static org .junit .Assert .assertEquals ;
2122import static org .junit .Assert .assertFalse ;
2223import static org .junit .Assert .assertTrue ;
2324import static org .junit .Assert .fail ;
2829import static org .mockito .Mockito .verifyNoMoreInteractions ;
2930import static org .mockito .Mockito .when ;
3031
32+ import java .io .DataOutputStream ;
3133import java .io .IOException ;
3234import java .io .InputStream ;
3335import java .nio .ByteBuffer ;
36+ import java .util .Random ;
37+ import org .apache .hadoop .conf .Configuration ;
3438import org .apache .hadoop .fs .FSDataInputStream ;
3539import org .apache .hadoop .fs .FSDataOutputStream ;
3640import org .apache .hadoop .fs .FileSystem ;
3741import org .apache .hadoop .fs .Path ;
3842import org .apache .hadoop .hbase .HBaseClassTestRule ;
3943import org .apache .hadoop .hbase .HBaseTestingUtility ;
44+ import org .apache .hadoop .hbase .HConstants ;
45+ import org .apache .hadoop .hbase .fs .HFileSystem ;
46+ import org .apache .hadoop .hbase .io .ByteBuffAllocator ;
47+ import org .apache .hadoop .hbase .io .FSDataInputStreamWrapper ;
48+ import org .apache .hadoop .hbase .io .compress .Compression ;
4049import org .apache .hadoop .hbase .io .util .BlockIOUtils ;
4150import org .apache .hadoop .hbase .nio .ByteBuff ;
4251import org .apache .hadoop .hbase .nio .MultiByteBuff ;
4352import org .apache .hadoop .hbase .nio .SingleByteBuff ;
4453import org .apache .hadoop .hbase .testclassification .IOTests ;
4554import org .apache .hadoop .hbase .testclassification .SmallTests ;
4655import org .apache .hadoop .hbase .util .Bytes ;
56+ import org .apache .hadoop .hbase .util .EnvironmentEdgeManager ;
4757import org .junit .ClassRule ;
4858import org .junit .Rule ;
4959import org .junit .Test ;
5060import org .junit .experimental .categories .Category ;
5161import org .junit .rules .ExpectedException ;
62+ import org .junit .rules .TestName ;
5263
5364@ Category ({ IOTests .class , SmallTests .class })
5465public class TestBlockIOUtils {
@@ -57,11 +68,17 @@ public class TestBlockIOUtils {
5768 public static final HBaseClassTestRule CLASS_RULE =
5869 HBaseClassTestRule .forClass (TestBlockIOUtils .class );
5970
71+ @ Rule
72+ public TestName testName = new TestName ();
73+
6074 @ Rule
6175 public ExpectedException exception = ExpectedException .none ();
6276
6377 private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility ();
6478
79+ private static final int NUM_TEST_BLOCKS = 2 ;
80+ private static final Compression .Algorithm COMPRESSION_ALGO = Compression .Algorithm .GZ ;
81+
6582 @ Test
6683 public void testIsByteBufferReadable () throws IOException {
6784 FileSystem fs = TEST_UTIL .getTestFileSystem ();
@@ -92,6 +109,103 @@ public void testReadFully() throws IOException {
92109 assertArrayEquals (Bytes .toBytes (s ), heapBuf );
93110 }
94111
112+ @ Test
113+ public void testPreadWithReadFullBytes () throws IOException {
114+ testPreadReadFullBytesInternal (true , EnvironmentEdgeManager .currentTime ());
115+ }
116+
117+ @ Test
118+ public void testPreadWithoutReadFullBytes () throws IOException {
119+ testPreadReadFullBytesInternal (false , EnvironmentEdgeManager .currentTime ());
120+ }
121+
122+ private void testPreadReadFullBytesInternal (boolean readAllBytes , long randomSeed )
123+ throws IOException {
124+ Configuration conf = TEST_UTIL .getConfiguration ();
125+ conf .setBoolean (HConstants .HFILE_PREAD_ALL_BYTES_ENABLED_KEY , readAllBytes );
126+ FileSystem fs = TEST_UTIL .getTestFileSystem ();
127+ Path path = new Path (TEST_UTIL .getDataTestDirOnTestFS (), testName .getMethodName ());
128+ // give a fixed seed such we can see failure easily.
129+ Random rand = new Random (randomSeed );
130+ long totalDataBlockBytes =
131+ writeBlocks (TEST_UTIL .getConfiguration (), rand , COMPRESSION_ALGO , path );
132+ readDataBlocksAndVerify (fs , path , COMPRESSION_ALGO , totalDataBlockBytes );
133+ }
134+
135+ private long writeBlocks (Configuration conf , Random rand , Compression .Algorithm compressAlgo ,
136+ Path path ) throws IOException {
137+ FileSystem fs = HFileSystem .get (conf );
138+ FSDataOutputStream os = fs .create (path );
139+ HFileContext meta =
140+ new HFileContextBuilder ().withHBaseCheckSum (true ).withCompression (compressAlgo ).build ();
141+ HFileBlock .Writer hbw = new HFileBlock .Writer (conf , null , meta );
142+ long totalDataBlockBytes = 0 ;
143+ for (int i = 0 ; i < NUM_TEST_BLOCKS ; ++i ) {
144+ int blockTypeOrdinal = rand .nextInt (BlockType .values ().length );
145+ if (blockTypeOrdinal == BlockType .ENCODED_DATA .ordinal ()) {
146+ blockTypeOrdinal = BlockType .DATA .ordinal ();
147+ }
148+ BlockType bt = BlockType .values ()[blockTypeOrdinal ];
149+ DataOutputStream dos = hbw .startWriting (bt );
150+ int size = rand .nextInt (500 );
151+ for (int j = 0 ; j < size ; ++j ) {
152+ dos .writeShort (i + 1 );
153+ dos .writeInt (j + 1 );
154+ }
155+
156+ hbw .writeHeaderAndData (os );
157+ totalDataBlockBytes += hbw .getOnDiskSizeWithHeader ();
158+ }
159+ // append a dummy trailer and in a actual HFile it should have more data.
160+ FixedFileTrailer trailer = new FixedFileTrailer (3 , 3 );
161+ trailer .setFirstDataBlockOffset (0 );
162+ trailer .setLastDataBlockOffset (totalDataBlockBytes );
163+ trailer .setComparatorClass (meta .getCellComparator ().getClass ());
164+ trailer .setDataIndexCount (NUM_TEST_BLOCKS );
165+ trailer .setCompressionCodec (compressAlgo );
166+ trailer .serialize (os );
167+ // close the stream
168+ os .close ();
169+ return totalDataBlockBytes ;
170+ }
171+
172+ private void readDataBlocksAndVerify (FileSystem fs , Path path , Compression .Algorithm compressAlgo ,
173+ long totalDataBlockBytes ) throws IOException {
174+ FSDataInputStream is = fs .open (path );
175+ HFileContext fileContext =
176+ new HFileContextBuilder ().withHBaseCheckSum (true ).withCompression (compressAlgo ).build ();
177+ ReaderContext context =
178+ new ReaderContextBuilder ().withInputStreamWrapper (new FSDataInputStreamWrapper (is ))
179+ .withReaderType (ReaderContext .ReaderType .PREAD ).withFileSize (totalDataBlockBytes )
180+ .withFilePath (path ).withFileSystem (fs ).build ();
181+ HFileBlock .FSReader hbr =
182+ new HFileBlock .FSReaderImpl (context , fileContext , ByteBuffAllocator .HEAP , fs .getConf ());
183+
184+ long onDiskSizeOfNextBlock = -1 ;
185+ long offset = 0 ;
186+ int numOfReadBlock = 0 ;
187+ // offset and totalBytes shares the same logic in the HFilePreadReader
188+ while (offset < totalDataBlockBytes ) {
189+ HFileBlock block = hbr .readBlockData (offset , onDiskSizeOfNextBlock , true , false , false );
190+ numOfReadBlock ++;
191+ try {
192+ onDiskSizeOfNextBlock = block .getNextBlockOnDiskSize ();
193+ offset += block .getOnDiskSizeWithHeader ();
194+ } finally {
195+ block .release ();
196+ }
197+ }
198+ assertEquals (totalDataBlockBytes , offset );
199+ assertEquals (NUM_TEST_BLOCKS , numOfReadBlock );
200+ deleteFile (fs , path );
201+ }
202+
203+ private void deleteFile (FileSystem fs , Path path ) throws IOException {
204+ if (fs .exists (path )) {
205+ fs .delete (path , true );
206+ }
207+ }
208+
95209 @ Test
96210 public void testReadWithExtra () throws IOException {
97211 FileSystem fs = TEST_UTIL .getTestFileSystem ();