diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java
index e1bea90f49d9..e54de3403e7a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java
@@ -88,6 +88,7 @@
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.FsDelegationToken;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSVisitor;
 import org.apache.hadoop.hbase.util.FutureUtils;
 import org.apache.hadoop.hbase.util.Pair;
@@ -767,7 +768,7 @@ private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,
         .withChecksumType(StoreUtils.getChecksumType(conf))
         .withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(conf)).withBlockSize(blocksize)
         .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)
-        .build();
+        .withCreateTime(EnvironmentEdgeManager.currentTime()).build();
       halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(outFile)
         .withBloomType(bloomFilterType).withFileContext(hFileContext).build();
       HFileScanner scanner = halfReader.getScanner(false, false, false);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java
index 591d807c0da4..fecf4c7ec2c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.tool;
 
 import static org.apache.hadoop.hbase.HBaseTestingUtil.countRows;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThrows;
@@ -63,6 +64,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.hamcrest.MatcherAssert;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -567,6 +569,25 @@ public void testSplitStoreFile() throws IOException {
     assertEquals(1000, rowCount);
   }
 
+  @Test
+  public void testSplitStoreFileWithCreateTimeTS() throws IOException {
+    Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS");
+    FileSystem fs = util.getTestFileSystem();
+    Path testIn = new Path(dir, "testhfile");
+    ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY);
+    HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER,
+      Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+    Path bottomOut = new Path(dir, "bottom.out");
+    Path topOut = new Path(dir, "top.out");
+
+    BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), testIn, familyDesc,
+      Bytes.toBytes("ggg"), bottomOut, topOut);
+
+    verifyHFileCreateTimeTS(bottomOut);
+    verifyHFileCreateTimeTS(topOut);
+  }
+
   @Test
   public void testSplitStoreFileWithNoneToNone() throws IOException {
     testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
@@ -623,6 +644,16 @@ private int verifyHFile(Path p) throws IOException {
     return count;
   }
 
+  private void verifyHFileCreateTimeTS(Path p) throws IOException {
+    Configuration conf = util.getConfiguration();
+
+    try (HFile.Reader reader =
+      HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf)) {
+      long fileCreateTime = reader.getHFileInfo().getHFileContext().getFileCreateTime();
+      MatcherAssert.assertThat(fileCreateTime, greaterThan(0L));
+    }
+  }
+
   private void addStartEndKeysForTest(TreeMap<byte[], Integer> map, byte[] first, byte[] last) {
     Integer value = map.containsKey(first) ? map.get(first) : 0;
     map.put(first, value + 1);
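Note on the one-line production change: the split halves written by copyHFileHalf previously carried a CREATE_TIME_TS of 0 because HFileContextBuilder leaves its create-time field unset unless withCreateTime(...) is called. The sketch below illustrates that before/after behavior in isolation; it is a minimal, hypothetical example (the class name and the builder's default of 0 are assumptions, not part of this patch), using only the HFileContextBuilder, HFileContext.getFileCreateTime(), and EnvironmentEdgeManager.currentTime() APIs already exercised by the diff above.

    import org.apache.hadoop.hbase.io.hfile.HFileContext;
    import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

    // Hypothetical standalone sketch, not part of the patch.
    public class CreateTimeSketch {
      public static void main(String[] args) {
        // Before the patch: withCreateTime(...) was never called in
        // copyHFileHalf, so the context reported a create time of 0
        // (assuming the builder's default create-time field is 0).
        HFileContext untimed = new HFileContextBuilder().build();
        assert untimed.getFileCreateTime() == 0L;

        // After the patch: each half-file's context is stamped with the
        // current time, which is what verifyHFileCreateTimeTS asserts on.
        HFileContext timed = new HFileContextBuilder()
          .withCreateTime(EnvironmentEdgeManager.currentTime()).build();
        assert timed.getFileCreateTime() > 0L;
      }
    }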