diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java index a036a90d7cf7..32fd181f64cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java @@ -21,7 +21,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -290,175 +289,6 @@ public static String createHFileLinkName(final TableName tableName, final String return s; } - /** - * Create a new HFileLink - *

- * It also adds a back-reference to the hfile back-reference directory to simplify the - * reference-count and the cleaning process. - * @param conf {@link Configuration} to read for the archive directory name - * @param fs {@link FileSystem} on which to write the HFileLink - * @param dstFamilyPath - Destination path (table/region/cf/) - * @param hfileRegionInfo - Linked HFile Region Info - * @param hfileName - Linked HFile name - * @return the file link name. - * @throws IOException on file or parent directory creation failure. - */ - public static String create(final Configuration conf, final FileSystem fs, - final Path dstFamilyPath, final RegionInfo hfileRegionInfo, final String hfileName) - throws IOException { - return create(conf, fs, dstFamilyPath, hfileRegionInfo, hfileName, true); - } - - /** - * Create a new HFileLink - *

- * It also adds a back-reference to the hfile back-reference directory to simplify the - * reference-count and the cleaning process. - * @param conf {@link Configuration} to read for the archive directory name - * @param fs {@link FileSystem} on which to write the HFileLink - * @param dstFamilyPath - Destination path (table/region/cf/) - * @param hfileRegionInfo - Linked HFile Region Info - * @param hfileName - Linked HFile name - * @param createBackRef - Whether back reference should be created. Defaults to true. - * @return the file link name. - * @throws IOException on file or parent directory creation failure. - */ - public static String create(final Configuration conf, final FileSystem fs, - final Path dstFamilyPath, final RegionInfo hfileRegionInfo, final String hfileName, - final boolean createBackRef) throws IOException { - TableName linkedTable = hfileRegionInfo.getTable(); - String linkedRegion = hfileRegionInfo.getEncodedName(); - return create(conf, fs, dstFamilyPath, linkedTable, linkedRegion, hfileName, createBackRef); - } - - /** - * Create a new HFileLink - *

- * It also adds a back-reference to the hfile back-reference directory to simplify the - * reference-count and the cleaning process. - * @param conf {@link Configuration} to read for the archive directory name - * @param fs {@link FileSystem} on which to write the HFileLink - * @param dstFamilyPath - Destination path (table/region/cf/) - * @param linkedTable - Linked Table Name - * @param linkedRegion - Linked Region Name - * @param hfileName - Linked HFile name - * @return the file link name. - * @throws IOException on file or parent directory creation failure. - */ - public static String create(final Configuration conf, final FileSystem fs, - final Path dstFamilyPath, final TableName linkedTable, final String linkedRegion, - final String hfileName) throws IOException { - return create(conf, fs, dstFamilyPath, linkedTable, linkedRegion, hfileName, true); - } - - /** - * Create a new HFileLink. In the event of link creation failure, this method throws an - * IOException, so that the calling upper laying can decide on how to proceed with this. - *

- * It also adds a back-reference to the hfile back-reference directory to simplify the - * reference-count and the cleaning process. - * @param conf {@link Configuration} to read for the archive directory name - * @param fs {@link FileSystem} on which to write the HFileLink - * @param dstFamilyPath - Destination path (table/region/cf/) - * @param linkedTable - Linked Table Name - * @param linkedRegion - Linked Region Name - * @param hfileName - Linked HFile name - * @param createBackRef - Whether back reference should be created. Defaults to true. - * @return the file link name. - * @throws IOException on file or parent directory creation failure. - */ - public static String create(final Configuration conf, final FileSystem fs, - final Path dstFamilyPath, final TableName linkedTable, final String linkedRegion, - final String hfileName, final boolean createBackRef) throws IOException { - String familyName = dstFamilyPath.getName(); - String regionName = dstFamilyPath.getParent().getName(); - String tableName = - CommonFSUtils.getTableName(dstFamilyPath.getParent().getParent()).getNameAsString(); - - return create(conf, fs, dstFamilyPath, familyName, tableName, regionName, linkedTable, - linkedRegion, hfileName, createBackRef); - } - - /** - * Create a new HFileLink - *

- * It also adds a back-reference to the hfile back-reference directory to simplify the - * reference-count and the cleaning process. - * @param conf {@link Configuration} to read for the archive directory name - * @param fs {@link FileSystem} on which to write the HFileLink - * @param dstFamilyPath - Destination path (table/region/cf/) - * @param dstTableName - Destination table name - * @param dstRegionName - Destination region name - * @param linkedTable - Linked Table Name - * @param linkedRegion - Linked Region Name - * @param hfileName - Linked HFile name - * @param createBackRef - Whether back reference should be created. Defaults to true. - * @return the file link name. - * @throws IOException on file or parent directory creation failure - */ - public static String create(final Configuration conf, final FileSystem fs, - final Path dstFamilyPath, final String familyName, final String dstTableName, - final String dstRegionName, final TableName linkedTable, final String linkedRegion, - final String hfileName, final boolean createBackRef) throws IOException { - String name = createHFileLinkName(linkedTable, linkedRegion, hfileName); - String refName = createBackReferenceName(dstTableName, dstRegionName); - - // Make sure the destination directory exists - fs.mkdirs(dstFamilyPath); - - // Make sure the FileLink reference directory exists - Path archiveStoreDir = - HFileArchiveUtil.getStoreArchivePath(conf, linkedTable, linkedRegion, familyName); - Path backRefPath = null; - if (createBackRef) { - Path backRefssDir = getBackReferencesDir(archiveStoreDir, hfileName); - fs.mkdirs(backRefssDir); - - // Create the reference for the link - backRefPath = new Path(backRefssDir, refName); - fs.createNewFile(backRefPath); - } - try { - // Create the link - if (fs.createNewFile(new Path(dstFamilyPath, name))) { - return name; - } - } catch (IOException e) { - LOG.error("couldn't create the link=" + name + " for " + dstFamilyPath, e); - // Revert the reference if the link 
creation failed - if (createBackRef) { - fs.delete(backRefPath, false); - } - throw e; - } - throw new IOException( - "File link=" + name + " already exists under " + dstFamilyPath + " folder."); - } - - /** - * Create a new HFileLink starting from a hfileLink name - *

- * It also adds a back-reference to the hfile back-reference directory to simplify the - * reference-count and the cleaning process. - * @param conf {@link Configuration} to read for the archive directory name - * @param fs {@link FileSystem} on which to write the HFileLink - * @param dstFamilyPath - Destination path (table/region/cf/) - * @param hfileLinkName - HFileLink name (it contains hfile-region-table) - * @param createBackRef - Whether back reference should be created. Defaults to true. - * @return the file link name. - * @throws IOException on file or parent directory creation failure. - */ - public static String createFromHFileLink(final Configuration conf, final FileSystem fs, - final Path dstFamilyPath, final String hfileLinkName, final boolean createBackRef) - throws IOException { - Matcher m = LINK_NAME_PATTERN.matcher(hfileLinkName); - if (!m.matches()) { - throw new IllegalArgumentException(hfileLinkName + " is not a valid HFileLink name!"); - } - return create(conf, fs, dstFamilyPath, TableName.valueOf(m.group(1), m.group(2)), m.group(3), - m.group(4), createBackRef); - } - /** * Create the back reference name */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java index 3250680d57bb..3d3d3d18de23 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java @@ -751,7 +751,8 @@ private Pair, List> splitStoreFiles(final MasterProcedureEnv en for (Map.Entry> e : files.entrySet()) { byte[] familyName = Bytes.toBytes(e.getKey()); final ColumnFamilyDescriptor hcd = htd.getColumnFamily(familyName); - final Collection storeFiles = e.getValue(); + Collection storeFileInfos = e.getValue(); + final Collection storeFiles = storeFileInfos; if 
(storeFiles != null && storeFiles.size() > 0) { final Configuration storeConfiguration = StoreUtils.createStoreConfiguration(env.getMasterConfiguration(), htd, hcd); @@ -762,7 +763,7 @@ private Pair, List> splitStoreFiles(final MasterProcedureEnv en // is running in a regionserver's Store context, or we might not be able // to read the hfiles. storeFileInfo.setConf(storeConfiguration); - StoreFileSplitter sfs = new StoreFileSplitter(regionFs, familyName, + StoreFileSplitter sfs = new StoreFileSplitter(regionFs, htd, hcd, new HStoreFile(storeFileInfo, hcd.getBloomFilterType(), CacheConfig.DISABLED)); futures.add(threadPool.submit(sfs)); } @@ -829,19 +830,27 @@ private void assertSplitResultFilesCount(final FileSystem fs, } } - private Pair splitStoreFile(HRegionFileSystem regionFs, byte[] family, HStoreFile sf) - throws IOException { + private Pair splitStoreFile(HRegionFileSystem regionFs, TableDescriptor htd, + ColumnFamilyDescriptor hcd, HStoreFile sf) throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("pid=" + getProcId() + " splitting started for store file: " + sf.getPath() + " for region: " + getParentRegion().getShortNameToLog()); } final byte[] splitRow = getSplitRow(); - final String familyName = Bytes.toString(family); - final Path path_first = - regionFs.splitStoreFile(this.daughterOneRI, familyName, sf, splitRow, false, splitPolicy); - final Path path_second = - regionFs.splitStoreFile(this.daughterTwoRI, familyName, sf, splitRow, true, splitPolicy); + final String familyName = hcd.getNameAsString(); + StoreFileTracker daughterOneSft = + StoreFileTrackerFactory.create(regionFs.getFileSystem().getConf(), htd, hcd, + HRegionFileSystem.create(regionFs.getFileSystem().getConf(), regionFs.getFileSystem(), + regionFs.getTableDir(), daughterOneRI)); + StoreFileTracker daughterTwoSft = + StoreFileTrackerFactory.create(regionFs.getFileSystem().getConf(), htd, hcd, + HRegionFileSystem.create(regionFs.getFileSystem().getConf(), regionFs.getFileSystem(), + 
regionFs.getTableDir(), daughterTwoRI)); + final Path path_first = regionFs.splitStoreFile(this.daughterOneRI, familyName, sf, splitRow, + false, splitPolicy, daughterOneSft); + final Path path_second = regionFs.splitStoreFile(this.daughterTwoRI, familyName, sf, splitRow, + true, splitPolicy, daughterTwoSft); if (LOG.isDebugEnabled()) { LOG.debug("pid=" + getProcId() + " splitting complete for store file: " + sf.getPath() + " for region: " + getParentRegion().getShortNameToLog()); @@ -855,24 +864,27 @@ private Pair splitStoreFile(HRegionFileSystem regionFs, byte[] famil */ private class StoreFileSplitter implements Callable> { private final HRegionFileSystem regionFs; - private final byte[] family; + private final ColumnFamilyDescriptor hcd; private final HStoreFile sf; + private final TableDescriptor htd; /** * Constructor that takes what it needs to split * @param regionFs the file system - * @param family Family that contains the store file + * @param hcd Family that contains the store file * @param sf which file */ - public StoreFileSplitter(HRegionFileSystem regionFs, byte[] family, HStoreFile sf) { + public StoreFileSplitter(HRegionFileSystem regionFs, TableDescriptor htd, + ColumnFamilyDescriptor hcd, HStoreFile sf) { this.regionFs = regionFs; this.sf = sf; - this.family = family; + this.hcd = hcd; + this.htd = htd; } @Override public Pair call() throws IOException { - return splitStoreFile(regionFs, family, sf); + return splitStoreFile(regionFs, htd, hcd, sf); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java index c77f4d4aefde..f718b2dfe9f8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java @@ -742,8 +742,7 @@ public Path splitStoreFile(RegionInfo hri, String familyName, 
HStoreFile f, byte hfileName = m.group(4); } // must create back reference here - HFileLink.create(conf, fs, splitDir, familyName, hri.getTable().getNameAsString(), - hri.getEncodedName(), linkedTable, linkedRegion, hfileName, true); + tracker.createHFileLink(linkedTable, linkedRegion, hfileName, true); Path path = new Path(splitDir, HFileLink.createHFileLinkName(linkedTable, linkedRegion, hfileName)); LOG.info("Created linkFile:" + path.toString() + " for child: " + hri.getEncodedName() diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java index b0024b73786a..dea67daca831 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java @@ -20,6 +20,9 @@ import java.io.IOException; import java.util.Collection; import java.util.List; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; @@ -94,4 +100,52 @@ void replace(Collection compactedFiles, Collection * does not allow broken store files under the actual data directory. */ boolean requireWritingToTmpDirFirst(); + + Reference createReference(Reference reference, Path path) throws IOException; + + /** + * Reads the reference file from the given path. + * @param path the {@link Path} to the reference file in the file system. 
+ * @return a {@link Reference} that points at top/bottom half of a an hfile + */ + Reference readReference(Path path) throws IOException; + + /** + * Returns true if the specified family has reference files + * @return true if family contains reference files + */ + boolean hasReferences() throws IOException; + + StoreFileInfo getStoreFileInfo(final FileStatus fileStatus, final Path initialPath, + final boolean primaryReplica) throws IOException; + + StoreFileInfo getStoreFileInfo(final Path initialPath, final boolean primaryReplica) + throws IOException; + + /** + * Create a new HFileLink + *

+ * It also adds a back-reference to the hfile back-reference directory to simplify the + * reference-count and the cleaning process. + * @param hfileName - Linked HFile name + * @param createBackRef - Whether back reference should be created. Defaults to true. + * @return the file link name. + * @throws IOException on file or parent directory creation failure. + */ + String createHFileLink(final TableName linkedTable, final String linkedRegion, + final String hfileName, final boolean createBackRef) throws IOException; + + /** + * Create a new HFileLink starting from a hfileLink name + *

+ * It also adds a back-reference to the hfile back-reference directory to simplify the + * reference-count and the cleaning process. + * @param hfileName - HFileLink name (it contains hfile-region-table) + * @param createBackRef - Whether back reference should be created. Defaults to true. + * @return the file link name. + * @throws IOException on file or parent directory creation failure. + */ + String createFromHFileLink(final String hfileName, final boolean createBackRef) + throws IOException; + } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java index 794a707062e5..3cf05ec6e831 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java @@ -22,8 +22,11 @@ import java.io.IOException; import java.util.Collection; import java.util.List; +import java.util.regex.Matcher; +import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.io.compress.Compression; @@ -38,6 +41,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFileWriter; import org.apache.hadoop.hbase.regionserver.StoreUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -191,6 +195,173 @@ public final StoreFileWriter createWriter(CreateStoreFileWriterParams params) th return builder.build(); } + @Override + public Reference 
createReference(Reference reference, Path path) throws IOException { + FSDataOutputStream out = ctx.getRegionFileSystem().getFileSystem().create(path, false); + try { + out.write(reference.toByteArray()); + } finally { + out.close(); + } + return reference; + } + + /** + * Returns true if the specified family has reference files + * The column family is taken from the tracker's store context. + * @return true if family contains reference files + */ + public boolean hasReferences() throws IOException { + Path storeDir = ctx.getRegionFileSystem().getStoreDir(ctx.getFamily().getNameAsString()); + FileStatus[] files = + CommonFSUtils.listStatus(ctx.getRegionFileSystem().getFileSystem(), storeDir); + if (files != null) { + for (FileStatus stat : files) { + if (stat.isDirectory()) { + continue; + } + if (StoreFileInfo.isReference(stat.getPath())) { + LOG.trace("Reference {}", stat.getPath()); + return true; + } + } + } + return false; + } + + @Override + public Reference readReference(final Path p) throws IOException { + InputStream in = ctx.getRegionFileSystem().getFileSystem().open(p); + try { + // I need to be able to move back in the stream if this is not a pb serialization so I can + // do the Writable decoding instead. + in = in.markSupported() ? in : new BufferedInputStream(in); + int pblen = ProtobufUtil.lengthOfPBMagic(); + in.mark(pblen); + byte[] pbuf = new byte[pblen]; + IOUtils.readFully(in, pbuf, 0, pblen); + // WATCHOUT! Return in middle of function!!! + if (ProtobufUtil.isPBMagicPrefix(pbuf)) { + return Reference.convert( + org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference.parseFrom(in)); + } + // Else presume Writables. Need to reset the stream since it didn't start w/ pb. + // We won't bother rewriting this Reference as a pb since Reference is transitory. + in.reset(); + Reference r = new Reference(); + DataInputStream dis = new DataInputStream(in); + // Set in = dis so it gets the close below in the finally on our way out. 
+ in = dis; + r.readFields(dis); + return r; + } finally { + in.close(); + } + } + + @Override + public StoreFileInfo getStoreFileInfo(Path initialPath, boolean primaryReplica) + throws IOException { + return getStoreFileInfo(null, initialPath, primaryReplica); + } + + @Override + public StoreFileInfo getStoreFileInfo(FileStatus fileStatus, Path initialPath, + boolean primaryReplica) throws IOException { + FileSystem fs = this.ctx.getRegionFileSystem().getFileSystem(); + assert fs != null; + assert initialPath != null; + assert conf != null; + Reference reference = null; + HFileLink link = null; + long createdTimestamp = 0; + long size = 0; + Path p = initialPath; + if (HFileLink.isHFileLink(p)) { + // HFileLink + reference = null; + link = HFileLink.buildFromHFileLinkPattern(conf, p); + LOG.trace("{} is a link", p); + } else if (StoreFileInfo.isReference(p)) { + reference = readReference(p); + Path referencePath = StoreFileInfo.getReferredToFile(p); + if (HFileLink.isHFileLink(referencePath)) { + // HFileLink Reference + link = HFileLink.buildFromHFileLinkPattern(conf, referencePath); + } else { + // Reference + link = null; + } + LOG.trace("{} is a {} reference to {}", p, reference.getFileRegion(), referencePath); + } else + if (StoreFileInfo.isHFile(p) || StoreFileInfo.isMobFile(p) || StoreFileInfo.isMobRefFile(p)) { + // HFile + if (fileStatus != null) { + createdTimestamp = fileStatus.getModificationTime(); + size = fileStatus.getLen(); + } else { + FileStatus fStatus = fs.getFileStatus(initialPath); + createdTimestamp = fStatus.getModificationTime(); + size = fStatus.getLen(); + } + } else { + throw new IOException("path=" + p + " doesn't look like a valid StoreFile"); + } + return new StoreFileInfo(conf, fs, createdTimestamp, initialPath, size, reference, link, + isPrimaryReplica); + } + + public String createHFileLink(final TableName linkedTable, final String linkedRegion, + final String hfileName, final boolean createBackRef) throws IOException { + String 
name = HFileLink.createHFileLinkName(linkedTable, linkedRegion, hfileName); + String refName = HFileLink.createBackReferenceName(ctx.getTableName().toString(), + ctx.getRegionInfo().getEncodedName()); + + FileSystem fs = ctx.getRegionFileSystem().getFileSystem(); + // Make sure the destination directory exists + fs.mkdirs(ctx.getFamilyStoreDirectoryPath()); + + // Make sure the FileLink reference directory exists + Path archiveStoreDir = HFileArchiveUtil.getStoreArchivePath(conf, linkedTable, linkedRegion, + ctx.getFamily().getNameAsString()); + Path backRefPath = null; + if (createBackRef) { + Path backRefssDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName); + fs.mkdirs(backRefssDir); + + // Create the reference for the link + backRefPath = new Path(backRefssDir, refName); + fs.createNewFile(backRefPath); + } + try { + // Create the link + if (fs.createNewFile(new Path(ctx.getFamilyStoreDirectoryPath(), name))) { + return name; + } + } catch (IOException e) { + LOG.error("couldn't create the link=" + name + " for " + ctx.getFamilyStoreDirectoryPath(), + e); + // Revert the reference if the link creation failed + if (createBackRef) { + fs.delete(backRefPath, false); + } + throw e; + } + throw new IOException("File link=" + name + " already exists under " + + ctx.getFamilyStoreDirectoryPath() + " folder."); + + } + + public String createFromHFileLink(final String hfileLinkName, final boolean createBackRef) + throws IOException { + Matcher m = HFileLink.LINK_NAME_PATTERN.matcher(hfileLinkName); + if (!m.matches()) { + throw new IllegalArgumentException(hfileLinkName + " is not a valid HFileLink name!"); + } + return createHFileLink(TableName.valueOf(m.group(1), m.group(2)), m.group(3), m.group(4), + createBackRef); + } + /** * For primary replica, we will call load once when opening a region, and the implementation could * choose to do some cleanup work. 
So here we use {@code readOnly} to indicate that whether you diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java index dab581041ee5..1b72dc58ff4e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.backup.HFileArchiver; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionInfo; @@ -659,6 +660,21 @@ private void cloneRegion(final RegionInfo newRegionInfo, final Path regionDir, for (SnapshotRegionManifest.FamilyFiles familyFiles : manifest.getFamilyFilesList()) { Path familyDir = new Path(regionDir, familyFiles.getFamilyName().toStringUtf8()); List clonedFiles = new ArrayList<>(); + Path regionPath = new Path(tableDir, newRegionInfo.getEncodedName()); + HRegionFileSystem regionFS = (fs.exists(regionPath)) + ? 
HRegionFileSystem.openRegionFromFileSystem(conf, fs, tableDir, newRegionInfo, false) + : HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, newRegionInfo); + + Configuration sftConf = StoreUtils.createStoreConfiguration(conf, tableDesc, + tableDesc.getColumnFamily(familyFiles.getFamilyName().toByteArray())); + StoreFileTracker tracker = + StoreFileTrackerFactory + .create(sftConf, true, + StoreContext.getBuilder().withFamilyStoreDirectoryPath(familyDir) + .withRegionFileSystem(regionFS) + .withColumnFamilyDescriptor( + ColumnFamilyDescriptorBuilder.of(familyFiles.getFamilyName().toByteArray())) + .build()); for (SnapshotRegionManifest.StoreFile storeFile : familyFiles.getStoreFilesList()) { LOG.info("Adding HFileLink " + storeFile.getName() + " from cloned region " + "in snapshot " + snapshotName + " to table=" + tableName); @@ -724,11 +740,12 @@ private String restoreStoreFile(final Path familyDir, final RegionInfo regionInf throws IOException { String hfileName = storeFile.getName(); if (HFileLink.isHFileLink(hfileName)) { - return HFileLink.createFromHFileLink(conf, fs, familyDir, hfileName, createBackRef); + return tracker.createFromHFileLink(hfileName, createBackRef); } else if (StoreFileInfo.isReference(hfileName)) { return restoreReferenceFile(familyDir, regionInfo, storeFile); } else { - return HFileLink.create(conf, fs, familyDir, regionInfo, hfileName, createBackRef); + return tracker.createHFileLink(regionInfo.getTable(), regionInfo.getEncodedName(), hfileName, + createBackRef); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java index 5feee61bee00..97e420c3c82f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java @@ -51,6 +51,8 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -684,7 +686,9 @@ static LinkedList> splitScan(LinkedList test) throws Exceptio Bytes.toBytes("testPrefetchWhenHFileLink")); Path storeFilePath = regionFs.commitStoreFile("cf", writer.getPath()); - Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", "cf")); - HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName()); + final RegionInfo dstHri = + RegionInfoBuilder.newBuilder(TableName.valueOf("testPrefetchWhenHFileLink")).build(); + HRegionFileSystem dstRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs, + CommonFSUtils.getTableDir(testDir, dstHri.getTable()), dstHri); + Path dstPath = new Path(regionFs.getTableDir(), new Path(dstHri.getRegionNameAsString(), "cf")); + StoreFileTracker sft = StoreFileTrackerFactory.create(testConf, false, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(dstRegionFs.getRegionDir(), "cf")) + .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of("cf")) + .withRegionFileSystem(dstRegionFs).build()); + sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true); Path linkFilePath = new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName())); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java index 2ad014ffa3f6..93675112ff68 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java @@ -35,9 +35,14 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.io.HFileLink; +import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; +import org.apache.hadoop.hbase.regionserver.StoreContext; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.CommonFSUtils; @@ -125,7 +130,14 @@ public void configureDirectoriesAndLinks() throws IOException { hfilePath = new Path(familyPath, hfileName); fs.createNewFile(hfilePath); - createLink(true); + HRegionFileSystem regionFS = HRegionFileSystem.create(conf, fs, + CommonFSUtils.getTableDir(rootDir, tableLinkName), hriLink); + StoreFileTracker sft = StoreFileTrackerFactory.create(conf, true, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(regionFS.getRegionDir(), familyName)) + .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName)) + .withRegionFileSystem(regionFS).build()); + createLink(sft, true); // Initialize cleaner conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, TTL); @@ -133,11 +145,12 @@ public void configureDirectoriesAndLinks() throws IOException { cleaner = new HFileCleaner(1000, server, conf, fs, archiveDir, POOL); } - private void createLink(boolean createBackReference) throws IOException { + private void createLink(StoreFileTracker sft, boolean createBackReference) throws IOException { // Create 
link to hfile familyLinkPath = getFamilyDirPath(rootDir, tableLinkName, hriLink.getEncodedName(), familyName); fs.mkdirs(familyLinkPath); - hfileLinkName = HFileLink.create(conf, fs, familyLinkPath, hri, hfileName, createBackReference); + hfileLinkName = + sft.createHFileLink(hri.getTable(), hri.getEncodedName(), hfileName, createBackReference); linkBackRefDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName); assertTrue(fs.exists(linkBackRefDir)); backRefs = fs.listStatus(linkBackRefDir); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java index feae6a87deb0..a5a96ca76911 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.executor.ExecutorService; @@ -41,6 +42,10 @@ import org.apache.hadoop.hbase.master.cleaner.HFileCleaner; import org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner; import org.apache.hadoop.hbase.procedure.ProcedureCoordinator; +import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; +import org.apache.hadoop.hbase.regionserver.StoreContext; +import 
org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -223,9 +228,14 @@ public void testDisableSnapshotAndNotDeleteBackReference() throws Exception { Path hfilePath = new Path(familyPath, hfileName); fs.createNewFile(hfilePath); // Create link to hfile - Path familyLinkPath = - getFamilyDirPath(rootDir, tableLinkName, hriLink.getEncodedName(), familyName); - HFileLink.create(conf, fs, familyLinkPath, hri, hfileName); + HRegionFileSystem regionFS = HRegionFileSystem.create(conf, fs, + CommonFSUtils.getTableDir(rootDir, tableLinkName), hriLink); + StoreFileTracker sft = StoreFileTrackerFactory.create(conf, true, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(regionFS.getRegionDir(), familyName)) + .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName)) + .withRegionFileSystem(regionFS).build()); + sft.createHFileLink(hri.getTable(), hri.getEncodedName(), hfileName, true); Path linkBackRefDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName); assertTrue(fs.exists(linkBackRefDir)); FileStatus[] backRefs = fs.listStatus(linkBackRefDir); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java index b93e0472bd71..dcfd65ca2bef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java @@ -163,8 +163,13 @@ public void testBasicHalfAndHFileLinkMapFile() throws Exception { writeStoreFile(writer); Path sfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath()); - HStoreFile sf 
= new HStoreFile(this.fs, sfPath, conf, cacheConf, BloomType.NONE, true); - checkHalfHFile(regionFs, sf); + StoreFileTracker sft = StoreFileTrackerFactory.create(conf, false, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(regionFs.getRegionDir(), TEST_FAMILY)) + .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)) + .withRegionFileSystem(regionFs).build()); + HStoreFile sf = new HStoreFile(this.fs, sfPath, conf, cacheConf, BloomType.NONE, true, sft); + checkHalfHFile(regionFs, sf, sft); } private void writeStoreFile(final StoreFileWriter writer) throws IOException { @@ -320,13 +325,23 @@ public void testHFileLink() throws IOException { writeStoreFile(writer); Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath()); - Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", TEST_FAMILY)); - HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName()); + Path dstPath = + new Path(regionFs.getTableDir(), new Path(dstHri.getRegionNameAsString(), TEST_FAMILY)); Path linkFilePath = new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName())); // Try to open store file from link - StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath, true); + + // this should be the SFT for the destination link file path, though it is not + // being used right now, for the next patch file link creation logic also would + // move to SFT interface. 
+ StoreFileTracker sft = StoreFileTrackerFactory.create(testConf, false, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(dstHri.getRegionNameAsString(), TEST_FAMILY)) + .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)) + .withRegionFileSystem(dstRegionFs).build()); + sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true); + StoreFileInfo storeFileInfo = sft.getStoreFileInfo(linkFilePath, true); HStoreFile hsf = new HStoreFile(storeFileInfo, BloomType.NONE, cacheConf); assertTrue(storeFileInfo.isLink()); hsf.initReader(); @@ -368,16 +383,36 @@ public void testReferenceToHFileLink() throws IOException { HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()), hriClone); Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY); - HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName()); Path linkFilePath = new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName())); // create splits of the link.
// /clone/splitA//, // /clone/splitB// - HRegionInfo splitHriA = new HRegionInfo(hri.getTable(), null, SPLITKEY); - HRegionInfo splitHriB = new HRegionInfo(hri.getTable(), SPLITKEY, null); - HStoreFile f = new HStoreFile(fs, linkFilePath, testConf, cacheConf, BloomType.NONE, true); + RegionInfo splitHriA = RegionInfoBuilder.newBuilder(hri.getTable()).setEndKey(SPLITKEY).build(); + RegionInfo splitHriB = + RegionInfoBuilder.newBuilder(hri.getTable()).setStartKey(SPLITKEY).build(); + + StoreFileTracker sft = StoreFileTrackerFactory.create(testConf, true, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(hriClone.getRegionNameAsString(), TEST_FAMILY)) + .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)) + .withRegionFileSystem(cloneRegionFs).build()); + sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true); + + HRegionFileSystem splitRegionAFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs, + CommonFSUtils.getTableDir(testDir, splitHriA.getTable()), splitHriA); + StoreFileTracker sftA = StoreFileTrackerFactory.create(testConf, true, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(splitHriA.getRegionNameAsString(), TEST_FAMILY)) + .withRegionFileSystem(splitRegionAFs).build()); + HRegionFileSystem splitRegionBFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs, + CommonFSUtils.getTableDir(testDir, splitHriB.getTable()), splitHriB); + StoreFileTracker sftB = StoreFileTrackerFactory.create(testConf, true, + StoreContext.getBuilder() + .withFamilyStoreDirectoryPath(new Path(splitHriB.getRegionNameAsString(), TEST_FAMILY)) + .withRegionFileSystem(splitRegionBFs).build()); + HStoreFile f = new HStoreFile(fs, linkFilePath, testConf, cacheConf, BloomType.NONE, true, sft); f.initReader(); Path pathA = splitStoreFile(cloneRegionFs, splitHriA, TEST_FAMILY, f,
SPLITKEY, true); // top Path pathB = splitStoreFile(cloneRegionFs, splitHriB, TEST_FAMILY, f, SPLITKEY, false);// bottom