From 613323a6a792f3dc6465a3b9f7b4aaa6c11cc1fb Mon Sep 17 00:00:00 2001
From: Andrew Purtell
Date: Fri, 22 Jul 2022 15:07:18 -0700
Subject: [PATCH 1/3] HBASE-27203 Clean up error-prone findings in hbase-client

---
 .../hadoop/hbase/CatalogFamilyFormat.java | 12 +--
 .../hadoop/hbase/ClientMetaTableAccessor.java | 59 ++++++-------
 .../org/apache/hadoop/hbase/ClusterId.java | 1 +
 .../hadoop/hbase/ClusterMetricsBuilder.java | 4 +-
 .../hadoop/hbase/CoprocessorEnvironment.java | 4 +-
 .../hadoop/hbase/HBaseServerException.java | 5 +-
 .../apache/hadoop/hbase/HRegionLocation.java | 4 +-
 .../apache/hadoop/hbase/RegionLocations.java | 2 +
 .../apache/hadoop/hbase/RegionMetrics.java | 8 +-
 .../hadoop/hbase/ServerMetricsBuilder.java | 19 ++--
 .../org/apache/hadoop/hbase/UserMetrics.java | 4 +-
 .../hadoop/hbase/UserMetricsBuilder.java | 4 +-
 .../hadoop/hbase/client/AbstractResponse.java | 6 +-
 .../AbstractRpcBasedConnectionRegistry.java | 2 +-
 .../org/apache/hadoop/hbase/client/Admin.java | 14 ++-
 .../hbase/client/AdminOverAsyncAdmin.java | 1 +
 .../apache/hadoop/hbase/client/Append.java | 2 +-
 .../hadoop/hbase/client/AsyncAdmin.java | 48 ++++------
 .../AsyncAdminRequestRetryingCaller.java | 3 -
 .../hadoop/hbase/client/AsyncTable.java | 69 ++++++-------
 ...talogReplicaLoadBalanceSimpleSelector.java | 10 ++-
 .../hadoop/hbase/client/CheckAndMutate.java | 8 ++
 .../hbase/client/ClientIdGenerator.java | 17 ++--
 .../hbase/client/ColumnFamilyDescriptor.java | 26 +++---
 .../client/ColumnFamilyDescriptorBuilder.java | 65 ++++++--------
 .../hadoop/hbase/client/CompactType.java | 6 +-
 .../apache/hadoop/hbase/client/Delete.java | 4 +-
 .../org/apache/hadoop/hbase/client/Get.java | 2 +-
 .../apache/hadoop/hbase/client/Increment.java | 1 +
 .../hadoop/hbase/client/IsolationLevel.java | 7 +-
 .../hadoop/hbase/client/LogQueryFilter.java | 5 +-
 .../MasterCoprocessorRpcChannelImpl.java | 4 +-
 .../hadoop/hbase/client/MasterRegistry.java | 4 +-
 .../hadoop/hbase/client/MultiResponse.java | 6 +-
 .../hbase/client/MutableRegionInfo.java | 1 +
 .../apache/hadoop/hbase/client/Mutation.java | 19 ++--
 .../org/apache/hadoop/hbase/client/Put.java | 1 +
 .../org/apache/hadoop/hbase/client/Query.java | 7 +-
 .../hbase/client/RawAsyncHBaseAdmin.java | 86 +++++++++----------
 .../hbase/client/RawAsyncTableImpl.java | 2 +-
 .../client/RegionCoprocessorServiceExec.java | 2 +-
 .../hadoop/hbase/client/RegionInfo.java | 33 +++----
 .../hbase/client/RegionReplicaUtil.java | 4 +-
 ...RegionServerCoprocessorRpcChannelImpl.java | 4 +-
 .../apache/hadoop/hbase/client/Result.java | 4 +-
 .../apache/hadoop/hbase/client/RowAccess.java | 1 -
 .../hadoop/hbase/client/RowMutations.java | 2 +
 .../org/apache/hadoop/hbase/client/Scan.java | 6 +-
 .../hbase/client/ScanResultConsumer.java | 3 +-
 .../hadoop/hbase/client/ServiceCaller.java | 8 +-
 .../hadoop/hbase/client/SlowLogParams.java | 5 +-
 .../org/apache/hadoop/hbase/client/Table.java | 13 ++-
 .../hadoop/hbase/client/TableDescriptor.java | 4 +-
 .../hbase/client/TableDescriptorBuilder.java | 9 +-
 .../hbase/client/TableOverAsyncTable.java | 16 ++--
 .../hadoop/hbase/client/TableState.java | 18 ++--
 .../backoff/ClientBackoffPolicyFactory.java | 4 -
 .../ExponentialClientBackoffPolicy.java | 4 -
 .../BigDecimalColumnInterpreter.java | 2 +-
 .../ReplicationPeerConfigUtil.java | 32 +++----
 .../hbase/coprocessor/ColumnInterpreter.java | 5 --
 .../exceptions/RegionOpeningException.java | 4 +-
 .../hbase/filter/BigDecimalComparator.java | 8 +-
 .../hadoop/hbase/filter/BinaryComparator.java | 18 ++--
 .../filter/BinaryComponentComparator.java | 7 +-
 .../hbase/filter/BinaryPrefixComparator.java | 18 ++--
 .../hadoop/hbase/filter/BitComparator.java | 17 ++--
 .../hbase/filter/ColumnCountGetFilter.java | 15 ++--
 .../hbase/filter/ColumnPaginationFilter.java | 28 +++---
 .../hbase/filter/ColumnPrefixFilter.java | 17 ++--
 .../hbase/filter/ColumnRangeFilter.java | 10 +--
 .../hbase/filter/ColumnValueFilter.java | 12 +--
 .../hadoop/hbase/filter/CompareFilter.java | 12 ++-
 .../hbase/filter/DependentColumnFilter.java | 16 ++--
 .../hadoop/hbase/filter/FamilyFilter.java | 14 +--
 .../apache/hadoop/hbase/filter/Filter.java | 1 -
 .../hadoop/hbase/filter/FilterList.java | 15 ++--
 .../hbase/filter/FilterListWithAND.java | 6 +-
 .../hadoop/hbase/filter/FilterListWithOR.java | 6 +-
 .../hbase/filter/FirstKeyOnlyFilter.java | 21 +++--
 ...FirstKeyValueMatchingQualifiersFilter.java | 4 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.java | 26 +++---
 .../hbase/filter/InclusiveStopFilter.java | 19 ++--
 .../hadoop/hbase/filter/KeyOnlyFilter.java | 18 ++--
 .../hadoop/hbase/filter/LongComparator.java | 14 ++-
 .../hbase/filter/MultiRowRangeFilter.java | 19 ++--
 .../filter/MultipleColumnPrefixFilter.java | 19 ++--
 .../hadoop/hbase/filter/NullComparator.java | 18 ++--
 .../hadoop/hbase/filter/PageFilter.java | 9 +-
 .../hadoop/hbase/filter/ParseFilter.java | 5 +-
 .../hadoop/hbase/filter/PrefixFilter.java | 17 ++--
 .../hadoop/hbase/filter/QualifierFilter.java | 16 ++--
 .../hadoop/hbase/filter/RandomRowFilter.java | 19 ++--
 .../hbase/filter/RegexStringComparator.java | 17 ++--
 .../apache/hadoop/hbase/filter/RowFilter.java | 14 +--
 .../SingleColumnValueExcludeFilter.java | 18 ++--
 .../hbase/filter/SingleColumnValueFilter.java | 5 +-
 .../hadoop/hbase/filter/SkipFilter.java | 15 ++--
 .../hbase/filter/SubstringComparator.java | 14 +--
 .../hadoop/hbase/filter/TimestampsFilter.java | 15 ++--
 .../hadoop/hbase/filter/ValueFilter.java | 14 +--
 .../hadoop/hbase/filter/WhileMatchFilter.java | 17 ++--
 .../hadoop/hbase/filter/package-info.java | 21 +++--
 .../hadoop/hbase/ipc/AbstractRpcClient.java | 6 +-
 .../hbase/ipc/BlockingRpcConnection.java | 7 +-
 .../hadoop/hbase/ipc/CellBlockBuilder.java | 2 +
 .../apache/hadoop/hbase/ipc/ConnectionId.java | 16 ++--
 .../hadoop/hbase/ipc/HBaseRpcController.java | 2 +
 .../hbase/ipc/NettyRpcDuplexHandler.java | 9 +-
 .../apache/hadoop/hbase/ipc/RpcClient.java | 4 +-
 .../hadoop/hbase/master/RegionState.java | 7 +-
 .../hadoop/hbase/protobuf/ProtobufMagic.java | 10 +--
 .../hadoop/hbase/quotas/QuotaRetriever.java | 4 +-
 .../hbase/quotas/SpaceLimitSettings.java | 4 +-
 .../hadoop/hbase/security/SaslUtil.java | 8 +-
 .../security/access/AccessControlClient.java | 56 +++++++-----
 .../security/access/AccessControlUtil.java | 17 ++--
 .../access/ShadedAccessControlUtil.java | 14 ++-
 .../security/access/TablePermission.java | 4 +-
 ...tractSaslClientAuthenticationProvider.java | 1 +
 .../security/visibility/VisibilityClient.java | 10 ++-
 .../hbase/shaded/protobuf/ProtobufUtil.java | 28 ++----
 .../shaded/protobuf/RequestConverter.java | 13 +--
 .../apache/hadoop/hbase/util/Writables.java | 1 +
 .../hbase/zookeeper/ReadOnlyZKClient.java | 4 +
 .../hadoop/hbase/zookeeper/ZKMetadata.java | 2 +-
 .../client/TestAsyncTableRpcPriority.java | 14 +--
 .../hbase/client/TestAsyncTableTracing.java | 14 +--
 .../client/TestCoprocessorDescriptor.java | 4 -
 .../apache/hadoop/hbase/client/TestGet.java | 3 +-
 .../hadoop/hbase/client/TestMutation.java | 10 +--
 .../hbase/client/TestRegionInfoDisplay.java | 4 +-
 .../apache/hadoop/hbase/client/TestScan.java | 33 +++----
 .../client/TestSimpleRequestController.java | 7 +-
 .../hbase/filter/TestKeyOnlyFilter.java | 4 +-
 .../hbase/ipc/TestCellBlockBuilder.java | 9 +-
 .../hbase/ipc/TestNettyRpcConnection.java | 4 +-
 .../TestQuotaGlobalsSettingsBypass.java | 1 +
 .../security/TestHBaseSaslRpcClient.java | 29 +++---
 .../hbase/util/TestRoundRobinPoolMap.java | 6 +-
 .../hbase/util/TestThreadLocalPoolMap.java | 6 +-
 141 files changed, 881 insertions(+), 825 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/CatalogFamilyFormat.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/CatalogFamilyFormat.java
index d714c47359b9..c2385084a3ea 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/CatalogFamilyFormat.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/CatalogFamilyFormat.java
@@ -349,8 +349,8 @@ public static TableState getTableState(Result r) throws IOException {
   }
 
   /**
-   * @return Deserialized values of <qualifier,regioninfo> pairs taken from column values that
-   * match the regex 'info:merge.*' in array of cells.
+   * Returns Deserialized values of <qualifier,regioninfo> pairs taken from column values that
+   * match the regex 'info:merge.*' in array of cells.
    */
   @Nullable
   public static Map getMergeRegionsWithName(Cell[] cells) {
@@ -376,8 +376,8 @@ public static Map getMergeRegionsWithName(Cell[] cells) {
   }
 
   /**
-   * @return Deserialized regioninfo values taken from column values that match the regex
-   * 'info:merge.*' in array of cells.
+   * Returns Deserialized regioninfo values taken from column values that match the regex
+   * 'info:merge.*' in array of cells.
    */
   @Nullable
   public static List getMergeRegions(Cell[] cells) {
@@ -386,8 +386,8 @@ public static List getMergeRegions(Cell[] cells) {
   }
 
   /**
-   * @return True if any merge regions present in cells; i.e. the column in
-   * cell matches the regex 'info:merge.*'.
+   * Returns True if any merge regions present in cells; i.e. the column in
+   * cell matches the regex 'info:merge.*'.
    */
   public static boolean hasMergeRegions(Cell[] cells) {
     for (Cell cell : cells) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClientMetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClientMetaTableAccessor.java
index b75398dd1cf9..42bfd757e0d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClientMetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClientMetaTableAccessor.java
@@ -59,6 +59,7 @@ private ClientMetaTableAccessor() {
   }
 
   @InterfaceAudience.Private
+  @SuppressWarnings("ImmutableEnumChecker")
   public enum QueryType {
     ALL(HConstants.TABLE_FAMILY, HConstants.CATALOG_FAMILY),
     REGION(HConstants.CATALOG_FAMILY),
@@ -100,11 +101,7 @@ public static CompletableFuture> getTableState(AsyncTable> getRegionLocation(AsyncTable metaTable, byte[] regionName) {
     CompletableFuture> future = new CompletableFuture<>();
@@ -126,11 +123,7 @@ public static CompletableFuture> getTableState(AsyncTable> getRegionLocationWithEncodedName(AsyncTable metaTable, byte[] encodedRegionName) {
     CompletableFuture> future = new CompletableFuture<>();
@@ -167,8 +160,9 @@ private static Optional getTableState(Result r) throws IOException {
   }
 
   /**
-   * Used to get all region locations for the specific table. n * @param tableName table we're
-   * looking for, can be null for getting all regions
+   * Used to get all region locations for the specific table
+   * @param metaTable scanner over meta table
+   * @param tableName table we're looking for, can be null for getting all regions
    * @return the list of region locations. The return value will be wrapped by a
    * {@link CompletableFuture}.
    */
@@ -191,8 +185,9 @@ public static CompletableFuture> getTableHRegionLocations(
   }
 
   /**
-   * Used to get table regions' info and server. n * @param tableName table we're looking for, can
-   * be null for getting all regions
+   * Used to get table regions' info and server.
+   * @param metaTable scanner over meta table
+   * @param tableName table we're looking for, can be null for getting all regions
    * @param excludeOfflinedSplitParents don't return split parents
    * @return the list of regioninfos and server. The return value will be wrapped by a
    * {@link CompletableFuture}.
@@ -221,9 +216,11 @@ private static CompletableFuture>> getTableReg
   }
 
   /**
-   * Performs a scan of META table for given table. n * @param tableName table withing we scan
-   * @param type scanned part of meta
-   * @param visitor Visitor invoked against each row
+   * Performs a scan of META table for given table.
+   * @param metaTable scanner over meta table
+   * @param tableName table within we scan
+   * @param type scanned part of meta
+   * @param visitor Visitor invoked against each row
    */
   private static CompletableFuture scanMeta(AsyncTable metaTable,
     TableName tableName, QueryType type, final Visitor visitor) {
@@ -232,11 +229,13 @@ private static CompletableFuture scanMeta(AsyncTable
   scanMeta(AsyncTable metaTable, byte[] startRow, byte[] stopRow, QueryType type, int maxRows, final Visitor visitor) {
@@ -456,19 +455,12 @@ private static Scan getMetaScan(AsyncTable metaTable, int rowUpperLimit) {
     return scan;
   }
 
-  /**
-   * Returns an HRegionLocationList extracted from the result.
-   * @return an HRegionLocationList containing all locations for the region range or null if we
-   * can't deserialize the result.
-   */
+  /** Returns an HRegionLocationList extracted from the result. */
   private static Optional getRegionLocations(Result r) {
     return Optional.ofNullable(CatalogFamilyFormat.getRegionLocations(r));
   }
 
-  /**
-   * @param tableName table we're working with
-   * @return start row for scanning META according to query type
-   */
+  /** Returns start row for scanning META according to query type */
   public static byte[] getTableStartRowForMeta(TableName tableName, QueryType type) {
     if (tableName == null) {
       return null;
@@ -490,10 +482,7 @@ public static byte[] getTableStartRowForMeta(TableName tableName, QueryType type
     }
   }
 
-  /**
-   * @param tableName table we're working with
-   * @return stop row for scanning META according to query type
-   */
+  /** Returns stop row for scanning META according to query type */
   public static byte[] getTableStopRowForMeta(TableName tableName, QueryType type) {
     if (tableName == null) {
       return null;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
index 9b7a5de19bd3..8c675c4522e6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
@@ -51,6 +51,7 @@ public byte[] toByteArray() {
   }
 
   /**
+   * Parse the serialized representation of the {@link ClusterId}
    * @param bytes A pb serialized {@link ClusterId} instance with pb magic prefix
    * @return An instance of {@link ClusterId} made from bytes n * @see #toByteArray()
    */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterMetricsBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterMetricsBuilder.java
index 5695f5b65ade..7254209487b2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterMetricsBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterMetricsBuilder.java
@@ -67,13 +67,13 @@ public static ClusterStatusProtos.ClusterStatus toClusterStatus(ClusterMetrics m
         .collect(Collectors.toList()))
       .addAllTableRegionStatesCount(metrics.getTableRegionStatesCount().entrySet().stream()
         .map(status -> ClusterStatusProtos.TableRegionStatesCount.newBuilder()
-          .setTableName(ProtobufUtil.toProtoTableName((status.getKey())))
+          .setTableName(ProtobufUtil.toProtoTableName(status.getKey()))
           .setRegionStatesCount(ProtobufUtil.toTableRegionStatesCount(status.getValue())).build())
         .collect(Collectors.toList()))
       .addAllDecommissionedServers(metrics.getDecommissionedServerNames().stream()
         .map(ProtobufUtil::toServerName).collect(Collectors.toList()));
     if (metrics.getMasterName() != null) {
-      builder.setMaster(ProtobufUtil.toServerName((metrics.getMasterName())));
+      builder.setMaster(ProtobufUtil.toServerName(metrics.getMasterName()));
     }
     if (metrics.getMasterTasks() != null) {
       builder.addAllMasterTasks(metrics.getMasterTasks().stream()
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
index 94909f1c14ef..32e06d610247 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
@@ -44,8 +44,8 @@ public interface CoprocessorEnvironment {
   int getLoadSequence();
 
   /**
-   * @return a Read-only Configuration; throws {@link UnsupportedOperationException} if you try to
-   * set a configuration.
+   * Returns a Read-only Configuration; throws {@link UnsupportedOperationException} if you try to
+   * set a configuration.
    */
   Configuration getConfiguration();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HBaseServerException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HBaseServerException.java
index 3484995c1bfd..47a86f9492f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HBaseServerException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HBaseServerException.java
@@ -44,10 +44,7 @@ public HBaseServerException(boolean serverOverloaded, String message) {
     this.serverOverloaded = serverOverloaded;
   }
 
-  /**
-   * @param t throwable to check for server overloaded state
-   * @return True if the server was considered overloaded when the exception was thrown
-   */
+  /** Returns True if the server was considered overloaded when the exception was thrown */
   public static boolean isServerOverloaded(Throwable t) {
     if (t instanceof HBaseServerException) {
       return ((HBaseServerException) t).isServerOverloaded();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
index 0decb58bc20b..ebf6d919374d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
@@ -100,8 +100,8 @@ public long getSeqNum() {
   }
 
   /**
-   * @return String made of hostname and port formatted as per
-   * {@link Addressing#createHostAndPortStr(String, int)}
+   * Returns String made of hostname and port formatted as per
+   * {@link Addressing#createHostAndPortStr(String, int)}
    */
   public String getHostnamePort() {
     return Addressing.createHostAndPortStr(this.getHostname(), this.getPort());
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
index 4d6dd6d43fa3..4c0390c6c3be 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java
@@ -208,6 +208,7 @@ public RegionLocations removeElementsWithNullLocation() {
    * @param other the locations to merge with
    * @return an RegionLocations object with merged locations or the same object if nothing is merged
    */
+  @SuppressWarnings("ReferenceEquality")
   public RegionLocations mergeLocations(RegionLocations other) {
     assert other != null;
 
@@ -280,6 +281,7 @@ private HRegionLocation selectRegionLocation(HRegionLocation oldLocation,
    * @return an RegionLocations object with updated locations or the same object if nothing is
    * updated
    */
+  @SuppressWarnings("ReferenceEquality")
   public RegionLocations updateLocation(HRegionLocation location, boolean checkForEquals, boolean force) {
     assert location != null;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionMetrics.java
index 88527e86442c..47b36a7a1516 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionMetrics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionMetrics.java
@@ -53,8 +53,8 @@ public interface RegionMetrics {
   public long getCpRequestCount();
 
   /**
-   * @return the number of write requests and read requests and coprocessor service requests made to
-   * region
+   * Returns the number of write requests and read requests and coprocessor service requests made to
+   * region
    */
   default long getRequestCount() {
     return getReadRequestCount() + getWriteRequestCount() + getCpRequestCount();
@@ -113,8 +113,8 @@ default String getNameAsString() {
   int getStoreRefCount();
 
   /**
-   * @return the max reference count for any store file among all compacted stores files of this
-   * region
+   * Returns the max reference count for any store file among all compacted stores files of this
+   * region
    */
   int getMaxCompactedStoreFileRefCount();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerMetricsBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerMetricsBuilder.java
index 99f8520aa362..7a0312f22fdc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerMetricsBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerMetricsBuilder.java
@@ -44,10 +44,6 @@
 @InterfaceAudience.Private
 public final class ServerMetricsBuilder {
 
-  /**
-   * @param sn the server name
-   * @return a empty metrics
-   */
   public static ServerMetrics of(ServerName sn) {
     return newBuilder(sn).build();
   }
@@ -300,6 +296,7 @@ public int getVersionNumber() {
     return versionNumber;
   }
 
+  @Override
   public String getVersion() {
     return version;
   }
@@ -414,16 +411,18 @@ public String toString() {
       int currentMaxCompactedStoreFileRefCount = r.getMaxCompactedStoreFileRefCount();
       maxCompactedStoreFileRefCount =
         Math.max(maxCompactedStoreFileRefCount, currentMaxCompactedStoreFileRefCount);
-      uncompressedStoreFileSizeMB += r.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE);
-      storeFileSizeMB += r.getStoreFileSize().get(Size.Unit.MEGABYTE);
-      memStoreSizeMB += r.getMemStoreSize().get(Size.Unit.MEGABYTE);
-      storefileIndexSizeKB += r.getStoreFileUncompressedDataIndexSize().get(Size.Unit.KILOBYTE);
+      uncompressedStoreFileSizeMB +=
+        (long) r.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE);
+      storeFileSizeMB += (long) r.getStoreFileSize().get(Size.Unit.MEGABYTE);
+      memStoreSizeMB += (long) r.getMemStoreSize().get(Size.Unit.MEGABYTE);
+      storefileIndexSizeKB +=
+        (long) r.getStoreFileUncompressedDataIndexSize().get(Size.Unit.KILOBYTE);
       readRequestsCount += r.getReadRequestCount();
       cpRequestsCount += r.getCpRequestCount();
       writeRequestsCount += r.getWriteRequestCount();
       filteredReadRequestsCount += r.getFilteredReadRequestCount();
-      rootLevelIndexSizeKB += r.getStoreFileRootLevelIndexSize().get(Size.Unit.KILOBYTE);
-      bloomFilterSizeMB += r.getBloomFilterSize().get(Size.Unit.MEGABYTE);
+      rootLevelIndexSizeKB += (long) r.getStoreFileRootLevelIndexSize().get(Size.Unit.KILOBYTE);
+      bloomFilterSizeMB += (long) r.getBloomFilterSize().get(Size.Unit.MEGABYTE);
       compactedCellCount += r.getCompactedCellCount();
       compactingCellCount += r.getCompactingCellCount();
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetrics.java
index 05108c70e746..681b1f416c78 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetrics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetrics.java
@@ -50,8 +50,8 @@ interface ClientMetrics {
   long getWriteRequestCount();
 
   /**
-   * @return the number of write requests and read requests and coprocessor service requests made by
-   * the user
+   * Returns the number of write requests and read requests and coprocessor service requests made by
+   * the user
    */
   default long getRequestCount() {
     return getReadRequestCount() + getWriteRequestCount();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetricsBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetricsBuilder.java
index ab63f19fec85..4a66283146d9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetricsBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UserMetricsBuilder.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.hadoop.hbase.util.Strings;
@@ -30,7 +31,8 @@ public final class UserMetricsBuilder {
   public static UserMetrics toUserMetrics(ClusterStatusProtos.UserLoad userLoad) {
-    UserMetricsBuilder builder = UserMetricsBuilder.newBuilder(userLoad.getUserName().getBytes());
+    UserMetricsBuilder builder =
+      UserMetricsBuilder.newBuilder(userLoad.getUserName().getBytes(StandardCharsets.UTF_8));
     userLoad.getClientMetricsList().stream()
       .map(clientMetrics -> new ClientMetricsImpl(clientMetrics.getHostName(),
         clientMetrics.getReadRequestsCount(), clientMetrics.getWriteRequestsCount(),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractResponse.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractResponse.java
index bb44defbac6a..b0a33eda4021 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractResponse.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractResponse.java
@@ -27,11 +27,9 @@ abstract class AbstractResponse {
 
   public enum ResponseType {
-    SINGLE(0),
-    MULTI(1);
+    SINGLE,
+    MULTI;
 
-    ResponseType(int value) {
-    }
   }
 
   public abstract ResponseType type();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRpcBasedConnectionRegistry.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRpcBasedConnectionRegistry.java
index 2380335e56b0..6dd14a520ee4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRpcBasedConnectionRegistry.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRpcBasedConnectionRegistry.java
@@ -135,7 +135,7 @@ private void populateStubs(Set addrs) throws IOException {
    * Typically, you can use lambda expression to implement this interface as
    *
    *
-   * (c, s, d) -> s.xxx(c, your request here, d)
+   * (c, s, d) -> s.xxx(c, your request here, d)
    * 
    */
  @FunctionalInterface
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 96923ae84621..34f7cf43308a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -115,6 +115,7 @@ public interface Admin extends Abortable, Closeable {
   Connection getConnection();
 
   /**
+   * Check if a table exists.
    * @param tableName Table to check.
    * @return true if table exists already.
    * @throws IOException if a remote or network exception occurs
@@ -187,7 +188,7 @@ default TableName[] listTableNames(Pattern pattern) throws IOException {
    * Get a table descriptor.
    * @param tableName as a {@link TableName}
    * @return the tableDescriptor
-   * @throws org.apache.hadoop.hbase.TableNotFoundException
+   * @throws org.apache.hadoop.hbase.TableNotFoundException if the table was not found
    * @throws IOException if a remote or network exception occurs
    */
   TableDescriptor getDescriptor(TableName tableName) throws TableNotFoundException, IOException;
@@ -381,6 +382,7 @@ default void disableTable(TableName tableName) throws IOException {
   }
 
   /**
+   * Check if a table is enabled.
    * @param tableName name of table to check
    * @return true if table is on-line
    * @throws IOException if a remote or network exception occurs
@@ -388,6 +390,7 @@ default void disableTable(TableName tableName) throws IOException {
   boolean isTableEnabled(TableName tableName) throws IOException;
 
   /**
+   * Check if a table is disabled.
    * @param tableName name of table to check
    * @return true if table is off-line
    * @throws IOException if a remote or network exception occurs
@@ -395,6 +398,7 @@ default void disableTable(TableName tableName) throws IOException {
   boolean isTableDisabled(TableName tableName) throws IOException;
 
   /**
+   * Check if a table is available.
    * @param tableName name of table to check
    * @return true if all regions of the table are available
    * @throws IOException if a remote or network exception occurs
@@ -1100,6 +1104,7 @@ default ClusterMetrics getClusterMetrics() throws IOException {
 
   ClusterMetrics getClusterMetrics(EnumSet