diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java index 21c2468a2866..93a8d3cb94ff 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java @@ -264,7 +264,7 @@ public long heapSize() { if (this.buf.hasArray()) { return ClassSize.align(FIXED_OVERHEAD + length); } - return ClassSize.align(FIXED_OVERHEAD) + this.getSerializedSize(); + return (long) ClassSize.align(FIXED_OVERHEAD) + this.getSerializedSize(); } @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java index 4fb254b35b4a..027451956ee7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java @@ -106,15 +106,15 @@ public interface Cell extends HeapSize { // 4) Timestamp /** - * @return Long value representing time at which this cell was "Put" into the row. Typically - * represents the time of insertion, but can be any value from 0 to Long.MAX_VALUE. + * Return a long value representing time at which this cell was "Put" into the row. Typically + * represents the time of insertion, but can be any value from 0 to Long.MAX_VALUE. */ long getTimestamp(); // 5) Type /** - * @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc + * Return the byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Use {@link #getType()}. */ @Deprecated @@ -160,7 +160,7 @@ public interface Cell extends HeapSize { byte[] getTagsArray(); /** - * @return the first offset where the tags start in the Cell + * Return the first offset where the tags start in the Cell * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are are now internal. */ @Deprecated diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java index 301223ad2940..8ff86d575de5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java @@ -95,6 +95,7 @@ default int compareRows(byte[] leftRow, byte[] rightRow) { } /** + * Lexicographically compare two rows * @param row ByteBuffer that wraps a row; will read from current position and will reading all * remaining; will not disturb the ByteBuffer internal state. * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both @@ -111,7 +112,7 @@ default int compareRows(ByteBuffer row, Cell cell) { } /** - * Lexographically compares the two cells excluding the row part. It compares family, qualifier, + * Lexicographically compares the two cells excluding the row part. It compares family, qualifier, * timestamp and the type * @param leftCell the left hand side cell * @param rightCell the right hand side cell @@ -121,7 +122,7 @@ default int compareRows(ByteBuffer row, Cell cell) { int compareWithoutRow(Cell leftCell, Cell rightCell); /** - * Lexographically compares the families of the two cells + * Lexicographically compares the families of the two cells * @param leftCell the left hand side cell * @param rightCell the right hand side cell * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both @@ -130,7 +131,7 @@ default int compareRows(ByteBuffer row, Cell cell) { int compareFamilies(Cell leftCell, Cell rightCell); /** - * Lexographically compares the qualifiers of the two cells + * Lexicographically compares the qualifiers of the two cells * @param leftCell the left hand side cell * @param rightCell the right hand side cell * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both @@ -163,10 +164,9 @@ default int compareRows(ByteBuffer row, Cell cell) { int compareTimestamps(long leftCellts, long rightCellts); /** - * @return A dumbed-down, fast comparator for hbase2 base-type, the {@link ByteBufferKeyValue}. - * Create an instance when you make a new memstore, when you know only BBKVs will be - * passed. Do not pollute with types other than BBKV if can be helped; the Comparator will - * slow. + * Return a dumbed-down, fast comparator for hbase2 base-type, the {@link ByteBufferKeyValue}. + * Create an instance when you make a new memstore, when you know only BBKVs will be passed. Do + * not pollute with types other than BBKV if can be helped; the Comparator will slow. */ - Comparator getSimpleComparator(); + Comparator getSimpleComparator(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java index 4b138979beb8..b4d3b5549dbd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase; import java.util.Comparator; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; @@ -691,11 +690,11 @@ public final int compareWithoutRow(final Cell left, final Cell right) { int rFamLength = right.getFamilyLength(); int lQualLength = left.getQualifierLength(); int rQualLength = right.getQualifierLength(); - if (lFamLength + lQualLength == 0 && left.getTypeByte() == Type.Minimum.getCode()) { + if (lFamLength + lQualLength == 0 && left.getTypeByte() == KeyValue.Type.Minimum.getCode()) { // left is "bigger", i.e. it appears later in the sorted order return 1; } - if (rFamLength + rQualLength == 0 && right.getTypeByte() == Type.Minimum.getCode()) { + if (rFamLength + rQualLength == 0 && right.getTypeByte() == KeyValue.Type.Minimum.getCode()) { return -1; } if (lFamLength != rFamLength) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index c4d8c1aafd78..c28d0d87525e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -30,7 +30,6 @@ import java.util.Map.Entry; import java.util.NavigableMap; import java.util.function.Function; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; @@ -486,8 +485,7 @@ private static boolean matchingQualifier(final Cell left, final int lqlength, fi } /** - * Finds if the qualifier part of the cell and the KV serialized byte[] are equal n * @param buf - * the serialized keyvalue format byte[] + * Finds if the qualifier part of the cell and the KV serialized byte[] are equal. * @return true if the qualifier matches, false otherwise */ public static boolean matchingQualifier(final Cell left, final byte[] buf) { @@ -561,8 +559,8 @@ public static boolean matchingTags(final Cell left, final Cell right) { } /** - * @return True if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily} - * or a {@link KeyValue.Type#DeleteColumn} KeyValue type. + * Return true if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily} + * or a {@link KeyValue.Type#DeleteColumn} KeyValue type. */ @SuppressWarnings("deprecation") public static boolean isDelete(final Cell cell) { @@ -572,13 +570,13 @@ public static boolean isDelete(final Cell cell) { /** Returns True if this cell is a Put. */ @SuppressWarnings("deprecation") public static boolean isPut(Cell cell) { - return cell.getTypeByte() == Type.Put.getCode(); + return cell.getTypeByte() == KeyValue.Type.Put.getCode(); } /** * Sets the given timestamp to the cell. Note that this method is a LimitedPrivate API and may - * change between minor releases. nn * @throws IOException when the passed cell is not of type - * {@link ExtendedCell} + * change between minor releases. + * @throws IOException when the passed cell is not of type {@link ExtendedCell} */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) public static void setTimestamp(Cell cell, long ts) throws IOException { @@ -587,8 +585,7 @@ public static void setTimestamp(Cell cell, long ts) throws IOException { /** * Sets the given timestamp to the cell. Note that this method is a LimitedPrivate API and may - * change between minor releases. n * @param ts buffer containing the timestamp value - * @param tsOffset offset to the new timestamp + * change between minor releases. * @throws IOException when the passed cell is not of type {@link ExtendedCell} */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) @@ -603,6 +600,7 @@ public static String getCellKeyAsString(Cell cell) { } /** + * Return the Key portion of the passed cell as a String. * @param cell the cell to convert * @param rowConverter used to convert the row of the cell to a string * @return The Key portion of the passed cell as a String. @@ -623,7 +621,7 @@ public static String getCellKeyAsString(Cell cell, Function rowCon sb.append('/'); sb.append(KeyValue.humanReadableTimestamp(cell.getTimestamp())); sb.append('/'); - sb.append(Type.codeToType(cell.getTypeByte())); + sb.append(KeyValue.Type.codeToType(cell.getTypeByte())); if (!(cell instanceof KeyValue.KeyOnlyKeyValue)) { sb.append("/vlen="); sb.append(cell.getValueLength()); @@ -676,15 +674,14 @@ public static boolean matchingTimestamp(Cell a, Cell b) { return CellComparator.getInstance().compareTimestamps(a.getTimestamp(), b.getTimestamp()) == 0; } - /** - * Compares the row of two keyvalues for equality nn * @return True if rows match. - */ + /** Compares the row of two keyvalues for equality */ public static boolean matchingRows(final Cell left, final Cell right) { short lrowlength = left.getRowLength(); short rrowlength = right.getRowLength(); return matchingRows(left, lrowlength, right, rrowlength); } + /** Compares the row of two keyvalues for equality */ public static boolean matchingRows(final Cell left, final short lrowlength, final Cell right, final short rrowlength) { if (lrowlength != rrowlength) return false; @@ -708,10 +705,7 @@ public static boolean matchingRows(final Cell left, final short lrowlength, fina right.getRowOffset(), rrowlength); } - /** - * Compares the row and column of two keyvalues for equality nn * @return True if same row and - * column. - */ + /** Compares the row and column of two keyvalues for equality */ public static boolean matchingRowColumn(final Cell left, final Cell right) { short lrowlength = left.getRowLength(); short rrowlength = right.getRowLength(); @@ -738,6 +732,7 @@ public static boolean matchingRowColumn(final Cell left, final Cell right) { return matchingColumn(left, lfamlength, lqlength, right, rfamlength, rqlength); } + /** Compares the row and column of two keyvalues for equality */ public static boolean matchingRowColumnBytes(final Cell left, final Cell right) { int lrowlength = left.getRowLength(); int rrowlength = right.getRowLength(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java index b93610f26d0f..a3eb889f7717 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java @@ -134,6 +134,7 @@ public ChoreService(final String coreThreadPoolPrefix, int corePoolSize, boolean } /** + * Schedule a chore. * @param chore Chore to be scheduled. If the chore is already scheduled with another ChoreService * instance, that schedule will be cancelled (i.e. a Chore can only ever be scheduled * with a single ChoreService instance). @@ -252,8 +253,8 @@ int getNumberOfScheduledChores() { } /** - * @return number of chores that this service currently has scheduled that are missing their - * scheduled start time + * Return number of chores that this service currently has scheduled that are missing their + * scheduled start time */ int getNumberOfChoresMissingStartTime() { return choresMissingStartTime.size(); @@ -273,9 +274,6 @@ static class ChoreServiceThreadFactory implements ThreadFactory { private final static String THREAD_NAME_SUFFIX = ".Chore."; private AtomicInteger threadNumber = new AtomicInteger(1); - /** - * @param threadPrefix The prefix given to all threads created by this factory - */ public ChoreServiceThreadFactory(final String threadPrefix) { this.threadPrefix = threadPrefix; } @@ -350,9 +348,9 @@ synchronized void onChoreMissedStartTime(ScheduledChore chore) { } /** - * shutdown the service. Any chores that are scheduled for execution will be cancelled. Any chores - * in the middle of execution will be interrupted and shutdown. This service will be unusable - * after this method has been called (i.e. future scheduling attempts will fail). + * Shut down the service. Any chores that are scheduled for execution will be cancelled. Any + * chores in the middle of execution will be interrupted and shutdown. This service will be + * unusable after this method has been called (i.e. future scheduling attempts will fail). *

* Notice that, this will only clean the chore from this ChoreService but you could still schedule * the chore with other ChoreService. @@ -390,9 +388,7 @@ private void cancelAllChores(final boolean mayInterruptIfRunning) { } } - /** - * Prints a summary of important details about the chore. Used for debugging purposes - */ + /** Prints a summary of important details about the chore. Used for debugging purposes */ private void printChoreDetails(final String header, ScheduledChore chore) { if (!LOG.isTraceEnabled()) { return; @@ -408,9 +404,7 @@ private void printChoreDetails(final String header, ScheduledChore chore) { } } - /** - * Prints a summary of important details about the service. Used for debugging purposes - */ + /** Prints a summary of important details about the service. Used for debugging purposes */ private void printChoreServiceDetails(final String header) { if (!LOG.isTraceEnabled()) { return; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java index 59e00481982e..5d428d0b434e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java @@ -105,6 +105,7 @@ public static Configuration create() { } /** + * Creates a Configuration with HBase resources * @param that Configuration to clone. * @return a Configuration created with the hbase-*.xml files plus the given configuration. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java index dcddb74b71be..2f9fa4104594 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java @@ -86,9 +86,7 @@ public long getWeightForSsd() { return weightForSsd; } - /** - * comparator used to sort hosts based on weight - */ + /** Comparator used to sort hosts based on weight */ public static class WeightComparator implements Comparator { @Override public int compare(HostAndWeight l, HostAndWeight r) { @@ -100,16 +98,10 @@ public int compare(HostAndWeight l, HostAndWeight r) { } } - /** - * Constructor - */ public HDFSBlocksDistribution() { this.hostAndWeights = new TreeMap<>(); } - /** - * @see java.lang.Object#toString() - */ @Override public synchronized String toString() { return "number of unique hosts in the distribution=" + this.hostAndWeights.size(); @@ -206,14 +198,13 @@ public long getUniqueBlocksTotalWeight() { return uniqueBlocksTotalWeight; } - /** - * Implementations 'visit' hostAndWeight. - */ + /** Implementations 'visit' hostAndWeight. */ public interface Visitor { long visit(final HostAndWeight hostAndWeight); } /** + * Get the block locality index for a given host * @param host the host name * @return the locality index of the given host */ @@ -227,6 +218,7 @@ public float getBlockLocalityIndex(String host) { } /** + * Get the block locality index for a ssd for a given host * @param host the host name * @return the locality index with ssd of the given host */ @@ -240,6 +232,7 @@ public float getBlockLocalityIndexForSsd(String host) { } /** + * Get the blocks local weight for a given host * @param host the host name * @return the blocks local weight of the given host */ @@ -248,6 +241,7 @@ public long getBlocksLocalWeight(String host) { } /** + * Get the blocks local weight with ssd for a given host * @param host the host name * @return the blocks local with ssd weight of the given host */ @@ -255,10 +249,6 @@ public long getBlocksLocalWithSsdWeight(String host) { return getBlocksLocalityWeightInternal(host, HostAndWeight::getWeightForSsd); } - /** - * @param host the host name - * @return the locality index of the given host - */ private long getBlocksLocalityWeightInternal(String host, Visitor visitor) { long localityIndex = 0; HostAndWeight hostAndWeight = this.hostAndWeights.get(host); @@ -293,9 +283,7 @@ public void add(HDFSBlocksDistribution otherBlocksDistribution) { addUniqueWeight(otherBlocksDistribution.getUniqueBlocksTotalWeight()); } - /** - * return the sorted list of hosts in terms of their weights - */ + /** Return the sorted list of hosts in terms of their weights */ public List getTopHosts() { HostAndWeight[] hostAndWeights = getTopHostsWithWeights(); List topHosts = new ArrayList<>(hostAndWeights.length); @@ -305,9 +293,7 @@ public List getTopHosts() { return topHosts; } - /** - * return the sorted list of hosts in terms of their weights - */ + /** Return the sorted list of hosts in terms of their weights */ public HostAndWeight[] getTopHostsWithWeights() { NavigableSet orderedHosts = new TreeSet<>(new HostAndWeight.WeightComparator()); orderedHosts.addAll(this.hostAndWeights.values()); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java index b86b5e9c3826..d65746ab68a1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java @@ -151,7 +151,7 @@ public short getRowLength() { // If row is null or rLength is invalid, the constructor will reject it, by // {@link KeyValue#checkParameters()}, so it is safe to call rLength and make the type // conversion. - return (short) (rLength); + return (short) rLength; } // 2) Family @@ -170,7 +170,7 @@ public int getFamilyOffset() { public byte getFamilyLength() { // If fLength is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()}, // so it is safe to make the type conversion. - return (byte) (fLength); + return (byte) fLength; } // 3) Qualifier diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 835f0f8094c5..59c54b0c0319 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -1075,10 +1075,7 @@ public String toString() { + getValueLength() + "/seqid=" + seqId; } - /** - * @param k Key portion of a KeyValue. - * @return Key as a String, empty string if k is null. - */ + /** Return key as a String, empty string if k is null. */ public static String keyToString(final byte[] k) { if (k == null) { return ""; @@ -1328,10 +1325,7 @@ public int getTimestampOffset() { return getTimestampOffset(getKeyLength()); } - /** - * @param keylength Pass if you have it to save on a int creation. - * @return Timestamp offset - */ + /** Return the timestamp offset */ private int getTimestampOffset(final int keylength) { return getKeyOffset() + keylength - TIMESTAMP_TYPE_SIZE; } @@ -1343,6 +1337,7 @@ public boolean isLatestTimestamp() { } /** + * Update the timestamp. * @param now Time to set into this IFF timestamp == * {@link HConstants#LATEST_TIMESTAMP} (else, its a noop). * @return True is we modified this. @@ -1386,17 +1381,13 @@ public byte[] getKey() { return key; } - /** - * n - */ + /** Return the timestamp. */ @Override public long getTimestamp() { return getTimestamp(getKeyLength()); } - /** - * @param keylength Pass if you have it to save on a int creation. n - */ + /** Return the timestamp. */ long getTimestamp(final int keylength) { int tsOffset = getTimestampOffset(keylength); return Bytes.toLong(this.bytes, tsOffset); @@ -1408,13 +1399,12 @@ public byte getTypeByte() { return getTypeByte(getKeyLength()); } + /** Return the KeyValue.TYPE byte representation */ byte getTypeByte(int keyLength) { return this.bytes[this.offset + keyLength - 1 + ROW_OFFSET]; } - /** - * This returns the offset where the tag actually starts. - */ + /** Return the offset where the tag data starts. */ @Override public int getTagsOffset() { int tagsLen = getTagsLength(); @@ -1424,9 +1414,7 @@ public int getTagsOffset() { return this.offset + this.length - tagsLen; } - /** - * This returns the total length of the tag bytes - */ + /** Return the total length of the tag bytes */ @Override public int getTagsLength() { int tagsLen = this.length - (getKeyLength() + getValueLength() + KEYVALUE_INFRASTRUCTURE_SIZE); @@ -1466,6 +1454,7 @@ public KeyValue createKeyOnly(boolean lenAsVal) { } /** + * Find index of passed delimiter walking from start of buffer forwards. * @param b the kv serialized byte[] to process * @param delimiter input delimeter to fetch index from start * @return Index of delimiter having started from start of b moving rightward. @@ -1749,6 +1738,7 @@ public int compareTimestamps(final Cell left, final Cell right) { } /** + * Compares the rows of a cell * @param left left cell to compare rows for * @param right right cell to compare rows for * @return Result comparing rows. @@ -1996,6 +1986,7 @@ public boolean matchingRows(final Cell left, final Cell right) { } /** + * Compares the row of two keyvalues for equality * @param left left cell to compare row * @param lrowlength left row length * @param right right cell to compare row @@ -2124,6 +2115,7 @@ protected KVComparator clone() throws CloneNotSupportedException { } /** + * Create a KeyValue reading from in * @param in Where to read bytes from. Creates a byte array to hold the KeyValue backing bytes * copied from the steam. * @return KeyValue created by deserializing from in OR if we find a length of zero, @@ -2250,7 +2242,7 @@ public long heapSize() { return fixed + ClassSize.sizeOfByteArray(length); } else { // only count the number of bytes - return fixed + length; + return (long) fixed + length; } } @@ -2339,6 +2331,7 @@ public byte getFamilyLength() { return this.bytes[getFamilyOffset() - 1]; } + @Override int getFamilyLengthPosition(int rowLength) { return this.offset + Bytes.SIZEOF_SHORT + rowLength; } @@ -2378,6 +2371,7 @@ public byte getTypeByte() { return getTypeByte(getKeyLength()); } + @Override byte getTypeByte(int keyLength) { return this.bytes[this.offset + keyLength - 1]; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 0dc82a86ddcf..bdf77d511af6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -26,7 +26,6 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -52,9 +51,9 @@ public class KeyValueUtil { public static int length(short rlen, byte flen, int qlen, int vlen, int tlen, boolean withTags) { if (withTags) { - return (int) (KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen, tlen)); + return (int) KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen, tlen); } - return (int) (KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen)); + return (int) KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen); } /** @@ -214,7 +213,6 @@ public static KeyValue nextShallowCopy(final ByteBuffer bb, final boolean includ if (bb.remaining() < 1) { return null; } - KeyValue keyValue = null; int underlyingArrayOffset = bb.arrayOffset() + bb.position(); int keyLength = bb.getInt(); int valueLength = bb.getInt(); @@ -226,7 +224,7 @@ public static KeyValue nextShallowCopy(final ByteBuffer bb, final boolean includ ByteBufferUtils.skip(bb, tagsLength); } int kvLength = (int) KeyValue.getKeyValueDataStructureSize(keyLength, valueLength, tagsLength); - keyValue = new KeyValue(bb.array(), underlyingArrayOffset, kvLength); + KeyValue keyValue = new KeyValue(bb.array(), underlyingArrayOffset, kvLength); if (includesMvccVersion) { long mvccVersion = ByteBufferUtils.readVLong(bb); keyValue.setSequenceId(mvccVersion); @@ -257,7 +255,7 @@ public static KeyValue createLastOnRow(final byte[] row, final int roffset, fina final byte[] family, final int foffset, final int flength, final byte[] qualifier, final int qoffset, final int qlength) { return new KeyValue(row, roffset, rlength, family, foffset, flength, qualifier, qoffset, - qlength, PrivateConstants.OLDEST_TIMESTAMP, Type.Minimum, null, 0, 0); + qlength, PrivateConstants.OLDEST_TIMESTAMP, KeyValue.Type.Minimum, null, 0, 0); } /** @@ -268,7 +266,7 @@ public static KeyValue createLastOnRow(final byte[] row, final int roffset, fina */ public static KeyValue createFirstOnRow(final byte[] row, int roffset, short rlength) { return new KeyValue(row, roffset, rlength, null, 0, 0, null, 0, 0, HConstants.LATEST_TIMESTAMP, - Type.Maximum, null, 0, 0); + KeyValue.Type.Maximum, null, 0, 0); } /** @@ -278,7 +276,7 @@ public static KeyValue createFirstOnRow(final byte[] row, int roffset, short rle * @return Last possible KeyValue on passed row */ public static KeyValue createLastOnRow(final byte[] row) { - return new KeyValue(row, null, null, HConstants.LATEST_TIMESTAMP, Type.Minimum); + return new KeyValue(row, null, null, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Minimum); } /** @@ -299,7 +297,7 @@ public static KeyValue createFirstOnRow(final byte[] row) { * @return First possible key on passed row and timestamp. */ public static KeyValue createFirstOnRow(final byte[] row, final long ts) { - return new KeyValue(row, null, null, ts, Type.Maximum); + return new KeyValue(row, null, null, ts, KeyValue.Type.Maximum); } /** @@ -312,10 +310,12 @@ public static KeyValue createFirstOnRow(final byte[] row, final long ts) { */ public static KeyValue createFirstOnRow(final byte[] row, final byte[] family, final byte[] qualifier) { - return new KeyValue(row, family, qualifier, HConstants.LATEST_TIMESTAMP, Type.Maximum); + return new KeyValue(row, family, qualifier, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum); } /** + * Create a KeyValue for the specified row, family and qualifier that would be smaller than all + * other possible KeyValues that have the same row, family, qualifier. Used for seeking. * @param row - row key (arbitrary byte array) * @param f - family name * @param q - column qualifier @@ -324,7 +324,7 @@ public static KeyValue createFirstOnRow(final byte[] row, final byte[] family, */ public static KeyValue createFirstOnRow(final byte[] row, final byte[] f, final byte[] q, final long ts) { - return new KeyValue(row, f, q, ts, Type.Maximum); + return new KeyValue(row, f, q, ts, KeyValue.Type.Maximum); } /** @@ -345,7 +345,7 @@ public static KeyValue createFirstOnRow(final byte[] row, final int roffset, fin final byte[] family, final int foffset, final int flength, final byte[] qualifier, final int qoffset, final int qlength) { return new KeyValue(row, roffset, rlength, family, foffset, flength, qualifier, qoffset, - qlength, HConstants.LATEST_TIMESTAMP, Type.Maximum, null, 0, 0); + qlength, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum, null, 0, 0); } /** @@ -565,7 +565,7 @@ static void checkKeyValueBytes(byte[] buf, int offset, int length, boolean withT throw new IllegalArgumentException(msg); } byte type = buf[pos]; - if (!Type.isValidType(type)) { + if (!KeyValue.Type.isValidType(type)) { String msg = "Invalid type in KeyValue, type=" + type + bytesToHex(buf, offset, length); LOG.warn(msg); throw new IllegalArgumentException(msg); @@ -740,7 +740,6 @@ public static int oswrite(final Cell cell, final OutputStream out, final boolean int qlen = cell.getQualifierLength(); int vlen = cell.getValueLength(); int tlen = cell.getTagsLength(); - int size = 0; // write key length int klen = keyLength(rlen, flen, qlen); ByteBufferUtils.putInt(out, klen); @@ -760,7 +759,7 @@ public static int oswrite(final Cell cell, final OutputStream out, final boolean out.write(cell.getTypeByte()); // write value out.write(cell.getValueArray(), cell.getValueOffset(), vlen); - size = klen + vlen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE; + int size = klen + vlen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE; // write tags if we have to if (withTags && tlen > 0) { // 2 bytes tags length followed by tags bytes diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java index ddc897107159..1b035966da2b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java @@ -132,6 +132,7 @@ static class TagRewriteCell implements ExtendedCell { private static final int HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * ClassSize.REFERENCE; /** + * Construct a TagRewriteCell * @param cell The original Cell which it rewrites * @param tags the tags bytes. The array suppose to contain the tags bytes alone. */ @@ -828,8 +829,8 @@ public static boolean matchingTags(final Cell left, final Cell right, int llengt } /** - * @return True if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily} - * or a {@link KeyValue.Type#DeleteColumn} KeyValue type. + * Return true if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily} + * or a {@link KeyValue.Type#DeleteColumn} KeyValue type. */ public static boolean isDelete(final byte type) { return KeyValue.Type.Delete.getCode() <= type && type <= KeyValue.Type.DeleteFamily.getCode(); @@ -868,9 +869,7 @@ public static byte[] cloneTags(Cell cell) { return output; } - /** - * Copies the tags info into the tag portion of the cell nnn * @return position after tags - */ + /** Copies the tags info into the tag portion of the cell */ public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) { int tlen = cell.getTagsLength(); if (cell instanceof ByteBufferExtendedCell) { @@ -884,9 +883,7 @@ public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffse return destinationOffset + tlen; } - /** - * Copies the tags info into the tag portion of the cell nnn * @return the position after tags - */ + /** Copies the tags info into the tag portion of the cell */ public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) { int tlen = cell.getTagsLength(); if (cell instanceof ByteBufferExtendedCell) { @@ -900,6 +897,7 @@ public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationO } /** + * Return tags in the given Cell as a List * @param cell The Cell * @return Tags in the given Cell as a List */ @@ -945,7 +943,7 @@ public static Optional getTag(Cell cell, byte type) { } /** - * Util method to iterate through the tags in the given cell. + * Utility method to iterate through the tags in the given cell. * @param cell The Cell over which tags iterator is needed. * @return iterator for the tags */ @@ -1028,9 +1026,7 @@ public static boolean overlappingKeys(final byte[] start1, final byte[] end1, fi && (end1.length == 0 || start2.length == 0 || Bytes.compareTo(start2, end1) < 0); } - /** - * Write rowkey excluding the common part. nnnnn - */ + /** Write rowkey excluding the common part. */ public static void writeRowKeyExcludingCommon(Cell cell, short rLen, int commonPrefix, DataOutputStream out) throws IOException { if (commonPrefix == 0) { @@ -1238,8 +1234,9 @@ public static final int compareKeyBasedOnColHint(CellComparator comparator, Cell /** * Compares only the key portion of a cell. It does not include the sequence id/mvcc of the cell - * nn * @return an int greater than 0 if left > than right lesser than 0 if left < than - * right equal to 0 if left is equal to right + * nn + * @return an int greater than 0 if left > than right lesser than 0 if left < than right + * equal to 0 if left is equal to right */ public static final int compareKeyIgnoresMvcc(CellComparator comparator, Cell left, Cell right) { return ((CellComparatorImpl) comparator).compare(left, right, true); @@ -1834,7 +1831,7 @@ public int getQualifierLength() { private static class FirstOnRowColCell extends FirstOnRowCell { // @formatter:off - private static final long FIXED_HEAPSIZE = FirstOnRowCell.FIXED_HEAPSIZE + private static final long FIXED_HEAPSIZE = (long) FirstOnRowCell.FIXED_HEAPSIZE + Bytes.SIZEOF_BYTE // flength + Bytes.SIZEOF_INT * 3 // foffset, qoffset, qlength + ClassSize.REFERENCE * 2; // fArray, qArray @@ -2003,7 +2000,7 @@ public Type getType() { private static class LastOnRowColCell extends LastOnRowCell { // @formatter:off - private static final long FIXED_OVERHEAD = LastOnRowCell.FIXED_OVERHEAD + private static final long FIXED_OVERHEAD = (long) LastOnRowCell.FIXED_OVERHEAD + ClassSize.REFERENCE * 2 // fArray and qArray + Bytes.SIZEOF_INT * 3 // foffset, qoffset, qlength + Bytes.SIZEOF_BYTE; // flength @@ -2345,7 +2342,7 @@ public static void setTimestamp(Cell cell, long ts) throws IOException { } /** - * Sets the given timestamp to the cell. n * @param ts buffer containing the timestamp value + * Sets the given timestamp to the cell. * @throws IOException when the passed cell is not of type {@link ExtendedCell} */ public static void setTimestamp(Cell cell, byte[] ts) throws IOException { @@ -2359,7 +2356,8 @@ public static void setTimestamp(Cell cell, byte[] ts) throws IOException { /** * Sets the given timestamp to the cell iff current timestamp is - * {@link HConstants#LATEST_TIMESTAMP}. nn * @return True if cell timestamp is modified. + * {@link HConstants#LATEST_TIMESTAMP}. + * @return True if cell timestamp is modified. * @throws IOException when the passed cell is not of type {@link ExtendedCell} */ public static boolean updateLatestStamp(Cell cell, long ts) throws IOException { @@ -2372,7 +2370,7 @@ public static boolean updateLatestStamp(Cell cell, long ts) throws IOException { /** * Sets the given timestamp to the cell iff current timestamp is - * {@link HConstants#LATEST_TIMESTAMP}. n * @param ts buffer containing the timestamp value + * {@link HConstants#LATEST_TIMESTAMP}. * @return True if cell timestamp is modified. * @throws IOException when the passed cell is not of type {@link ExtendedCell} */ @@ -2711,8 +2709,8 @@ static final int compareWithoutRow(CellComparator comparator, Cell left, byte[] } /** - * @return An new cell is located following input cell. If both of type and timestamp are minimum, - * the input cell will be returned directly. + * Return a new cell is located following input cell. If both of type and timestamp are minimum, + * the input cell will be returned directly. */ public static Cell createNextOnRowCol(Cell cell) { long ts = cell.getTimestamp(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java index f6b6537f5d43..168fe306e4b5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java @@ -79,6 +79,8 @@ public abstract class ScheduledChore implements Runnable { private final Stoppable stopper; /** + * Construct a ScheduledChore + *

* This constructor is for test only. It allows us to create an object and to call chore() on it. */ @InterfaceAudience.Private @@ -87,6 +89,7 @@ protected ScheduledChore() { } /** + * Construct a ScheduledChore * @param name Name assigned to Chore. Useful for identification amongst chores of the same * type * @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and cleanup @@ -97,6 +100,7 @@ public ScheduledChore(final String name, Stoppable stopper, final int period) { } /** + * Construct a ScheduledChore * @param name Name assigned to Chore. Useful for identification amongst chores of the * same type * @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and @@ -112,6 +116,7 @@ public ScheduledChore(final String name, Stoppable stopper, final int period, } /** + * Construct a ScheduledChore * @param name Name assigned to Chore. Useful for identification amongst chores of the * same type * @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and @@ -132,9 +137,6 @@ public ScheduledChore(final String name, Stoppable stopper, final int period, this.timeUnit = unit; } - /** - * @see java.lang.Runnable#run() - */ @Override public void run() { updateTimeTrackingBeforeRun(); @@ -193,8 +195,8 @@ private synchronized void onChoreMissedStartTime() { } /** - * @return How long in millis has it been since this chore last run. Useful for checking if the - * chore has missed its scheduled start time by too large of a margin + * Return how long in millis has it been since this chore last run. Useful for checking if the + * chore has missed its scheduled start time by too large of a margin */ synchronized long getTimeBetweenRuns() { return timeOfThisRun - timeOfLastRun; @@ -212,10 +214,7 @@ private double getMaximumAllowedTimeBetweenRuns() { return 1.5 * timeUnit.toMillis(period); } - /** - * @param time in system millis - * @return true if time is earlier or equal to current milli time - */ + /** Return true if time is earlier or equal to current time */ private synchronized boolean isValidTime(final long time) { return time > 0 && time <= EnvironmentEdgeManager.currentTime(); } @@ -297,9 +296,7 @@ public synchronized void choreForTesting() { chore(); } - /** - * The task to execute on each scheduled execution of the Chore - */ + /** The task to execute on each scheduled execution of the Chore */ protected abstract void chore(); /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java index 926c18dafe25..b359f07571e7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -19,6 +19,7 @@ import java.io.Serializable; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.regex.Pattern; @@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; import org.apache.hbase.thirdparty.com.google.common.collect.Interner; import org.apache.hbase.thirdparty.com.google.common.collect.Interners; import org.apache.hbase.thirdparty.com.google.common.net.InetAddresses; @@ -80,8 +82,8 @@ public class ServerName implements Comparable, Serializable { */ public static final String UNKNOWN_SERVERNAME = "#unknown#"; - private final String servername; - private final long startcode; + private final String serverName; + private final long startCode; private transient Address address; /** @@ -97,15 +99,15 @@ public class ServerName implements Comparable, Serializable { */ private static final Interner INTERN_POOL = Interners.newWeakInterner(); - protected ServerName(final String hostname, final int port, final long startcode) { - this(Address.fromParts(hostname, port), startcode); + protected ServerName(final String hostname, final int port, final long startCode) { + this(Address.fromParts(hostname, port), startCode); } - private ServerName(final Address address, final long startcode) { + private ServerName(final Address address, final long startCode) { // Use HostAndPort to host port and hostname. Does validation and can do ipv6 this.address = address; - this.startcode = startcode; - this.servername = getServerName(this.address.getHostname(), this.address.getPort(), startcode); + this.startCode = startCode; + this.serverName = getServerName(this.address.getHostname(), this.address.getPort(), startCode); } private ServerName(final String hostAndPort, final long startCode) { @@ -120,20 +122,20 @@ private static String getHostNameMinusDomain(final String hostname) { if (InetAddresses.isInetAddress(hostname)) { return hostname; } - String[] parts = hostname.split("\\."); - if (parts.length == 0) { + List parts = Splitter.on('.').splitToList(hostname); + if (parts.size() == 0) { return hostname; } - - return parts[0]; + Iterator i = parts.iterator(); + return i.next(); } /** * Retrieve an instance of ServerName. Callers should use the equals method to compare returned * instances, though we may return a shared immutable object as an internal optimization. */ - public static ServerName valueOf(final String hostname, final int port, final long startcode) { - return INTERN_POOL.intern(new ServerName(hostname, port, startcode)); + public static ServerName valueOf(final String hostname, final int port, final long startCode) { + return INTERN_POOL.intern(new ServerName(hostname, port, startCode)); } /** @@ -142,7 +144,9 @@ public static ServerName valueOf(final String hostname, final int port, final lo */ public static ServerName valueOf(final String serverName) { final String hostname = serverName.substring(0, serverName.indexOf(SERVERNAME_SEPARATOR)); - final int port = Integer.parseInt(serverName.split(SERVERNAME_SEPARATOR)[1]); + Iterator i = Splitter.onPattern(SERVERNAME_SEPARATOR).split(serverName).iterator(); + i.next(); // Skip first element + final int port = Integer.parseInt(i.next()); final long statuscode = Long.parseLong(serverName.substring(serverName.lastIndexOf(SERVERNAME_SEPARATOR) + 1)); return INTERN_POOL.intern(new ServerName(hostname, port, statuscode)); @@ -161,12 +165,12 @@ public static ServerName valueOf(final String hostAndPort, final long startCode) * method to compare returned instances, though we may return a shared immutable object as an * internal optimization. * @param address the {@link Address} to use for getting the {@link ServerName} - * @param startcode the startcode to use for getting the {@link ServerName} + * @param startCode the startcode to use for getting the {@link ServerName} * @return the constructed {@link ServerName} * @see #valueOf(String, int, long) */ - public static ServerName valueOf(final Address address, final long startcode) { - return valueOf(address.getHostname(), address.getPort(), startcode); + public static ServerName valueOf(final Address address, final long startCode) { + return valueOf(address.getHostname(), address.getPort(), startCode); } @Override @@ -175,10 +179,9 @@ public String toString() { } /** - * @return Return a SHORT version of {@link #toString()}, one that has the host only, minus the - * domain, and the port only -- no start code; the String is for us internally mostly - * tying threads to their server. Not for external use. It is lossy and will not work in - * in compares, etc. + * Return a SHORT version of {@link #toString()}, one that has the host only, minus the domain, + * and the port only -- no start code; the String is for us internally mostly tying threads to + * their server. Not for external use. It is lossy and will not work in in compares, etc. */ public String toShortString() { return Addressing.createHostAndPortStr(getHostNameMinusDomain(this.address.getHostname()), @@ -186,8 +189,8 @@ public String toShortString() { } /** - * @return {@link #getServerName()} as bytes with a short-sized prefix with the {@link #VERSION} - * of this class. + * Return {@link #getServerName()} as bytes with a short-sized prefix with the {@link #VERSION} of + * this class. */ public synchronized byte[] getVersionedBytes() { if (this.bytes == null) { @@ -197,7 +200,7 @@ public synchronized byte[] getVersionedBytes() { } public String getServerName() { - return servername; + return serverName; } public String getHostname() { @@ -212,21 +215,31 @@ public int getPort() { return this.address.getPort(); } + /** + * Return the start code. + * @deprecated Use {@link #getStartCode()} instead. + */ + @Deprecated public long getStartcode() { - return startcode; + return startCode; + } + + /** Return the start code. */ + public long getStartCode() { + return startCode; } /** * For internal use only. * @param hostName the name of the host to use * @param port the port on the host to use - * @param startcode the startcode to use for formatting + * @param startCode the startcode to use for formatting * @return Server name made of the concatenation of hostname, port and startcode formatted as * <hostname> ',' <port> ',' <startcode> */ - private static String getServerName(String hostName, int port, long startcode) { + private static String getServerName(String hostName, int port, long startCode) { return hostName.toLowerCase(Locale.ROOT) + SERVERNAME_SEPARATOR + port + SERVERNAME_SEPARATOR - + startcode; + + startCode; } public Address getAddress() { @@ -256,7 +269,7 @@ public int compareTo(ServerName other) { if (compare != 0) { return compare; } - return Long.compare(this.getStartcode(), other.getStartcode()); + return Long.compare(this.getStartCode(), other.getStartCode()); } @Override @@ -279,6 +292,7 @@ public boolean equals(Object o) { } /** + * Compare two addresses * @param left the first server address to compare * @param right the second server address to compare * @return {@code true} if {@code left} and {@code right} have the same hostname and port. @@ -308,6 +322,7 @@ public static ServerName parseVersionedServerName(final byte[] versionedBytes) { } /** + * Parse a ServerName from a string * @param str Either an instance of {@link #toString()} or a "'<hostname>' ':' * '<port>'". * @return A ServerName instance. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java index 075bc5d1686a..83303a1c476c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java @@ -274,9 +274,6 @@ public String toString() { return nameAsString; } - /** - * @throws IllegalArgumentException See {@link #valueOf(byte[])} - */ private TableName(ByteBuffer namespace, ByteBuffer qualifier) throws IllegalArgumentException { this.qualifier = new byte[qualifier.remaining()]; qualifier.duplicate().get(this.qualifier); @@ -320,9 +317,7 @@ private TableName(ByteBuffer namespace, ByteBuffer qualifier) throws IllegalArgu isLegalTableQualifierName(this.qualifier); } - /** - * This is only for the old and meta tables. - */ + /** This is only for the old and meta tables. */ private TableName(String qualifier) { this.qualifier = Bytes.toBytes(qualifier); this.qualifierAsString = qualifier; @@ -394,6 +389,7 @@ public static TableName valueOf(String namespaceAsString, String qualifierAsStri } /** + * Construct a TableName * @param fullName will use the entire byte array * @throws IllegalArgumentException if fullName equals old root or old meta. Some code depends on * this. The test is buried in the table creation to save on @@ -405,6 +401,7 @@ public static TableName valueOf(byte[] fullName) throws IllegalArgumentException } /** + * Construct a TableName * @param fullName byte array to look into * @param offset within said array * @param length within said array @@ -437,6 +434,7 @@ public static TableName valueOf(byte[] fullName, int offset, int length) } /** + * Construct a TableName * @param fullname of a table, possibly with a leading namespace and ':' as delimiter. * @throws IllegalArgumentException if fullName equals old root or old meta. */ @@ -462,6 +460,7 @@ public static TableName valueOf(ByteBuffer fullname) { } /** + * Construct a TableName * @throws IllegalArgumentException if fullName equals old root or old meta. Some code depends on * this. */ @@ -526,12 +525,12 @@ public int hashCode() { return hashCode; } - /** - * For performance reasons, the ordering is not lexicographic. - */ @Override public int compareTo(TableName tableName) { - if (this == tableName) return 0; + // For performance reasons, the ordering is not lexicographic. + if (this == tableName) { + return 0; + } if (this.hashCode < tableName.hashCode()) { return -1; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java index fd0aa43190be..8d03e35f8a4b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java @@ -55,15 +55,13 @@ public interface Tag { /** Returns Length of tag value within the backed buffer */ int getValueLength(); - /** - * Tells whether or not this Tag is backed by a byte array. - * @return true when this Tag is backed by byte array - */ + /** Return true if the tag is backed by a byte array */ boolean hasArray(); /** - * @return The array containing the value bytes. n * when {@link #hasArray()} return false. Use - * {@link #getValueByteBuffer()} in such situation + * Return an array containing the value bytes if {@link #hasArray()} returns true. + *

+ * Use {@link #getValueByteBuffer()} otherwise. */ byte[] getValueArray(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java index c3c49410210c..6d911bccd58a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java @@ -70,9 +70,7 @@ public static List carryForwardTags(final Cell cell) { return carryForwardTags(null, cell); } - /** - * Add to tagsOrNull any Tags cell is carrying or null if none. - */ + /** Add to tagsOrNull any Tags cell is carrying or null if none. */ public static List carryForwardTags(final List tagsOrNull, final Cell cell) { Iterator itr = PrivateCellUtil.tagsIterator(cell); if (itr == EMPTY_TAGS_ITR) { @@ -166,9 +164,7 @@ public static byte[] fromList(List tags) { return b; } - /** - * Iterator returned when no Tags. Used by CellUtil too. - */ + /** Iterator returned when no Tags. Used by CellUtil too. */ static final Iterator EMPTY_TAGS_ITR = new Iterator() { @Override public boolean hasNext() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java index 480680b00f3d..f6c556c9b3c5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java @@ -50,10 +50,12 @@ public byte[] getValue() { public abstract byte[] toByteArray(); /** + * Parse a serialized representation of {@link ByteArrayComparable} * @param pbBytes A pb serialized {@link ByteArrayComparable} instance - * @return An instance of {@link ByteArrayComparable} made from bytes n * @see - * #toByteArray + * @return An instance of {@link ByteArrayComparable} made from bytes + * @see #toByteArray */ + @SuppressWarnings("DoNotCallSuggester") public static ByteArrayComparable parseFrom(final byte[] pbBytes) throws DeserializationException { throw new DeserializationException( @@ -61,12 +63,13 @@ public static ByteArrayComparable parseFrom(final byte[] pbBytes) } /** - * n * @return true if and only if the fields of the comparator that are serialized are equal to - * the corresponding fields in other. Used for testing. + * Return true if and only if the fields of the comparator that are serialized are equal to the + * corresponding fields in other. */ boolean areSerializedFieldsEqual(ByteArrayComparable other) { - if (other == this) return true; - + if (other == this) { + return true; + } return Bytes.equals(this.getValue(), other.getValue()); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java index 5e0fb79d841c..3a4a9e2d230a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java @@ -41,8 +41,8 @@ @InterfaceAudience.Private public interface HeapSize { /** - * @return Approximate 'exclusive deep size' of implementing object. Includes count of payload and - * hosting object sizings. + * Return the approximate 'exclusive deep size' of implementing object. Includes count of payload + * and hosting object sizings. */ long heapSize(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java index 22b6cc086529..593802bf3b68 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java @@ -91,16 +91,12 @@ public byte[] get() { return this.bytes; } - /** - * @param b Use passed bytes as backing array for this instance. - */ + /** Use passed bytes as backing array for this instance. */ public void set(final byte[] b) { set(b, 0, b.length); } - /** - * @param b Use passed bytes as backing array for this instance. nn - */ + /** Use passed bytes as backing array for this instance. */ public void set(final byte[] b, final int offset, final int length) { this.bytes = b; this.offset = offset; @@ -116,9 +112,7 @@ public int getLength() { return this.length; } - /** - * n - */ + /** Return the offset into the buffer. */ public int getOffset() { return this.offset; } @@ -138,6 +132,7 @@ public void write(final DataOutput out) throws IOException { } // Below methods copied from BytesWritable + @Override public int hashCode() { int hash = 1; @@ -167,9 +162,6 @@ public int compareTo(final byte[] that) { that.length); } - /** - * @see java.lang.Object#equals(java.lang.Object) - */ @Override public boolean equals(Object right_obj) { if (right_obj instanceof byte[]) { @@ -181,9 +173,6 @@ public boolean equals(Object right_obj) { return false; } - /** - * @see java.lang.Object#toString() - */ @Override public String toString() { StringBuilder sb = new StringBuilder(3 * this.length); @@ -200,21 +189,15 @@ public String toString() { return sb.length() > 0 ? sb.substring(1) : ""; } - /** - * A Comparator optimized for ImmutableBytesWritable. - */ + /** A Comparator optimized for ImmutableBytesWritable. */ @InterfaceAudience.Public public static class Comparator extends WritableComparator { private BytesWritable.Comparator comparator = new BytesWritable.Comparator(); - /** constructor */ public Comparator() { super(ImmutableBytesWritable.class); } - /** - * @see org.apache.hadoop.io.WritableComparator#compare(byte[], int, int, byte[], int, int) - */ @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { return comparator.compare(b1, s1, l1, b2, s2, l2); @@ -226,6 +209,7 @@ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { } /** + * Convert a list of byte arrays into an array of byte arrays * @param array List of byte []. * @return Array of byte []. */ @@ -238,9 +222,7 @@ public static byte[][] toArray(final List array) { return results; } - /** - * Returns a copy of the bytes referred to by this writable - */ + /** Returns a copy of the bytes referred to by this writable */ public byte[] copyBytes() { return Arrays.copyOfRange(bytes, offset, offset + length); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java index b3c699c00702..af9126d942d2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java @@ -81,7 +81,8 @@ public void compressTags(OutputStream out, byte[] in, int offset, int length) th public void compressTags(OutputStream out, ByteBuffer in, int offset, int length) throws IOException { if (in.hasArray()) { - compressTags(out, in.array(), offset, length); + // Offset we are given is relative to ByteBuffer#arrayOffset + compressTags(out, in.array(), in.arrayOffset() + offset, length); } else { int pos = offset; int endOffset = pos + length; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java index f858f3a773fc..0b3b3afbfc58 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java @@ -100,7 +100,7 @@ public void finish() throws IOException { def.finish(); while (!def.finished()) { int i = def.deflate(this.buf, 0, this.buf.length); - if ((def.finished()) && (i <= this.buf.length - TRAILER_SIZE)) { + if (def.finished() && (i <= this.buf.length - TRAILER_SIZE)) { writeTrailer(this.buf, i); i += TRAILER_SIZE; out.write(this.buf, 0, i); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 50a79f93130b..4908b927bfe8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.io.TagCompressionContext; @@ -279,14 +278,14 @@ private Cell toOffheapCell(ByteBuffer valAndTagsBuffer, int vOffset, protected static class OnheapDecodedCell implements ExtendedCell { private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT) - + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY)); + + Bytes.SIZEOF_SHORT + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.ARRAY)); private byte[] keyOnlyBuffer; private short rowLength; private int familyOffset; private byte familyLength; private int qualifierOffset; private int qualifierLength; - private long timestamp; + private long timeStamp; private byte typeByte; private byte[] valueBuffer; private int valueOffset; @@ -306,7 +305,7 @@ protected OnheapDecodedCell(byte[] keyBuffer, short rowLength, int familyOffset, this.familyLength = familyLength; this.qualifierOffset = qualOffset; this.qualifierLength = qualLength; - this.timestamp = timeStamp; + this.timeStamp = timeStamp; this.typeByte = typeByte; this.valueBuffer = valueBuffer; this.valueOffset = valueOffset; @@ -364,7 +363,7 @@ public int getQualifierLength() { @Override public long getTimestamp() { - return timestamp; + return timeStamp; } @Override @@ -475,16 +474,17 @@ public ExtendedCell deepClone() { } protected static class OffheapDecodedExtendedCell extends ByteBufferExtendedCell { - private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT - + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT) - + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (3 * ClassSize.BYTE_BUFFER)); + private static final long FIXED_OVERHEAD = + (long) ClassSize.align(ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + + (7 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + + (3 * ClassSize.BYTE_BUFFER); private ByteBuffer keyBuffer; private short rowLength; private int familyOffset; private byte familyLength; private int qualifierOffset; private int qualifierLength; - private long timestamp; + private long timeStamp; private byte typeByte; private ByteBuffer valueBuffer; private int valueOffset; @@ -507,7 +507,7 @@ protected OffheapDecodedExtendedCell(ByteBuffer keyBuffer, short rowLength, int this.familyLength = familyLength; this.qualifierOffset = qualOffset; this.qualifierLength = qualLength; - this.timestamp = timeStamp; + this.timeStamp = timeStamp; this.typeByte = typeByte; this.valueBuffer = valueBuffer; this.valueOffset = valueOffset; @@ -519,6 +519,7 @@ protected OffheapDecodedExtendedCell(ByteBuffer keyBuffer, short rowLength, int } @Override + @SuppressWarnings("ByteBufferBackingArray") public byte[] getRowArray() { return this.keyBuffer.array(); } @@ -534,6 +535,7 @@ public short getRowLength() { } @Override + @SuppressWarnings("ByteBufferBackingArray") public byte[] getFamilyArray() { return this.keyBuffer.array(); } @@ -549,6 +551,7 @@ public byte getFamilyLength() { } @Override + @SuppressWarnings("ByteBufferBackingArray") public byte[] getQualifierArray() { return this.keyBuffer.array(); } @@ -565,7 +568,7 @@ public int getQualifierLength() { @Override public long getTimestamp() { - return this.timestamp; + return this.timeStamp; } @Override @@ -671,10 +674,10 @@ public void setSequenceId(long seqId) { @Override public int write(OutputStream out, boolean withTags) throws IOException { int lenToWrite = getSerializedSize(withTags); - ByteBufferUtils.putInt(out, keyBuffer.capacity()); + ByteBufferUtils.putInt(out, keyBuffer.remaining()); ByteBufferUtils.putInt(out, valueLength); // Write key - out.write(keyBuffer.array()); + out.write(keyBuffer.array(), keyBuffer.arrayOffset(), keyBuffer.remaining()); // Write value ByteBufferUtils.copyBufferToStream(out, this.valueBuffer, this.valueOffset, this.valueLength); if (withTags && this.tagsLength > 0) { @@ -928,14 +931,14 @@ public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) { private int compareTypeBytes(Cell key, Cell right) { if ( key.getFamilyLength() + key.getQualifierLength() == 0 - && key.getTypeByte() == Type.Minimum.getCode() + && key.getTypeByte() == KeyValue.Type.Minimum.getCode() ) { // left is "bigger", i.e. it appears later in the sorted order return 1; } if ( right.getFamilyLength() + right.getQualifierLength() == 0 - && right.getTypeByte() == Type.Minimum.getCode() + && right.getTypeByte() == KeyValue.Type.Minimum.getCode() ) { return -1; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java index 71808adf75d3..21f6c92ef358 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java @@ -28,6 +28,7 @@ * assign it a new id. Announce the new id in the HBase mailing list to prevent collisions. */ @InterfaceAudience.Public +@SuppressWarnings("ImmutableEnumChecker") public enum DataBlockEncoding { /** Disable data block encoding. */ @@ -100,6 +101,8 @@ public void writeIdInBytes(OutputStream stream) throws IOException { * @param dest output array * @param offset starting offset of the output array n */ + // System.arraycopy is static native. Nothing we can do this until we have minimum JDK 9. + @SuppressWarnings("UnsafeFinalization") public void writeIdInBytes(byte[] dest, int offset) throws IOException { System.arraycopy(idInBytes, 0, dest, offset, ID_SIZE); } @@ -159,10 +162,10 @@ public static DataBlockEncoding getEncodingById(short dataBlockEncodingId) { return algorithm; } - protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName) { + static DataBlockEncoder createEncoder(String fullyQualifiedClassName) { try { - return (DataBlockEncoder) Class.forName(fullyQualifiedClassName).getDeclaredConstructor() - .newInstance(); + return Class.forName(fullyQualifiedClassName).asSubclass(DataBlockEncoder.class) + .getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index 77324b07c2f7..e865d0b12523 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -304,7 +304,7 @@ public Cell getFirstKeyCellInBlock(ByteBuff block) { ByteBuffer result = ByteBuffer.allocate(keyLength); // copy row - assert !(result.isDirect()); + assert !result.isDirect(); int pos = result.arrayOffset(); block.get(result.array(), pos, Bytes.SIZEOF_SHORT); pos += Bytes.SIZEOF_SHORT; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index daef027a5eae..3948aee35aed 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -143,8 +143,9 @@ public Cell next() { ByteBufferUtils.skip(decompressedData, tagsLen); } } - KeyValue kv = new KeyValue(decompressedData.array(), offset, - (int) KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen)); + KeyValue kv = + new KeyValue(decompressedData.array(), decompressedData.arrayOffset() + offset, + (int) KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen)); if (meta.isIncludesMvcc()) { long mvccVersion = ByteBufferUtils.readVLong(decompressedData); kv.setSequenceId(mvccVersion); @@ -271,7 +272,7 @@ public byte[] encodeData() { if (this.meta.isIncludesMvcc()) { memstoreTS = ByteBufferUtils.readVLong(in); } - kv = new KeyValue(in.array(), kvOffset, + kv = new KeyValue(in.array(), in.arrayOffset() + kvOffset, (int) KeyValue.getKeyValueDataStructureSize(klength, vlength, tagsLength)); kv.setSequenceId(memstoreTS); this.dataBlockEncoder.encode(kv, encodingCtx, out); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java index fcac1549b9f3..71cba0b9d8cf 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java @@ -43,23 +43,20 @@ public interface HFileBlockEncodingContext { */ void postEncoding(BlockType blockType) throws IOException; - /** - * Releases the resources used. - */ + /** Releases the resources used. */ void close(); /** Returns HFile context information */ HFileContext getHFileContext(); - /** - * Sets the encoding state. - */ + /** Sets the encoding state. */ void setEncodingState(EncodingState state); /** Returns the encoding state */ EncodingState getEncodingState(); /** + * Compress and encrypt the supplied encoded block data with header. * @param data encoded bytes with header * @param offset the offset in encoded data to start at * @param length the number of encoded bytes diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java index 453f55b6438f..e283803a143b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java @@ -80,12 +80,13 @@ public void setCurrentBuffer(ByteBuff buffer) { } @Override + @SuppressWarnings("ByteBufferBackingArray") public Cell getKey() { if (current.keyBuffer.hasArray()) { return new KeyValue.KeyOnlyKeyValue(current.keyBuffer.array(), current.keyBuffer.arrayOffset() + current.keyBuffer.position(), current.keyLength); } else { - byte[] key = new byte[current.keyLength]; + final byte[] key = new byte[current.keyLength]; ByteBufferUtils.copyFromBufferToArray(key, current.keyBuffer, current.keyBuffer.position(), 0, current.keyLength); return new KeyValue.KeyOnlyKeyValue(key, 0, current.keyLength); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java index 3bd98e6388c9..d2605711a805 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java @@ -31,6 +31,7 @@ * The values in the enum appear in the order they appear in a version 2 HFile. */ @InterfaceAudience.Private +@SuppressWarnings("ImmutableEnumChecker") public enum BlockType { // Scanned block section @@ -179,6 +180,8 @@ public static BlockType read(ByteBuff buf) throws IOException { * @param offset position in the array * @return incremented offset */ + // System.arraycopy is static native. We can't do anything about this until minimum JDK is 9. + @SuppressWarnings("UnsafeFinalization") public int put(byte[] bytes, int offset) { System.arraycopy(magic, 0, bytes, offset, MAGIC_LENGTH); return offset + MAGIC_LENGTH; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java index 5064c4a4b8f2..d152ec1900b2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java @@ -56,7 +56,7 @@ public class HFileContext implements HeapSize, Cloneable { /** the number of bytes per checksum value **/ private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; /** Number of uncompressed bytes we allow per block. */ - private int blocksize = HConstants.DEFAULT_BLOCKSIZE; + private int blockSize = HConstants.DEFAULT_BLOCKSIZE; private DataBlockEncoding encoding = DataBlockEncoding.NONE; /** Encryption algorithm and key used */ private Encryption.Context cryptoContext = Encryption.Context.NONE; @@ -81,7 +81,7 @@ public HFileContext(HFileContext context) { this.compressTags = context.compressTags; this.checksumType = context.checksumType; this.bytesPerChecksum = context.bytesPerChecksum; - this.blocksize = context.blocksize; + this.blockSize = context.blockSize; this.encoding = context.encoding; this.cryptoContext = context.cryptoContext; this.fileCreateTime = context.fileCreateTime; @@ -103,7 +103,7 @@ public HFileContext(HFileContext context) { this.compressTags = compressTags; this.checksumType = checksumType; this.bytesPerChecksum = bytesPerChecksum; - this.blocksize = blockSize; + this.blockSize = blockSize; if (encoding != null) { this.encoding = encoding; } @@ -175,7 +175,7 @@ public int getBytesPerChecksum() { } public int getBlocksize() { - return blocksize; + return blockSize; } public long getFileCreateTime() { @@ -233,7 +233,7 @@ public long heapSize() { @Override public HFileContext clone() { try { - return (HFileContext) (super.clone()); + return (HFileContext) super.clone(); } catch (CloneNotSupportedException e) { throw new AssertionError(); // Won't happen } @@ -250,7 +250,7 @@ public String toString() { sb.append(", bytesPerChecksum="); sb.append(bytesPerChecksum); sb.append(", blocksize="); - sb.append(blocksize); + sb.append(blockSize); sb.append(", encoding="); sb.append(encoding); sb.append(", includesMvcc="); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java index 5591f0553b50..97aea8ac0eb1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java @@ -44,11 +44,11 @@ public class HFileContextBuilder { /** Whether tags to be compressed or not **/ private boolean compressTags = false; /** the checksum type **/ - private ChecksumType checksumType = ChecksumType.getDefaultChecksumType(); + private ChecksumType checkSumType = ChecksumType.getDefaultChecksumType(); /** the number of bytes per checksum value **/ private int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM; /** Number of uncompressed bytes we allow per block. */ - private int blocksize = HConstants.DEFAULT_BLOCKSIZE; + private int blockSize = HConstants.DEFAULT_BLOCKSIZE; private DataBlockEncoding encoding = DataBlockEncoding.NONE; /** Crypto context */ private Encryption.Context cryptoContext = Encryption.Context.NONE; @@ -71,9 +71,9 @@ public HFileContextBuilder(final HFileContext hfc) { this.includesTags = hfc.isIncludesTags(); this.compression = hfc.getCompression(); this.compressTags = hfc.isCompressTags(); - this.checksumType = hfc.getChecksumType(); + this.checkSumType = hfc.getChecksumType(); this.bytesPerChecksum = hfc.getBytesPerChecksum(); - this.blocksize = hfc.getBlocksize(); + this.blockSize = hfc.getBlocksize(); this.encoding = hfc.getDataBlockEncoding(); this.cryptoContext = hfc.getEncryptionContext(); this.fileCreateTime = hfc.getFileCreateTime(); @@ -109,7 +109,7 @@ public HFileContextBuilder withCompressTags(boolean compressTags) { } public HFileContextBuilder withChecksumType(ChecksumType checkSumType) { - this.checksumType = checkSumType; + this.checkSumType = checkSumType; return this; } @@ -119,7 +119,7 @@ public HFileContextBuilder withBytesPerCheckSum(int bytesPerChecksum) { } public HFileContextBuilder withBlockSize(int blockSize) { - this.blocksize = blockSize; + this.blockSize = blockSize; return this; } @@ -160,7 +160,7 @@ public HFileContextBuilder withCellComparator(CellComparator cellComparator) { public HFileContext build() { return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression, - compressTags, checksumType, bytesPerChecksum, blocksize, encoding, cryptoContext, + compressTags, checkSumType, bytesPerChecksum, blockSize, encoding, cryptoContext, fileCreateTime, hfileName, columnFamily, tableName, cellComparator); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java index 61b445fcefe0..ef3520b31c78 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java @@ -18,9 +18,12 @@ package org.apache.hadoop.hbase.net; import java.net.InetSocketAddress; +import java.util.Iterator; +import java.util.List; import org.apache.commons.lang3.StringUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort; /** @@ -95,11 +98,14 @@ public String toString() { */ public String toStringWithoutDomain() { String hostname = getHostName(); - String[] parts = hostname.split("\\."); - if (parts.length > 1) { - for (String part : parts) { + List parts = Splitter.on('.').splitToList(hostname); + if (parts.size() > 1) { + Iterator i = parts.iterator(); + String base = i.next(); + while (i.hasNext()) { + String part = i.next(); if (!StringUtils.isNumeric(part)) { - return Address.fromParts(parts[0], getPort()).toString(); + return Address.fromParts(base, getPort()).toString(); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java index 2285917364f0..27eca9479d6a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java @@ -69,6 +69,7 @@ protected void checkRefCount() { ObjectUtil.checkPositive(refCnt(), REFERENCE_COUNT_NAME); } + @Override public int refCnt() { return refCnt.refCnt(); } @@ -103,27 +104,16 @@ public boolean release() { /** Returns the total capacity of this ByteBuff. */ public abstract int capacity(); - /** - * Returns the limit of this ByteBuff - * @return limit of the ByteBuff - */ + /** Returns the limit of this ByteBuff */ public abstract int limit(); - /** - * Marks the limit of this ByteBuff. n * @return This ByteBuff - */ + /** Marks the limit of this ByteBuff */ public abstract ByteBuff limit(int limit); - /** - * Rewinds this ByteBuff and the position is set to 0 - * @return this object - */ + /** Rewinds this ByteBuff and the position is set to 0 */ public abstract ByteBuff rewind(); - /** - * Marks the current position of the ByteBuff - * @return this object - */ + /** Marks the current position of the ByteBuff */ public abstract ByteBuff mark(); /** @@ -156,16 +146,10 @@ public boolean release() { */ public abstract void asSubByteBuffer(int offset, int length, ObjectIntPair pair); - /** - * Returns the number of elements between the current position and the limit. - * @return the remaining elements in this ByteBuff - */ + /** Returns the number of elements between the current position and the limit. */ public abstract int remaining(); - /** - * Returns true if there are elements between the current position and the limt - * @return true if there are elements, false otherwise - */ + /** Returns true if there are elements between the current position and the limit. */ public abstract boolean hasRemaining(); /** @@ -201,31 +185,34 @@ public boolean release() { /** * Fetches the byte at the given index. Does not change position of the underlying ByteBuffers n - * * @return the byte at the given index + * @return the byte at the given index */ public abstract byte get(int index); /** * Fetches the byte at the given offset from current position. Does not change position of the - * underlying ByteBuffers. n * @return the byte value at the given index. + * underlying ByteBuffers. + * @return the byte value at the given index. */ public abstract byte getByteAfterPosition(int offset); /** - * Writes a byte to this ByteBuff at the current position and increments the position n * @return - * this object + * Writes a byte to this ByteBuff at the current position and increments the position + * @return this object */ public abstract ByteBuff put(byte b); /** - * Writes a byte to this ByteBuff at the given index nn * @return this object + * Writes a byte to this ByteBuff at the given index + * @return this object */ public abstract ByteBuff put(int index, byte b); /** * Copies the specified number of bytes from this ByteBuff's current position to the byte[]'s - * offset. Also advances the position of the ByteBuff by the given length. n * @param offset - * within the current array + * offset. Also advances the position of the ByteBuff by the given length. + * @param dst the byte[] to which the ByteBuff's content is to be copied + * @param offset within the current array * @param length upto which the bytes to be copied */ public abstract void get(byte[] dst, int offset, int length); @@ -242,13 +229,15 @@ public boolean release() { /** * Copies the content from this ByteBuff's current position to the byte array and fills it. Also - * advances the position of the ByteBuff by the length of the byte[]. n + * advances the position of the ByteBuff by the length of the byte[]. + * @param dst the byte[] to which the ByteBuff's content is to be copied */ public abstract void get(byte[] dst); /** - * Copies from the given byte[] to this ByteBuff n * @param offset the position in the byte array - * from which the copy should be done + * Copies from the given byte[] to this ByteBuff + * @param src source byte array + * @param offset the position in the byte array from which the copy should be done * @param length the length upto which the copy should happen * @return this ByteBuff */ @@ -256,6 +245,8 @@ public boolean release() { /** * Copies from the given byte[] to this ByteBuff n * @return this ByteBuff + * @param src source byte array + * @return this ByteBuff */ public abstract ByteBuff put(byte[] src); @@ -270,8 +261,7 @@ public boolean release() { /** * Returns the short value at the current position. Also advances the position by the size of - * short - * @return the short value at the current position + * short. */ public abstract short getShort(); @@ -291,43 +281,37 @@ public boolean release() { public abstract short getShortAfterPosition(int offset); /** - * Returns the int value at the current position. Also advances the position by the size of int - * @return the int value at the current position + * Returns the int value at the current position. Also advances the position by the size of int. */ public abstract int getInt(); /** * Writes an int to this ByteBuff at its current position. Also advances the position by size of - * int - * @param value Int value to write - * @return this object + * int. */ public abstract ByteBuff putInt(int value); /** * Fetches the int at the given index. Does not change position of the underlying ByteBuffers. * Even if the current int does not fit in the current item we can safely move to the next item - * and fetch the remaining bytes forming the int n * @return the int value at the given index + * and fetch the remaining bytes forming the int. */ public abstract int getInt(int index); /** * Fetches the int value at the given offset from current position. Does not change position of - * the underlying ByteBuffers. n * @return the int value at the given index. + * the underlying ByteBuffers. */ public abstract int getIntAfterPosition(int offset); /** - * Returns the long value at the current position. Also advances the position by the size of long - * @return the long value at the current position + * Returns the long value at the current position. Also advances the position by the size of long. */ public abstract long getLong(); /** * Writes a long to this ByteBuff at its current position. Also advances the position by size of - * long - * @param value Long value to write - * @return this object + * long. */ public abstract ByteBuff putLong(long value); @@ -347,16 +331,13 @@ public boolean release() { /** * Copy the content from this ByteBuff to a byte[]. - * @return byte[] with the copied contents from this ByteBuff. */ public byte[] toBytes() { return toBytes(0, this.limit()); } /** - * Copy the content from this ByteBuff to a byte[] based on the given offset and length n * the - * position from where the copy should start n * the length upto which the copy has to be done - * @return byte[] with the copied contents from this ByteBuff. + * Copy the content from this ByteBuff to a byte[] based on the given offset and length. */ public abstract byte[] toBytes(int offset, int length); @@ -379,25 +360,16 @@ public byte[] toBytes() { */ public abstract ByteBuff put(int offset, ByteBuff src, int srcOffset, int length); - /** - * Reads bytes from the given channel into this ByteBuff n * @return The number of bytes read from - * the channel n - */ + /** Reads bytes from the given channel into this ByteBuf. */ public abstract int read(ReadableByteChannel channel) throws IOException; - /** - * Reads bytes from FileChannel into this ByteBuff - */ + /** Reads bytes from FileChannel into this ByteBuff */ public abstract int read(FileChannel channel, long offset) throws IOException; - /** - * Write this ByteBuff's data into target file - */ + /** Write this ByteBuff's data into target file */ public abstract int write(FileChannel channel, long offset) throws IOException; - /** - * function interface for Channel read - */ + /** Functional interface for Channel read */ @FunctionalInterface interface ChannelReader { int read(ReadableByteChannel channel, ByteBuffer buf, long offset) throws IOException; @@ -438,10 +410,7 @@ public static int read(ReadableByteChannel channel, ByteBuffer buf, long offset, return (nBytes > 0) ? nBytes : ret; } - /** - * Read integer from ByteBuff coded in 7 bits and increment position. - * @return Read integer. - */ + /** Read integer from ByteBuff coded in 7 bits and increment position. */ public static int readCompressedInt(ByteBuff buf) { byte b = buf.get(); if ((b & ByteBufferUtils.NEXT_BIT_MASK) != 0) { @@ -560,9 +529,7 @@ public RefCnt getRefCnt() { return refCnt; } - /** - * Make this private because we don't want to expose the refCnt related wrap method to upstream. - */ + // Make this private because we don't want to expose the refCnt related wrap method to upstream. private static ByteBuff wrap(List buffers, RefCnt refCnt) { if (buffers == null || buffers.size() == 0) { throw new IllegalArgumentException("buffers shouldn't be null or empty"); @@ -572,9 +539,7 @@ private static ByteBuff wrap(List buffers, RefCnt refCnt) { : new MultiByteBuff(refCnt, buffers.toArray(new ByteBuffer[0])); } - /** - * Make this private because we don't want to expose the refCnt related wrap method to upstream. - */ + // Make this private because we don't want to expose the refCnt related wrap method to upstream. private static ByteBuff wrap(ByteBuffer buffer, RefCnt refCnt) { return new SingleByteBuff(refCnt, buffer); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java index 042b21f6c000..c55ee021bd00 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java @@ -578,9 +578,7 @@ public void get(byte[] dst) { /** * Copies the specified number of bytes from this MBB's current position to the byte[]'s offset. - * Also advances the position of the MBB by the given length. n * @param offset within the current - * array - * @param length upto which the bytes to be copied + * Also advances the position of the MBB by the given length. */ @Override public void get(byte[] dst, int offset, int length) { @@ -878,23 +876,16 @@ private static byte int1(int x) { } private static byte int0(int x) { - return (byte) (x); + return (byte) x; } - /** - * Copies from the given byte[] to this MBB n * @return this MBB - */ + /** Copies from the given byte[] to this MBB */ @Override public final MultiByteBuff put(byte[] src) { return put(src, 0, src.length); } - /** - * Copies from the given byte[] to this MBB n * @param offset the position in the byte array from - * which the copy should be done - * @param length the length upto which the copy should happen - * @return this MBB - */ + /** Copies from the given byte[] to this MBB. */ @Override public MultiByteBuff put(byte[] src, int offset, int length) { checkRefCount(); @@ -965,7 +956,7 @@ private static byte long1(long x) { } private static byte long0(long x) { - return (byte) (x); + return (byte) x; } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java index 0d1b9fd2ed72..94ea6f5845c8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java @@ -64,6 +64,7 @@ public RSGroupInfo(String name) { } /** + * Constructor * @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information for a table will be * stored in the configuration of a table so this will be removed. */ @@ -79,52 +80,37 @@ public RSGroupInfo(RSGroupInfo src) { this(src.name, src.servers, src.tables); } - /** - * Get group name. - */ + /** Get group name. */ public String getName() { return name; } - /** - * Adds the given server to the group. - */ + /** Adds the given server to the group. */ public void addServer(Address hostPort) { servers.add(hostPort); } - /** - * Adds the given servers to the group. - */ + /** Adds the given servers to the group. */ public void addAllServers(Collection

hostPort) { servers.addAll(hostPort); } - /** - * @param hostPort hostPort of the server - * @return true, if a server with hostPort is found - */ + /** Returns true if a server with hostPort is found */ public boolean containsServer(Address hostPort) { return servers.contains(hostPort); } - /** - * Get list of servers. - */ + /** Get list of servers. */ public Set
getServers() { return servers; } - /** - * Remove given server from the group. - */ + /** Remove given server from the group. */ public boolean removeServer(Address hostPort) { return servers.remove(hostPort); } - /** - * Getter for fetching an unmodifiable {@link #configuration} map. - */ + /** Getter for fetching an unmodifiable {@link #configuration} map. */ public Map getConfiguration() { // shallow pointer copy return Collections.unmodifiableMap(configuration); @@ -139,9 +125,7 @@ public void setConfiguration(String key, String value) { configuration.put(key, Objects.requireNonNull(value)); } - /** - * Remove a config setting represented by the key from the {@link #configuration} map - */ + /** Remove a config setting represented by the key from the {@link #configuration} map */ public void removeConfiguration(final String key) { configuration.remove(key); } @@ -157,6 +141,7 @@ public SortedSet getTables() { } /** + * Add a table * @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in * the configuration of a table so this will be removed. */ @@ -166,6 +151,7 @@ public void addTable(TableName table) { } /** + * Add a collection of tables * @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in * the configuration of a table so this will be removed. */ @@ -175,6 +161,7 @@ public void addAllTables(Collection arg) { } /** + * Check if the group contains a table * @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in * the configuration of a table so this will be removed. */ @@ -184,6 +171,7 @@ public boolean containsTable(TableName table) { } /** + * Remove a table * @deprecated Since 3.0.0, will be removed in 4.0.0. The rsgroup information will be stored in * the configuration of a table so this will be removed. */ @@ -215,12 +203,10 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (!(o instanceof RSGroupInfo)) { return false; } - RSGroupInfo rsGroupInfo = (RSGroupInfo) o; - if (!name.equals(rsGroupInfo.name)) { return false; } @@ -233,7 +219,6 @@ public boolean equals(Object o) { if (!configuration.equals(rsGroupInfo.configuration)) { return false; } - return true; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java index 117036035877..417c59c7d028 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java @@ -78,6 +78,7 @@ public static void initialize(Configuration conf) throws IOException { } /** + * Check if the current user is a super user * @return true if current user is a super user (whether as user running process, declared as * individual superuser or member of supergroup), false otherwise. * @param user to check @@ -104,6 +105,7 @@ public static boolean isSuperUser(User user) { } /** + * Check if the current user is a super user * @return true if current user is a super user, false otherwise. * @param user to check */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java index e7d37069ac54..9ef9e2ddc175 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java @@ -139,7 +139,7 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (!(o instanceof User)) { return false; } return ugi.equals(((User) o).ugi); @@ -155,9 +155,7 @@ public String toString() { return ugi.toString(); } - /** - * Returns the {@code User} instance within current execution context. - */ + /** Returns the {@code User} instance within current execution context. */ public static User getCurrent() throws IOException { User user = new SecureHadoopUser(); if (user.getUGI() == null) { @@ -166,9 +164,7 @@ public static User getCurrent() throws IOException { return user; } - /** - * Executes the given action as the login user n * @return the result of the action n - */ + /** Executes the given action as the login user */ @SuppressWarnings({ "rawtypes", "unchecked" }) public static T runAsLoginUser(PrivilegedExceptionAction action) throws IOException { try { @@ -324,7 +320,10 @@ public T runAs(PrivilegedExceptionAction action) return ugi.doAs(action); } - /** @see User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[]) */ + /** + * Create a user for testing. + * @see User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[]) + */ public static User createUserForTesting(Configuration conf, String name, String[] groups) { synchronized (UserProvider.class) { if (!(UserProvider.groups instanceof TestingGroups)) { @@ -365,9 +364,7 @@ public static void login(String keytabLocation, String principalName) throws IOE } } - /** - * Returns the result of {@code UserGroupInformation.isSecurityEnabled()}. - */ + /** Returns the result of {@code UserGroupInformation.isSecurityEnabled()}. */ public static boolean isSecurityEnabled() { return UserGroupInformation.isSecurityEnabled(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java index f6766bd1f3c4..fcf6cc648969 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java @@ -141,7 +141,7 @@ public static void setUserProviderForTesting(Configuration conf, } /** - * @return the userName for the current logged-in user. + * Returns the userName for the current logged-in user. * @throws IOException if the underlying user cannot be obtained */ public String getCurrentUserName() throws IOException { @@ -155,9 +155,9 @@ public boolean isHBaseSecurityEnabled() { } /** - * @return whether or not Kerberos authentication is configured for Hadoop. For non-secure Hadoop, - * this always returns false. For secure Hadoop, it will return the value - * from {@code UserGroupInformation.isSecurityEnabled()}. + * Return whether or not Kerberos authentication is configured for Hadoop. For non-secure Hadoop, + * this always returns false. For secure Hadoop, it will return the value from + * {@code UserGroupInformation.isSecurityEnabled()}. */ public boolean isHadoopSecurityEnabled() { return User.isSecurityEnabled(); @@ -172,7 +172,7 @@ public boolean shouldLoginFromKeytab() { } /** - * @return the current user within the current execution context + * Return the current user within the current execution context * @throws IOException if the user cannot be loaded */ public User getCurrent() throws IOException { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java index 6b436ba6b192..bc184b1098aa 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java @@ -38,6 +38,7 @@ */ @InterfaceAudience.Private @InterfaceStability.Stable +@SuppressWarnings({ "unchecked", "rawtypes", "hiding", "TypeParameterShadowing" }) public class CopyOnWriteArrayMap extends AbstractMap implements Map, ConcurrentNavigableMap { private final Comparator keyComparator; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java index c5fbf52fd46c..023a5da5213d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java @@ -87,6 +87,7 @@ public abstract class AbstractByteRange implements ByteRange { // // methods for managing the backing array and range viewport // + @Override public byte[] getBytes() { return bytes; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java index 8b9b2205cf7f..eee8c6931b33 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Addressing.java @@ -35,6 +35,7 @@ public class Addressing { public static final String HOSTNAME_PORT_SEPARATOR = ":"; /** + * Create a socket address * @param hostAndPort Formatted as <hostname> ':' <port> * @return An InetSocketInstance */ @@ -44,6 +45,7 @@ public class Addressing { } /** + * Create a host-and-port string * @param hostname Server hostname * @param port Server port * @return Returns a concatenation of hostname and port in following @@ -56,6 +58,7 @@ public static String createHostAndPortStr(final String hostname, final int port) } /** + * Parse the hostname portion of a host-and-port string * @param hostAndPort Formatted as <hostname> ':' <port> * @return The hostname portion of hostAndPort */ @@ -68,6 +71,7 @@ public static String parseHostname(final String hostAndPort) { } /** + * Parse the port portion of a host-and-port string * @param hostAndPort Formatted as <hostname> ':' <port> * @return The port portion of hostAndPort */ @@ -163,9 +167,7 @@ public static String inetSocketAddress2String(InetSocketAddress address) { : address.toString(); } - /** - * Interface for AddressSelectionCondition to check if address is acceptable - */ + /** Interface for AddressSelectionCondition to check if address is acceptable */ public interface AddressSelectionCondition { /** * Condition on which to accept inet address diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java index 586b41cc025d..020bfbe795ef 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java @@ -84,6 +84,7 @@ public static interface AvlKeyComparator { @InterfaceAudience.Private public static interface AvlNodeVisitor { /** + * Visitor * @param node the node that we are currently visiting * @return false to stop the iteration. true to continue. */ @@ -96,6 +97,7 @@ public static interface AvlNodeVisitor { @InterfaceAudience.Private public static class AvlTree { /** + * Return the node that matches the specified key or null in case of node not found. * @param root the current root of the tree * @param key the key for the node we are trying to find * @param keyComparator the comparator to use to match node and key @@ -117,6 +119,7 @@ public static TNode get(TNode root, final Object key, } /** + * Return the first node of the tree. * @param root the current root of the tree * @return the first (min) node of the tree */ @@ -130,6 +133,7 @@ public static TNode getFirst(TNode root) { } /** + * Return the last node of the tree. * @param root the current root of the tree * @return the last (max) node of the tree */ @@ -325,6 +329,7 @@ public AvlTreeIterator() { /** * Create the iterator starting from the first (min) node of the tree + * @param root the current root of the tree */ public AvlTreeIterator(final TNode root) { seekFirst(root); @@ -448,6 +453,7 @@ private void seekNext() { @InterfaceAudience.Private public static class AvlIterableList { /** + * Return the successor of the current node * @param node the current node * @return the successor of the current node */ @@ -456,6 +462,7 @@ public static TNode readNext(TNode node) { } /** + * Return the predecessor of the current node * @param node the current node * @return the predecessor of the current node */ @@ -464,6 +471,7 @@ public static TNode readPrev(TNode node) { } /** + * Prepend a node to the tree * @param head the head of the linked list * @param node the node to add to the front of the list * @return the new head of the list @@ -484,6 +492,7 @@ public static TNode prepend(TNode head, TNode node } /** + * Append a node to the tree * @param head the head of the linked list * @param node the node to add to the tail of the list * @return the new head of the list @@ -504,6 +513,7 @@ public static TNode append(TNode head, TNode node) } /** + * Append a list of nodes to the tree * @param head the head of the current linked list * @param otherHead the head of the list to append to the current list * @return the new head of the current list @@ -522,6 +532,7 @@ public static TNode appendList(TNode head, TNode o } /** + * Remove a node from the tree * @param head the head of the linked list * @param node the node to remove from the list * @return the new head of the list @@ -541,6 +552,7 @@ public static TNode remove(TNode head, TNode node) } /** + * Prepend a node to the tree before a specific node * @param head the head of the linked list * @param base the node which we want to add the {@code node} before it * @param node the node which we want to add it before the {@code base} node @@ -554,10 +566,7 @@ public static TNode prepend(TNode head, TNode base return head == base ? node : head; } - /** - * @param node the node to check - * @return true if the node is linked to a list, false otherwise - */ + /** Return true if the node is linked to a list, false otherwise */ public static boolean isLinked(TNode node) { return node.iterPrev != null && node.iterNext != null; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java index 646c1d264375..18ad1b5c69d6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java @@ -143,6 +143,7 @@ public int write(long offset, ByteBuff src) { * Transfer bytes from source {@link ByteBuff} to destination {@link ByteBuffer}. Position of both * source and destination will be advanced. */ + @SuppressWarnings("UnnecessaryLambda") private static final BiConsumer WRITER = (dst, src) -> { int off = src.position(), len = dst.remaining(); src.get(dst, off, len); @@ -153,6 +154,7 @@ public int write(long offset, ByteBuff src) { * Transfer bytes from source {@link ByteBuffer} to destination {@link ByteBuff}, Position of both * source and destination will be advanced. */ + @SuppressWarnings("UnnecessaryLambda") private static final BiConsumer READER = (src, dst) -> { int off = dst.position(), len = src.remaining(), srcOff = src.position(); dst.put(off, ByteBuff.wrap(src), srcOff, len); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 6fd2b01a078b..d66625060402 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -58,6 +58,7 @@ @edu.umd.cs.findbugs.annotations.SuppressWarnings( value = "EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", justification = "It has been like this forever") +@SuppressWarnings("MixedMutabilityReturnType") public class Bytes implements Comparable { // Using the charset canonical name for String/byte[] conversions is much @@ -186,16 +187,12 @@ public byte[] get() { return this.bytes; } - /** - * @param b Use passed bytes as backing array for this instance. - */ + /** Use passed bytes as backing array for this instance. */ public void set(final byte[] b) { set(b, 0, b.length); } - /** - * @param b Use passed bytes as backing array for this instance. nn - */ + /** Use passed bytes as backing array for this instance. */ public void set(final byte[] b, final int offset, final int length) { this.bytes = b; this.offset = offset; @@ -211,9 +208,7 @@ public int getLength() { return this.length; } - /** - * n - */ + /** Return the offset into the buffer. */ public int getOffset() { return this.offset; } @@ -243,9 +238,6 @@ public int compareTo(final byte[] that) { return BYTES_RAWCOMPARATOR.compare(this.bytes, this.offset, this.length, that, 0, that.length); } - /** - * @see Object#equals(Object) - */ @Override public boolean equals(Object right_obj) { if (right_obj instanceof byte[]) { @@ -257,15 +249,13 @@ public boolean equals(Object right_obj) { return false; } - /** - * @see Object#toString() - */ @Override public String toString() { return Bytes.toString(bytes, offset, length); } /** + * Convert a list of byte[] to an array * @param array List of byte []. * @return Array of byte []. */ @@ -278,21 +268,15 @@ public static byte[][] toArray(final List array) { return results; } - /** - * Returns a copy of the bytes referred to by this writable - */ + /** Returns a copy of the bytes referred to by this writable */ public byte[] copyBytes() { return Arrays.copyOfRange(bytes, offset, offset + length); } - /** - * Byte array comparator class. - */ + /** Byte array comparator class. */ @InterfaceAudience.Public public static class ByteArrayComparator implements RawComparator { - /** - * Constructor - */ + public ByteArrayComparator() { super(); } @@ -338,14 +322,10 @@ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { } } - /** - * Pass this to TreeMaps where byte [] are keys. - */ + /** Pass this to TreeMaps where byte [] are keys. */ public final static Comparator BYTES_COMPARATOR = new ByteArrayComparator(); - /** - * Use comparing byte arrays, byte-by-byte - */ + /** Use comparing byte arrays, byte-by-byte */ public final static RawComparator BYTES_RAWCOMPARATOR = new ByteArrayComparator(); /** @@ -485,6 +465,7 @@ private static byte[] readBytes(ByteBuffer buf) { } /** + * Convert a byte[] into a string. Charset is assumed to be UTF-8. * @param b Presumed UTF-8 encoded byte array. * @return String made from b */ @@ -768,7 +749,8 @@ public static int putLong(byte[] bytes, int offset, long val) { } /** - * Presumes float encoded as IEEE 754 floating-point "single format" + * Put a float value out to the specified byte array position. Presumes float encoded as IEEE 754 + * floating-point "single format" * @param bytes byte array * @return Float made from passed byte array. */ @@ -777,7 +759,8 @@ public static float toFloat(byte[] bytes) { } /** - * Presumes float encoded as IEEE 754 floating-point "single format" + * Put a float value out to the specified byte array position. Presumes float encoded as IEEE 754 + * floating-point "single format" * @param bytes array to convert * @param offset offset into array * @return Float made from passed byte array. @@ -787,6 +770,7 @@ public static float toFloat(byte[] bytes, int offset) { } /** + * Put a float value out to the specified byte array position. * @param bytes byte array * @param offset offset to write to * @param f float value @@ -796,33 +780,24 @@ public static int putFloat(byte[] bytes, int offset, float f) { return putInt(bytes, offset, Float.floatToRawIntBits(f)); } - /** - * @param f float value - * @return the float represented as byte [] - */ + /** Return the float represented as byte[] */ public static byte[] toBytes(final float f) { // Encode it as int return Bytes.toBytes(Float.floatToRawIntBits(f)); } - /** - * @param bytes byte array - * @return Return double made from passed bytes. - */ + /** Return double made from passed bytes. */ public static double toDouble(final byte[] bytes) { return toDouble(bytes, 0); } - /** - * @param bytes byte array - * @param offset offset where double is - * @return Return double made from passed bytes. - */ + /** Return double made from passed bytes. */ public static double toDouble(final byte[] bytes, final int offset) { return Double.longBitsToDouble(toLong(bytes, offset, SIZEOF_LONG)); } /** + * Put a double value out to the specified byte array position as the IEEE 754 double format. * @param bytes byte array * @param offset offset to write to * @param d value @@ -1031,9 +1006,7 @@ public static int putAsShort(byte[] bytes, int offset, int val) { return offset + SIZEOF_SHORT; } - /** - * Convert a BigDecimal value to a byte array n * @return the byte array - */ + /** Convert a BigDecimal value to a byte array */ public static byte[] toBytes(BigDecimal val) { byte[] valueBytes = val.unscaledValue().toByteArray(); byte[] result = new byte[valueBytes.length + SIZEOF_INT]; @@ -1042,16 +1015,12 @@ public static byte[] toBytes(BigDecimal val) { return result; } - /** - * Converts a byte array to a BigDecimal n * @return the char value - */ + /** Converts a byte array to a BigDecimal */ public static BigDecimal toBigDecimal(byte[] bytes) { return toBigDecimal(bytes, 0, bytes.length); } - /** - * Converts a byte array to a BigDecimal value nnn * @return the char value - */ + /** Converts a byte array to a BigDecimal value */ public static BigDecimal toBigDecimal(byte[] bytes, int offset, final int length) { if (bytes == null || length < SIZEOF_INT + 1 || (offset + length > bytes.length)) { return null; @@ -1082,6 +1051,7 @@ public static int putBigDecimal(byte[] bytes, int offset, BigDecimal val) { } /** + * Encode a long value as a variable length integer. * @param vint Integer to make a vint of. * @return Vint as bytes array. */ @@ -1120,6 +1090,7 @@ public static byte[] vintToBytes(final long vint) { } /** + * Reads a zero-compressed encoded long from input buffer and returns it. * @param buffer buffer to convert * @return vint bytes as an integer. */ @@ -1161,6 +1132,7 @@ public static long readAsVLong(final byte[] buffer, final int offset) { } /** + * Lexicographically compare two arrays. * @param left left operand * @param right right operand * @return 0 if equal, < 0 if left is less than right, etc. @@ -1480,6 +1452,7 @@ public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, i } /** + * Lexicographically determine the equality of two arrays. * @param left left operand * @param right right operand * @return True if equal @@ -1500,6 +1473,16 @@ public static boolean equals(final byte[] left, final byte[] right) { return compareTo(left, right) == 0; } + /** + * Lexicographically determine the equality of two arrays. + * @param left left operand + * @param leftOffset offset into left operand + * @param leftLen length of left operand + * @param right right operand + * @param rightOffset offset into right operand + * @param rightLen length of right operand + * @return True if equal + */ public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset, int rightLen) { // short circuit case @@ -1524,6 +1507,7 @@ public static boolean equals(final byte[] left, int leftOffset, int leftLen, fin } /** + * Lexicographically determine the equality of two byte[], one as ByteBuffer. * @param a left operand * @param buf right operand * @return True if equal @@ -1553,6 +1537,7 @@ public static boolean startsWith(byte[] bytes, byte[] prefix) { } /** + * Calculate a hash code from a given byte array. * @param b bytes to hash * @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the passed in array. This * method is what {@link org.apache.hadoop.io.Text} use calculating hash code. @@ -1562,6 +1547,7 @@ public static int hashCode(final byte[] b) { } /** + * Calculate a hash code from a given byte array. * @param b value * @param length length of the value * @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the passed in array. This @@ -1572,6 +1558,7 @@ public static int hashCode(final byte[] b, final int length) { } /** + * Calculate a hash code from a given byte array suitable for use as a key in maps. * @param b bytes to hash * @return A hash of b as an Integer that can be used as key in Maps. */ @@ -1580,6 +1567,7 @@ public static Integer mapKey(final byte[] b) { } /** + * Calculate a hash code from a given byte array suitable for use as a key in maps. * @param b bytes to hash * @param length length to hash * @return A hash of b as an Integer that can be used as key in Maps. @@ -1589,6 +1577,7 @@ public static Integer mapKey(final byte[] b, final int length) { } /** + * Concatenate byte arrays. * @param a lower half * @param b upper half * @return New array that has a in lower half and b in upper half. @@ -1598,6 +1587,7 @@ public static byte[] add(final byte[] a, final byte[] b) { } /** + * Concatenate byte arrays. * @param a first third * @param b second third * @param c third third @@ -1612,6 +1602,7 @@ public static byte[] add(final byte[] a, final byte[] b, final byte[] c) { } /** + * Concatenate byte arrays. * @param arrays all the arrays to concatenate together. * @return New array made from the concatenation of the given arrays. */ @@ -1630,6 +1621,7 @@ public static byte[] add(final byte[][] arrays) { } /** + * Make a new byte array from a subset of bytes at the head of another. * @param a array * @param length amount of bytes to grab * @return First length bytes from a @@ -1644,6 +1636,7 @@ public static byte[] head(final byte[] a, final int length) { } /** + * Make a new byte array from a subset of bytes at the tail of another. * @param a array * @param length amount of bytes to snarf * @return Last length bytes from a @@ -1658,6 +1651,7 @@ public static byte[] tail(final byte[] a, final int length) { } /** + * Make a new byte array from a subset of bytes at the head of another, zero padded as desired. * @param a array * @param length new array size * @return Value in a plus length prepended 0 bytes @@ -1671,6 +1665,7 @@ public static byte[] padHead(final byte[] a, final int length) { } /** + * Make a new byte array from a subset of bytes at the tail of another, zero padded as desired. * @param a array * @param length new array size * @return Value in a plus length appended 0 bytes @@ -1816,6 +1811,7 @@ public Iterator iterator() { } /** + * Calculate the hash code for a given range of bytes. * @param bytes array to hash * @param offset offset to start from * @param length length to hash @@ -1828,6 +1824,7 @@ public static int hashCode(byte[] bytes, int offset, int length) { } /** + * Create an array of byte[] given an array of String. * @param t operands * @return Array of byte arrays made from passed array of Text */ @@ -1840,6 +1837,7 @@ public static byte[][] toByteArrays(final String[] t) { } /** + * Create an array of byte[] given an array of String. * @param t operands * @return Array of binary byte arrays made from passed array of binary strings */ @@ -1852,6 +1850,7 @@ public static byte[][] toBinaryByteArrays(final String[] t) { } /** + * Create a byte[][] where first and only entry is column * @param column operand * @return A byte array of a byte array where first and only entry is column */ @@ -1860,6 +1859,7 @@ public static byte[][] toByteArrays(final String column) { } /** + * Create a byte[][] where first and only entry is column * @param column operand * @return A byte array of a byte array where first and only entry is column */ @@ -1995,7 +1995,7 @@ private static byte[] binaryIncrementNeg(byte[] value, long amount) { for (int i = 0; i < value.length; i++) { int cur = ((int) amo % 256) * sign; amo = (amo >> 8); - int val = ((~value[value.length - i - 1]) & 0x0ff) + 1; + int val = (~value[value.length - i - 1] & 0x0ff) + 1; int total = cur - val; if (total >= 0) { amo += sign; @@ -2208,6 +2208,7 @@ public static int indexOf(byte[] array, byte[] target) { } /** + * Return true if target is present as an element anywhere in the given array. * @param array an array of {@code byte} values, possibly empty * @param target a primitive {@code byte} value * @return {@code true} if {@code target} is present as an element anywhere in {@code array}. @@ -2217,6 +2218,7 @@ public static boolean contains(byte[] array, byte target) { } /** + * Return true if target is present as an element anywhere in the given array. * @param array an array of {@code byte} values, possibly empty * @param target an array of {@code byte} * @return {@code true} if {@code target} is present anywhere in {@code array} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java index 6ac3edbc2d77..0b9dcc2f0438 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java @@ -396,7 +396,7 @@ private static int[] getSizeCoefficients(Class cl, boolean debug) { * @return the size estimate, in bytes */ private static long estimateBaseFromCoefficients(int[] coeff, boolean debug) { - long prealign_size = OBJECT + coeff[0] + coeff[2] * REFERENCE; + long prealign_size = (long) OBJECT + coeff[0] + coeff[2] * REFERENCE; // Round up to a multiple of 8 long size = align(prealign_size) + align(coeff[1] * ARRAY); @@ -429,7 +429,7 @@ public static long estimateBase(Class cl, boolean debug) { * @return smallest number >= input that is a multiple of 8 */ public static int align(int num) { - return (int) (align((long) num)); + return (int) align((long) num); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java index 0a4aae9631c7..ca8d27d8eebc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java @@ -75,7 +75,7 @@ private CommonFSUtils() { */ public static boolean isStartingWithPath(final Path rootPath, final String path) { String uriRootPath = rootPath.toUri().getPath(); - String tailUriPath = (new Path(path)).toUri().getPath(); + String tailUriPath = new Path(path).toUri().getPath(); return tailUriPath.startsWith(uriRootPath); } @@ -280,6 +280,7 @@ public static String getPath(Path p) { } /** + * Get the path for the root data directory * @param c configuration * @return {@link Path} to hbase root directory from configuration as a qualified Path. * @throws IOException e @@ -308,6 +309,7 @@ public static FileSystem getRootDirFileSystem(final Configuration c) throws IOEx } /** + * Get the path for the root directory for WAL data * @param c configuration * @return {@link Path} to hbase log root directory: e.g. {@value HBASE_WAL_DIR} from * configuration as a qualified Path. Defaults to HBase root dir. @@ -550,8 +552,7 @@ private static void invokeSetStoragePolicy(final FileSystem fs, final Path path, } /** - * @param conf must not be null - * @return True if this filesystem whose scheme is 'hdfs'. + * Return true if this is a filesystem whose scheme is 'hdfs'. * @throws IOException from underlying FileSystem */ public static boolean isHDFS(final Configuration conf) throws IOException { @@ -570,8 +571,7 @@ public static boolean isRecoveredEdits(Path path) { } /** - * @param conf must not be null - * @return Returns the filesystem of the hbase rootdir. + * Returns the filesystem of the hbase rootdir. * @throws IOException from underlying FileSystem */ public static FileSystem getCurrentFileSystem(Configuration conf) throws IOException { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DNS.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DNS.java index 49a8a256b2bc..50eefa82006f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DNS.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DNS.java @@ -43,6 +43,7 @@ public final class DNS { private static Method GET_DEFAULT_HOST_METHOD; /** + * Hostname configuration key * @deprecated since 2.4.0 and will be removed in 4.0.0. Use {@link DNS#UNSAFE_RS_HOSTNAME_KEY} * instead. * @see HBASE-24667 @@ -65,7 +66,7 @@ public enum ServerType { MASTER("master"), REGIONSERVER("regionserver"); - private String name; + private final String name; ServerType(String name) { this.name = name; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java index d3b0e82b1dc2..484f9aa6b97d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExceptionUtil.java @@ -46,9 +46,7 @@ public static boolean isInterrupt(Throwable t) { return (t instanceof InterruptedIOException || t instanceof ClosedByInterruptException); } - /** - * @throws InterruptedIOException if t was an interruption. Does nothing otherwise. - */ + /** Throw InterruptedIOException if t was an interruption, nothing otherwise. */ public static void rethrowIfInterrupt(Throwable t) throws InterruptedIOException { InterruptedIOException iie = asInterrupt(t); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExponentialMovingAverage.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExponentialMovingAverage.java index e49e84c9b593..8435cdca1512 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExponentialMovingAverage.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ExponentialMovingAverage.java @@ -24,7 +24,7 @@ * exponentially. It brings benefits that it is more sensitive, and can see the trends easily. */ @InterfaceAudience.Private -public class ExponentialMovingAverage extends WindowMovingAverage { +public class ExponentialMovingAverage extends WindowMovingAverage { private double alpha; private double previousAverage; private double currentAverage; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java index bdb33ee16345..d692d712dcdd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java @@ -94,7 +94,6 @@ public static Hash getInstance(Configuration conf) { /** * Calculate a hash using bytes from HashKey and the provided seed value. - * @param * @param hashKey key to extract the hash * @param initval the seed value * @return hash value diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HashKey.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HashKey.java index e4e89f7711f2..9f12acb0b674 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HashKey.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HashKey.java @@ -21,7 +21,6 @@ /** * Used to calculate the hash {@link Hash} algorithms for Bloomfilters. - * @param the type of HashKey */ @InterfaceAudience.Private public abstract class HashKey { @@ -31,9 +30,7 @@ public HashKey(T t) { this.t = t; } - /** - * n * @return The byte at the given position in this HashKey - */ + /** Return The byte at the given position in this HashKey */ public abstract byte get(int pos); /** Returns The number of bytes in this HashKey */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java index 5e940702427e..5e7abba3935f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java @@ -27,12 +27,12 @@ /** * Utility class for converting objects to JRuby. It handles null, Boolean, Number, String, byte[], - * List<Object>, Map<String, Object> structures. + * List<Object>, Map<String, Object> structures. *

* E.g. * *

- * Map<String, Object> map = new LinkedHashMap<>();
+ * Map<String, Object> map = new LinkedHashMap<>();
  * map.put("null", null);
  * map.put("boolean", true);
  * map.put("number", 1);
@@ -45,8 +45,8 @@
  * Calling {@link #print(Object)} method will result:
  *
  * 
- * { null => '', boolean => 'true', number => '1', string => 'str',
- *   binary => '010203', list => [ '1', '2', 'true' ] }
+ * { null => '', boolean => 'true', number => '1', string => 'str',
+ *   binary => '010203', list => [ '1', '2', 'true' ] }
  * 
*

*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java index 2104511b4f2d..fbad0da62a72 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java @@ -26,10 +26,13 @@ import java.lang.management.RuntimeMXBean; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; +import java.util.Iterator; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; + /** * This class is a wrapper for the implementation of com.sun.management.UnixOperatingSystemMXBean It * will decide to use the sun api or its own implementation depending on the runtime (vendor) used. @@ -167,11 +170,10 @@ public long getOpenFileDescriptorCount() { // need to get the PID number of the process first RuntimeMXBean rtmbean = ManagementFactory.getRuntimeMXBean(); String rtname = rtmbean.getName(); - String[] pidhost = rtname.split("@"); - + Iterator pidhost = Splitter.on('@').split(rtname).iterator(); // using linux bash commands to retrieve info Process p = Runtime.getRuntime() - .exec(new String[] { "bash", "-c", "ls /proc/" + pidhost[0] + "/fdinfo | wc -l" }); + .exec(new String[] { "bash", "-c", "ls /proc/" + pidhost.next() + "/fdinfo | wc -l" }); inputStream = p.getInputStream(); inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); bufferedReader = new BufferedReader(inputStreamReader); @@ -208,6 +210,7 @@ public long getOpenFileDescriptorCount() { } /** + * Get the system load average * @see java.lang.management.OperatingSystemMXBean#getSystemLoadAverage */ public double getSystemLoadAverage() { @@ -215,9 +218,9 @@ public double getSystemLoadAverage() { } /** - * @return the physical free memory (not the JVM one, as it's not very useful as it depends on the - * GC), but the one from the OS as it allows a little bit more to guess if the machine is - * overloaded or not). + * Return the physical free memory (not the JVM one, as it's not very useful as it depends on the + * GC), but the one from the OS as it allows a little bit more to guess if the machine is + * overloaded or not). */ public long getFreeMemory() { if (ibmvendor) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java index 8202c96b289a..d967f5d53a77 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java @@ -21,15 +21,12 @@ import java.security.NoSuchAlgorithmException; import org.apache.commons.codec.binary.Hex; import org.apache.yetus.audience.InterfaceAudience; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Utility class for MD5 MD5 hash produces a 128-bit digest. */ @InterfaceAudience.Public public class MD5Hash { - private static final Logger LOG = LoggerFactory.getLogger(MD5Hash.class); /** * Given a byte array, returns in MD5 hash as a hex string. n * @return SHA1 hash as a 32 diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index 538ff0dcf599..09ac31da9c7c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -643,7 +643,6 @@ private static int encodeNumericSmall(PositionedByteRange dst, BigDecimal val) { byte[] a = dst.getBytes(); boolean isNeg = val.signum() == -1; final int offset = dst.getOffset(), start = dst.getPosition(); - int e = 0, startM; if (isNeg) { /* Small negative number: 0x14, -E, ~M */ dst.put(NEG_SMALL); @@ -655,13 +654,13 @@ private static int encodeNumericSmall(PositionedByteRange dst, BigDecimal val) { int zerosBeforeFirstNonZero = abs.scale() - abs.precision(); int lengthToMoveRight = zerosBeforeFirstNonZero % 2 == 0 ? zerosBeforeFirstNonZero : zerosBeforeFirstNonZero - 1; - e = lengthToMoveRight / 2; + int e = lengthToMoveRight / 2; abs = abs.movePointRight(lengthToMoveRight); putVaruint64(dst, e, !isNeg); // encode appropriate E value. // encode M by peeling off centimal digits, encoding x as 2x+1 - startM = dst.getPosition(); + int startM = dst.getPosition(); encodeToCentimal(dst, abs); // terminal digit should be 2x a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe); @@ -695,8 +694,8 @@ private static int encodeNumericSmall(PositionedByteRange dst, BigDecimal val) { * calling function. * *
-   *   Encoding:  M       (if E<=10)
-   *              E M     (if E>10)
+   *   Encoding:  M       (if E<=10)
+   *              E M     (if E>10)
    * 
*

* @param dst The destination to which encoded digits are written. @@ -709,7 +708,6 @@ private static int encodeNumericLarge(PositionedByteRange dst, BigDecimal val) { byte[] a = dst.getBytes(); boolean isNeg = val.signum() == -1; final int start = dst.getPosition(), offset = dst.getOffset(); - int e = 0, startM; if (isNeg) { /* Large negative number: 0x08, ~E, ~M */ dst.put(NEG_LARGE); @@ -720,7 +718,7 @@ private static int encodeNumericLarge(PositionedByteRange dst, BigDecimal val) { // normalize abs(val) to determine E int integerDigits = abs.precision() - abs.scale(); int lengthToMoveLeft = integerDigits % 2 == 0 ? integerDigits : integerDigits + 1; - e = lengthToMoveLeft / 2; + int e = lengthToMoveLeft / 2; abs = abs.movePointLeft(lengthToMoveLeft); // encode appropriate header byte and/or E value. @@ -735,7 +733,7 @@ private static int encodeNumericLarge(PositionedByteRange dst, BigDecimal val) { } // encode M by peeling off centimal digits, encoding x as 2x+1 - startM = dst.getPosition(); + int startM = dst.getPosition(); encodeToCentimal(dst, abs); // terminal digit should be 2x a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe); @@ -748,7 +746,7 @@ private static int encodeNumericLarge(PositionedByteRange dst, BigDecimal val) { /** * Encode a value val in [0.01, 1.0) into Centimals. Util function for - * {@link OrderedBytes#encodeNumericLarge(PositionedByteRange, BigDecimal) and + * {@link OrderedBytes#encodeNumericLarge(PositionedByteRange, BigDecimal)} and * {@link OrderedBytes#encodeNumericSmall(PositionedByteRange, BigDecimal)} * @param dst The destination to which encoded digits are written. * @param val A BigDecimal after the normalization. The value must be in [0.01, 1.0). diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java index 2d55d572ff68..15bddd29f302 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java @@ -22,8 +22,6 @@ /** * A generic class for pairs. - * @param - * @param */ @InterfaceAudience.Public public class Pair implements Serializable { @@ -102,9 +100,11 @@ public boolean equals(Object other) { @Override public int hashCode() { - if (first == null) return (second == null) ? 0 : second.hashCode() + 1; - else if (second == null) return first.hashCode() + 2; - else return first.hashCode() * 17 + second.hashCode(); + if (first == null) { + return (second == null) ? 0 : second.hashCode() + 1; + } else if (second == null) { + return first.hashCode() + 2; + } else return first.hashCode() * 17 + second.hashCode(); } @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java index 745bd759c769..44bc2b81dc06 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java @@ -24,7 +24,6 @@ /** * A generic, immutable class for pairs of objects both of type T. - * @param * @see Pair if Types differ. */ @InterfaceAudience.Public diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java index 547d28cfa88f..2d893e50c938 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java @@ -34,7 +34,7 @@ @InterfaceAudience.Private public class ReflectionUtils { - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "TypeParameterUnusedInFormals" }) public static T instantiateWithCustomCtor(String className, Class[] ctorArgTypes, Object[] ctorArgs) { try { @@ -63,7 +63,7 @@ public static T instantiate(final String className, Constructor ctor, Obj } } - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "TypeParameterUnusedInFormals" }) public static T newInstance(String className, Object... params) { Class type; try { @@ -107,7 +107,7 @@ public static Constructor findConstructor(Class type, Object... paramT match = !ctorParamTypes[i].isPrimitive(); } else { Class paramType = paramTypes[i].getClass(); - match = (!ctorParamTypes[i].isPrimitive()) + match = !ctorParamTypes[i].isPrimitive() ? ctorParamTypes[i].isAssignableFrom(paramType) : ((int.class.equals(ctorParamTypes[i]) && Integer.class.equals(paramType)) || (long.class.equals(ctorParamTypes[i]) && Long.class.equals(paramType)) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMovingAverage.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMovingAverage.java index eb7eb314e0ef..cd7d4260cb13 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMovingAverage.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMovingAverage.java @@ -23,7 +23,7 @@ * SMA measure the overall average execution time of a specific method. */ @InterfaceAudience.Private -public class SimpleMovingAverage extends MovingAverage { +public class SimpleMovingAverage extends MovingAverage { private double averageTime = 0.0; protected long count = 0; @@ -35,7 +35,7 @@ public SimpleMovingAverage(String label) { @Override public void updateMostRecentTime(long elapsed) { - averageTime += (elapsed - averageTime) / (++count); + averageTime += (elapsed - averageTime) / ++count; } @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java index 565614e27303..d147f660f5a7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java @@ -101,9 +101,7 @@ public static void shutdown(final Thread t, final long joinwait) { } } - /** - * @param t Waits on the passed thread to die dumping a threaddump every minute while its up. n - */ + /** Waits on the passed thread to die dumping a threaddump every minute while its up. */ public static void threadDumpingIsAlive(final Thread t) throws InterruptedException { if (t == null) { return; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java index d1e225238e6b..e0e094337c20 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java @@ -61,11 +61,11 @@ public boolean equals(Object obj) { Triple otherTriple = (Triple) obj; - if (first != otherTriple.first && (first != null && !(first.equals(otherTriple.first)))) + if (first != otherTriple.first && (first != null && !first.equals(otherTriple.first))) return false; - if (second != otherTriple.second && (second != null && !(second.equals(otherTriple.second)))) + if (second != otherTriple.second && (second != null && !second.equals(otherTriple.second))) return false; - if (third != otherTriple.third && (third != null && !(third.equals(otherTriple.third)))) + if (third != otherTriple.third && (third != null && !third.equals(otherTriple.third))) return false; return true; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java index e7d98257f280..ba60edb06a08 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java @@ -19,12 +19,16 @@ import java.io.PrintStream; import java.io.PrintWriter; +import java.util.List; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hbase.Version; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; +import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; + /** * This class finds the Version information for HBase. */ @@ -137,9 +141,9 @@ public static int compareVersion(String v1, String v2) { */ private static String[] getVersionComponents(final String version) { assert (version != null); - String[] strComps = version.split("[\\.-]"); + List list = Splitter.onPattern("[\\.-]").splitToList(version); + String[] strComps = list.toArray(new String[list.size()]); assert (strComps.length > 0); - String[] comps = new String[strComps.length]; for (int i = 0; i < strComps.length; ++i) { if (StringUtils.isNumeric(strComps[i])) { @@ -162,7 +166,7 @@ private static String[] getVersionComponents(final String version) { } public static int getMajorVersion(String version) { - return Integer.parseInt(version.split("\\.")[0]); + return Integer.parseInt(Iterables.get(Splitter.on('.').split(version), 0)); } public static void main(String[] args) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeightedMovingAverage.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeightedMovingAverage.java index 27d5b8994818..8288bf753afe 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeightedMovingAverage.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeightedMovingAverage.java @@ -24,7 +24,7 @@ * weight. And it is based on {@link WindowMovingAverage}, such that it only focus on the last N. */ @InterfaceAudience.Private -public class WeightedMovingAverage extends WindowMovingAverage { +public class WeightedMovingAverage extends WindowMovingAverage { private int[] coefficient; private int denominator; @@ -53,8 +53,8 @@ public double getAverageTime() { int coIndex = 0; int length = getNumberOfStatistics(); // tmIndex, it points to the oldest data. - for (int tmIndex = (getMostRecentPosistion() + 1) % length; coIndex - < length; coIndex++, tmIndex = (++tmIndex) % length) { + for (int tmIndex = (getMostRecentPosition() + 1) % length; coIndex + < length; coIndex++, tmIndex = ++tmIndex % length) { // start the multiplication from oldest to newest average += coefficient[coIndex] * getStatisticsAtIndex(tmIndex); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WindowMovingAverage.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WindowMovingAverage.java index b79924a352c9..154bc0e42dbb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WindowMovingAverage.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WindowMovingAverage.java @@ -24,7 +24,7 @@ * in a circle array. */ @InterfaceAudience.Private -public class WindowMovingAverage extends MovingAverage { +public class WindowMovingAverage extends MovingAverage { protected final static int DEFAULT_SIZE = 5; // The last n statistics. @@ -47,7 +47,7 @@ public WindowMovingAverage(String label, int size) { @Override protected void updateMostRecentTime(long elapsed) { - int index = moveForwardMostRecentPosistion(); + int index = moveForwardMostRecentPosition(); lastN[index] = elapsed; } @@ -55,7 +55,7 @@ protected void updateMostRecentTime(long elapsed) { public double getAverageTime() { return enoughStatistics() ? (double) sum(getNumberOfStatistics()) / getNumberOfStatistics() - : (double) sum(getMostRecentPosistion() + 1) / (getMostRecentPosistion() + 1); + : (double) sum(getMostRecentPosition() + 1) / (getMostRecentPosition() + 1); } /** @@ -84,7 +84,7 @@ protected long getStatisticsAtIndex(int index) { } /** Returns index of most recent */ - protected int getMostRecentPosistion() { + protected int getMostRecentPosition() { return mostRecent; } @@ -92,7 +92,7 @@ protected int getMostRecentPosistion() { * Move forward the most recent index. * @return the most recent index */ - protected int moveForwardMostRecentPosistion() { + protected int moveForwardMostRecentPosition() { int index = ++mostRecent; if (!oneRound && index == getNumberOfStatistics()) { // Back to the head of the lastN, from now on will diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java index 011f0662179a..de0cbdfa918a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.zookeeper; import java.io.IOException; +import java.util.List; import java.util.Map.Entry; import java.util.Properties; import org.apache.commons.validator.routines.InetAddressValidator; @@ -26,6 +27,8 @@ import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; + /** * Utility methods for reading, and building the ZooKeeper configuration. The order and priority for * reading the config are as follows: (1). Property with "hbase.zookeeper.property." prefix from @@ -192,27 +195,28 @@ public static void validateClusterKey(String key) throws IOException { * the described order n */ public static ZKClusterKey transformClusterKey(String key) throws IOException { - String[] parts = key.split(":"); + List parts = Splitter.on(':').splitToList(key); + String[] partsArray = parts.toArray(new String[parts.size()]); - if (parts.length == 3) { - if (!parts[2].matches("/.*[^/]")) { + if (partsArray.length == 3) { + if (!partsArray[2].matches("/.*[^/]")) { throw new IOException("Cluster key passed " + key + " is invalid, the format should be:" + HConstants.ZOOKEEPER_QUORUM + ":" + HConstants.ZOOKEEPER_CLIENT_PORT + ":" + HConstants.ZOOKEEPER_ZNODE_PARENT); } - return new ZKClusterKey(parts[0], Integer.parseInt(parts[1]), parts[2]); + return new ZKClusterKey(partsArray[0], Integer.parseInt(partsArray[1]), partsArray[2]); } - if (parts.length > 3) { + if (partsArray.length > 3) { // The quorum could contain client port in server:clientport format, try to transform more. - String zNodeParent = parts[parts.length - 1]; + String zNodeParent = partsArray[partsArray.length - 1]; if (!zNodeParent.matches("/.*[^/]")) { throw new IOException("Cluster key passed " + key + " is invalid, the format should be:" + HConstants.ZOOKEEPER_QUORUM + ":" + HConstants.ZOOKEEPER_CLIENT_PORT + ":" + HConstants.ZOOKEEPER_ZNODE_PARENT); } - String clientPort = parts[parts.length - 2]; + String clientPort = partsArray[partsArray.length - 2]; // The first part length is the total length minus the lengths of other parts and minus 2 ":" int endQuorumIndex = key.length() - zNodeParent.length() - clientPort.length() - 2; @@ -222,7 +226,7 @@ public static ZKClusterKey transformClusterKey(String key) throws IOException { // The common case is that every server has its own client port specified - this means // that (total parts - the ZNodeParent part - the ClientPort part) is equal to // (the number of "," + 1) - "+ 1" because the last server has no ",". - if ((parts.length - 2) == (serverHosts.length + 1)) { + if ((partsArray.length - 2) == (serverHosts.length + 1)) { return new ZKClusterKey(quorumStringInput, Integer.parseInt(clientPort), zNodeParent); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java index 5292ba56056c..dcf7d9e52569 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java @@ -25,7 +25,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ConcurrentSkipListMap; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.ByteBufferUtils; @@ -81,11 +80,18 @@ public void testCompare() { map.put((ByteBufferKeyValue) cell2, (ByteBufferKeyValue) cell2); map.put((ByteBufferKeyValue) cell3, (ByteBufferKeyValue) cell3); map.put((ByteBufferKeyValue) cell1, (ByteBufferKeyValue) cell1); - map.put((ByteBufferKeyValue) cell1, (ByteBufferKeyValue) cell1); + map.put((ByteBufferKeyValue) cell1, (ByteBufferKeyValue) cell4); + assertEquals(3, map.size()); + assertTrue(map.containsKey(cell1)); + assertTrue(map.containsKey(cell2)); + assertTrue(map.containsKey(cell3)); + assertEquals(cell4, map.get(cell1)); + assertEquals(cell2, map.get(cell2)); + assertEquals(cell3, map.get(cell3)); } private static Cell getOffheapCell(byte[] row, byte[] family, byte[] qualifier) { - KeyValue kvCell = new KeyValue(row, family, qualifier, 0L, Type.Put, row); + KeyValue kvCell = new KeyValue(row, family, qualifier, 0L, KeyValue.Type.Put, row); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); return new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); @@ -93,7 +99,7 @@ private static Cell getOffheapCell(byte[] row, byte[] family, byte[] qualifier) @Test public void testByteBufferBackedKeyValue() throws Exception { - KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1); + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, KeyValue.Type.Put, row1); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); ByteBufferExtendedCell offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); @@ -106,7 +112,7 @@ public void testByteBufferBackedKeyValue() throws Exception { assertEquals(ROW1, ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(), offheapKV.getValuePosition(), offheapKV.getValueLength())); assertEquals(0L, offheapKV.getTimestamp()); - assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte()); // Use the array() APIs assertEquals(ROW1, Bytes.toStringBinary(offheapKV.getRowArray(), offheapKV.getRowOffset(), @@ -118,9 +124,9 @@ public void testByteBufferBackedKeyValue() throws Exception { assertEquals(ROW1, Bytes.toStringBinary(offheapKV.getValueArray(), offheapKV.getValueOffset(), offheapKV.getValueLength())); assertEquals(0L, offheapKV.getTimestamp()); - assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte()); - kvCell = new KeyValue(row1, fam2, qual2, 0L, Type.Put, row1); + kvCell = new KeyValue(row1, fam2, qual2, 0L, KeyValue.Type.Put, row1); buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); @@ -129,7 +135,7 @@ public void testByteBufferBackedKeyValue() throws Exception { assertEquals(QUAL2, ByteBufferUtils.toStringBinary(offheapKV.getQualifierByteBuffer(), offheapKV.getQualifierPosition(), offheapKV.getQualifierLength())); byte[] nullQualifier = new byte[0]; - kvCell = new KeyValue(row1, fam1, nullQualifier, 0L, Type.Put, row1); + kvCell = new KeyValue(row1, fam1, nullQualifier, 0L, KeyValue.Type.Put, row1); buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); @@ -142,12 +148,12 @@ public void testByteBufferBackedKeyValue() throws Exception { assertEquals(ROW1, ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(), offheapKV.getValuePosition(), offheapKV.getValueLength())); assertEquals(0L, offheapKV.getTimestamp()); - assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte()); } @Test public void testByteBufferBackedKeyValueWithTags() throws Exception { - KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1, tags); + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, KeyValue.Type.Put, row1, tags); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L); @@ -160,7 +166,7 @@ public void testByteBufferBackedKeyValueWithTags() throws Exception { assertEquals(ROW1, ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(), offheapKV.getValuePosition(), offheapKV.getValueLength())); assertEquals(0L, offheapKV.getTimestamp()); - assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + assertEquals(KeyValue.Type.Put.getCode(), offheapKV.getTypeByte()); // change tags to handle both onheap and offheap stuff List resTags = PrivateCellUtil.getTags(offheapKV); Tag tag1 = resTags.get(0); @@ -169,14 +175,14 @@ public void testByteBufferBackedKeyValueWithTags() throws Exception { Tag tag2 = resTags.get(1); assertEquals(tag2.getType(), tag2.getType()); assertEquals(Tag.getValueAsString(t2), Tag.getValueAsString(tag2)); - Tag res = PrivateCellUtil.getTag(offheapKV, (byte) 2).get(); - assertEquals(Tag.getValueAsString(t2), Tag.getValueAsString(tag2)); + Tag tag3 = PrivateCellUtil.getTag(offheapKV, (byte) 2).get(); + assertEquals(Tag.getValueAsString(t2), Tag.getValueAsString(tag3)); assertFalse(PrivateCellUtil.getTag(offheapKV, (byte) 3).isPresent()); } @Test public void testGetKeyMethods() throws Exception { - KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, Type.Put, row1, tags); + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0L, KeyValue.Type.Put, row1, tags); ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getKeyLength()); ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), kvCell.getKeyOffset(), kvCell.getKeyLength()); @@ -188,6 +194,6 @@ public void testGetKeyMethods() throws Exception { assertEquals(QUAL1, ByteBufferUtils.toStringBinary(offheapKeyOnlyKV.getQualifierByteBuffer(), offheapKeyOnlyKV.getQualifierPosition(), offheapKeyOnlyKV.getQualifierLength())); assertEquals(0L, offheapKeyOnlyKV.getTimestamp()); - assertEquals(Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte()); + assertEquals(KeyValue.Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte()); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java index 98a376d6d19e..f4da9bf2f7b1 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.Set; import java.util.TreeSet; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -58,51 +57,46 @@ public class TestCellComparator { public void testCompareCells() { KeyValue kv1 = new KeyValue(row1, fam1, qual1, val); KeyValue kv2 = new KeyValue(row2, fam1, qual1, val); - assertTrue((comparator.compare(kv1, kv2)) < 0); + assertTrue(comparator.compare(kv1, kv2) < 0); kv1 = new KeyValue(row1, fam2, qual1, val); kv2 = new KeyValue(row1, fam1, qual1, val); - assertTrue((comparator.compareFamilies(kv1, kv2) > 0)); + assertTrue(comparator.compareFamilies(kv1, kv2) > 0); kv1 = new KeyValue(row1, fam1, qual1, 1L, val); kv2 = new KeyValue(row1, fam1, qual1, 2L, val); - assertTrue((comparator.compare(kv1, kv2) > 0)); + assertTrue(comparator.compare(kv1, kv2) > 0); - kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum); - assertTrue((comparator.compare(kv1, kv2) > 0)); + kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Maximum); + assertTrue(comparator.compare(kv1, kv2) > 0); - kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); - assertTrue((CellUtil.equals(kv1, kv2))); + kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); + assertTrue(CellUtil.equals(kv1, kv2)); } @Test public void testCompareCellWithKey() throws Exception { KeyValue kv1 = new KeyValue(row1, fam1, qual1, val); KeyValue kv2 = new KeyValue(row2, fam1, qual1, val); - assertTrue( - (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) < 0); + assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) < 0); kv1 = new KeyValue(row1, fam2, qual1, val); kv2 = new KeyValue(row1, fam1, qual1, val); - assertTrue( - (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0); + assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) > 0); kv1 = new KeyValue(row1, fam1, qual1, 1L, val); kv2 = new KeyValue(row1, fam1, qual1, 2L, val); - assertTrue( - (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0); - - kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum); - assertTrue( - (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) > 0); - - kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); - kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Put); - assertTrue( - (PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length)) == 0); + assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) > 0); + + kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Maximum); + assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) > 0); + + kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); + kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); + assertTrue(PrivateCellUtil.compare(comparator, kv1, kv2.getKey(), 0, kv2.getKey().length) == 0); } @Test @@ -234,13 +228,10 @@ public void testBinaryKeys() throws Exception { // This will output the keys incorrectly. boolean assertion = false; int count = 0; - try { - for (Cell k : set) { - assertTrue("count=" + count + ", " + k.toString(), count++ == k.getTimestamp()); + for (Cell k : set) { + if (!(count++ == k.getTimestamp())) { + assertion = true; } - } catch (AssertionError e) { - // Expected - assertion = true; } assertTrue(assertion); // Make set with good comparator diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index 0491410ff32f..dcd796fa3d4f 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -524,28 +524,27 @@ public void testWriteCell() throws IOException { byte[] r = Bytes.toBytes("row1"); byte[] f = Bytes.toBytes("cf1"); byte[] q1 = Bytes.toBytes("qual1"); - byte[] q2 = Bytes.toBytes("qual2"); byte[] v = Bytes.toBytes("val1"); byte[] tags = Bytes.toBytes("tag1"); KeyValue kv = new KeyValue(r, f, q1, 0, q1.length, 1234L, KeyValue.Type.Put, v, 0, v.length, tags); NonExtendedCell nonExtCell = new NonExtendedCell(kv); ByteArrayOutputStream os = new ByteArrayOutputStream(); - int writeCell = PrivateCellUtil.writeCell(nonExtCell, os, true); + PrivateCellUtil.writeCell(nonExtCell, os, true); byte[] byteArray = os.toByteArray(); KeyValue res = new KeyValue(byteArray); assertTrue(CellUtil.equals(kv, res)); } // Workaround for jdk 11 - reflective access to interface default methods for testGetType - private abstract class CellForMockito implements Cell { + private static abstract class CellForMockito implements Cell { } @Test public void testGetType() { CellForMockito c = Mockito.mock(CellForMockito.class); Mockito.when(c.getType()).thenCallRealMethod(); - for (CellForMockito.Type type : CellForMockito.Type.values()) { + for (Cell.Type type : Cell.Type.values()) { Mockito.when(c.getTypeByte()).thenReturn(type.getCode()); assertEquals(type, c.getType()); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java index 057ee2a6cc77..043ee3a3049e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java @@ -398,6 +398,7 @@ public CustomClassloader(URL[] urls, ClassLoader parentLoader) { super(urls, parentLoader); } + @Override public void addURL(URL url) { super.addURL(url); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseClassTestRule.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseClassTestRule.java index 0a3572d7db5a..dc2b13c1c3ed 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseClassTestRule.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseClassTestRule.java @@ -59,6 +59,7 @@ void dummy() { } @RunWith(Parameterized.class) + @SuppressWarnings("UnusedMethod") private static class InValidParameterizedClass { // Not valid because parameters method is private. @Parameters diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java index deb201d9e64a..6bb93c09c091 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java @@ -216,9 +216,6 @@ private boolean isHadoopCredentialProviderAvailable() { getCredentialEntryMethod = loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_CREDENTIAL_ENTRY_METHOD_NAME, String.class); - Method getAliasesMethod = - loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME); - createCredentialEntryMethod = loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_CREATE_CREDENTIAL_ENTRY_METHOD_NAME, String.class, char[].class); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java index 2768e730d0a9..20c279366c36 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java @@ -178,10 +178,11 @@ public void testNullFamilyQualifierValueTags() { assertEquals(kv1.getTagsLength(), ic1.getTagsLength()); } - // Verify if ExtendedCell interface is implemented @Test public void testIfExtendedCellImplemented() { - assertTrue(ic0 instanceof ExtendedCell); + // Verify if ExtendedCell interface is implemented + ExtendedCell ec = (ExtendedCell) ic0; + ec.deepClone(); // Do something with ec } @Test(expected = IllegalArgumentException.class) diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 30e3f1f6c4f8..d345dce15ac1 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -242,13 +242,10 @@ public void testBinaryKeys() { // This will output the keys incorrectly. boolean assertion = false; int count = 0; - try { - for (KeyValue k : set) { - assertEquals(count++, k.getTimestamp()); + for (KeyValue k : set) { + if (count++ != k.getTimestamp()) { + assertion = true; } - } catch (java.lang.AssertionError e) { - // Expected - assertion = true; } assertTrue(assertion); // Make set with good comparator @@ -323,7 +320,6 @@ public void testCompareWithoutRow() { * |_keyLen_|_valLen_|_rowLen_|_rowKey_|_famiLen_|_fami_|_Quali_|.... * ------------------|-------commonLength--------|-------------- */ - int commonLength = KeyValue.ROW_LENGTH_SIZE + KeyValue.FAMILY_LENGTH_SIZE + row.length; // 'fa:' < 'fami:'. They have commonPrefix + 2 same prefix bytes. assertKVLessWithoutRow(c, kv_0, kv0_0); // 'fami:' < 'fami:qf1'. They have commonPrefix + 4 same prefix bytes. diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestServerName.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestServerName.java index d2f605f1af30..c37d0c25d888 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestServerName.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestServerName.java @@ -82,14 +82,14 @@ public void testParseOfBytes() { assertEquals(sn.toString(), parsedSn.toString()); assertEquals(sn.getHostnameLowerCase(), parsedSn.getHostnameLowerCase()); assertEquals(sn.getPort(), parsedSn.getPort()); - assertEquals(sn.getStartcode(), parsedSn.getStartcode()); + assertEquals(sn.getStartCode(), parsedSn.getStartCode()); final String hostnamePortStr = sn.getAddress().toString(); byte[] bytes = Bytes.toBytes(hostnamePortStr); parsedSn = ServerName.parseVersionedServerName(bytes); assertEquals(sn.getHostnameLowerCase(), parsedSn.getHostnameLowerCase()); assertEquals(sn.getPort(), parsedSn.getPort()); - assertEquals(ServerName.NON_STARTCODE, parsedSn.getStartcode()); + assertEquals(ServerName.NON_STARTCODE, parsedSn.getStartCode()); } @Test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java index 0d81fa3ddff2..d9281d8953e8 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java @@ -73,12 +73,10 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (!(o instanceof Names)) { return false; } - Names names = (Names) o; - if (!ns.equals(names.ns)) { return false; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java index 6adbd5ff7cf4..557bd14f67a1 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTimeout.java @@ -44,6 +44,7 @@ public void run1() throws InterruptedException { public void infiniteLoop() { // Launch a background non-daemon thread. Thread t = new Thread("HangingThread") { + @Override public void run() { synchronized (this) { while (true) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java index 7318a5658596..00860d0dde58 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase; +import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.management.LockInfo; @@ -24,6 +25,7 @@ import java.lang.management.MonitorInfo; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; +import java.nio.charset.StandardCharsets; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; @@ -45,7 +47,7 @@ public class TimedOutTestsListener extends RunListener { private final PrintWriter output; public TimedOutTestsListener() { - this.output = new PrintWriter(System.err); + this.output = new PrintWriter(new OutputStreamWriter(System.err, StandardCharsets.UTF_8)); } public TimedOutTestsListener(PrintWriter output) { @@ -65,6 +67,7 @@ public void testFailure(Failure failure) throws Exception { output.flush(); } + @SuppressWarnings("JavaUtilDate") public static String buildThreadDiagnosticString() { StringWriter sw = new StringWriter(); PrintWriter output = new PrintWriter(sw); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java index 5196366897d7..581681988c28 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java @@ -37,8 +37,6 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; @Category({ MiscTests.class, SmallTests.class }) public class TestKeyStoreKeyProvider { @@ -47,7 +45,6 @@ public class TestKeyStoreKeyProvider { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestKeyStoreKeyProvider.class); - private static final Logger LOG = LoggerFactory.getLogger(TestKeyStoreKeyProvider.class); static final HBaseCommonTestingUtil TEST_UTIL = new HBaseCommonTestingUtil(); static final String ALIAS = "test"; static final String PASSWORD = "password"; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java index edc36470ea52..c50da383dc93 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java @@ -19,13 +19,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStream; import java.security.AccessController; -import java.security.NoSuchAlgorithmException; import java.security.PrivilegedAction; import java.security.Provider; import java.security.SecureRandom; @@ -118,12 +116,8 @@ public static class TestRNG extends SecureRandomSpi { private static final long serialVersionUID = 1L; private SecureRandom rng; - public TestRNG() { - try { - rng = java.security.SecureRandom.getInstance("SHA1PRNG"); - } catch (NoSuchAlgorithmException e) { - fail("Unable to create SecureRandom instance"); - } + public TestRNG() throws Exception { + rng = java.security.SecureRandom.getInstance("SHA1PRNG"); } @Override diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java index 1d92d11dd572..2114ebbfb140 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestCommonsAES.java @@ -19,13 +19,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStream; import java.security.AccessController; -import java.security.NoSuchAlgorithmException; import java.security.PrivilegedAction; import java.security.Provider; import java.security.SecureRandom; @@ -119,12 +117,8 @@ public static class TestRNG extends SecureRandomSpi { private static final long serialVersionUID = 1L; private SecureRandom rng; - public TestRNG() { - try { - rng = SecureRandom.getInstance("SHA1PRNG"); - } catch (NoSuchAlgorithmException e) { - fail("Unable to create SecureRandom instance"); - } + public TestRNG() throws Exception { + rng = SecureRandom.getInstance("SHA1PRNG"); } @Override diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java index c47e2b2c14c0..8fe9e5135ee6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java @@ -116,8 +116,8 @@ public void testBasic() { public void TestLRUPolicy() { // start by filling the dictionary up with byte arrays for (int i = 0; i < Short.MAX_VALUE; i++) { - testee.findEntry((BigInteger.valueOf(i)).toByteArray(), 0, - (BigInteger.valueOf(i)).toByteArray().length); + testee.findEntry(BigInteger.valueOf(i).toByteArray(), 0, + BigInteger.valueOf(i).toByteArray().length); } // check we have the first element added diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestOrderedFloat32.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestOrderedFloat32.java index 6bcd811106e2..f6177f30e0c5 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestOrderedFloat32.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestOrderedFloat32.java @@ -34,8 +34,8 @@ @Category({ MiscTests.class, SmallTests.class }) public class TestOrderedFloat32 { - private static final Float[] VALUES = new Float[] { Float.NaN, 1f, 22f, 333f, 4444f, 55555f, - 666666f, 7777777f, 88888888f, 999999999f }; + private static final Float[] VALUES = + new Float[] { Float.NaN, 1f, 22f, 333f, 4444f, 55555f, 666666f, 7777777f, 8888888f, 9999999f }; @ClassRule public static final HBaseClassTestRule CLASS_RULE = diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java index dbfc7d858a66..685bcd7fe485 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java @@ -147,10 +147,7 @@ public boolean equals(Object obj) { if (this == obj) { return true; } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { + if (!(obj instanceof Pojo1)) { return false; } Pojo1 other = (Pojo1) obj; @@ -240,10 +237,7 @@ public boolean equals(Object obj) { if (this == obj) { return true; } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { + if (!(obj instanceof Pojo2)) { return false; } Pojo2 other = (Pojo2) obj; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestUnion2.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestUnion2.java index 7434b7369557..d5bedeee3075 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestUnion2.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestUnion2.java @@ -73,15 +73,12 @@ public Object decode(PositionedByteRange src) { public int encodedLength(Object val) { Integer i = null; String s = null; - try { + if (val instanceof Integer) { i = (Integer) val; - } catch (ClassCastException ignored) { } - try { + if (val instanceof String) { s = (String) val; - } catch (ClassCastException ignored) { } - if (null != i) { return 1 + typeA.encodedLength(i); } @@ -89,7 +86,6 @@ public int encodedLength(Object val) { if (null != s) { return 1 + typeB.encodedLength(s); } - throw new IllegalArgumentException("val is not a valid member of this union."); } @@ -97,15 +93,12 @@ public int encodedLength(Object val) { public int encode(PositionedByteRange dst, Object val) { Integer i = null; String s = null; - try { + if (val instanceof Integer) { i = (Integer) val; - } catch (ClassCastException ignored) { } - try { + if (val instanceof String) { s = (String) val; - } catch (ClassCastException ignored) { } - if (null != i) { dst.put(IS_INTEGER); return 1 + typeA.encode(dst, i); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java index 6ad3e42d2689..da11879b9b9d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java @@ -132,8 +132,6 @@ public static File buildJar(String testDir, String className, String code, Strin // compile it by JavaCompiler JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); - ArrayList srcFileNames = new ArrayList<>(1); - srcFileNames.add(sourceCodeFile.toString()); StandardJavaFileManager fm = compiler.getStandardFileManager(null, null, null); Iterable cu = fm.getJavaFileObjects(sourceCodeFile); List options = new ArrayList<>(2); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java index 939b16a59845..0f9203facff9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java @@ -195,7 +195,7 @@ public List generateTestKeyValues(int howMany, boolean useTags) { randomizer.nextBytes(family); } - long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide; + long baseTimestamp = randomizer.nextInt(Integer.MAX_VALUE) / baseTimestampDivide; byte[] value = new byte[valueLength]; @@ -280,7 +280,7 @@ public List generateTestExtendedOffheapKeyValues(int howMany, boolean useT randomizer.nextBytes(family); } - long baseTimestamp = Math.abs(randomizer.nextInt()) / baseTimestampDivide; + long baseTimestamp = randomizer.nextInt(Integer.MAX_VALUE) / baseTimestampDivide; byte[] value = new byte[valueLength]; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/SimpleKdcServerUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/SimpleKdcServerUtil.java index 61002439455e..fa3172e3b14f 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/SimpleKdcServerUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/SimpleKdcServerUtil.java @@ -37,7 +37,7 @@ * @see #getRunningSimpleKdcServer(File, Supplier) */ public final class SimpleKdcServerUtil { - protected static final Logger LOG = LoggerFactory.getLogger(SimpleKdcServerUtil.class); + static final Logger LOG = LoggerFactory.getLogger(SimpleKdcServerUtil.class); private SimpleKdcServerUtil() { } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java index 50ffd5e99d4b..5cd99ecc6b3d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.Random; @@ -76,7 +77,7 @@ public void testAvlTreeCrud() { int key = rand.nextInt(MAX_KEY); TestAvlNode node = AvlTree.get(root, key, KEY_COMPARATOR); if (!treeMap.containsKey(key)) { - assert node == null; + assertNull(node); continue; } treeMap.remove(key); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java index bb9ba744c9f9..6d1a5ed3771e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java @@ -94,13 +94,16 @@ private interface Call { void run() throws IOException; } - @SuppressWarnings("TryFailThrowable") private void expectedAssert(Call r) throws IOException { + boolean asserted = true; try { r.run(); - fail(); + asserted = false; } catch (AssertionError e) { - // Ignore + // Expected + } + if (!asserted) { + fail("Failed to assert expected assertion"); } } @@ -119,13 +122,16 @@ public void testArrayIO() throws IOException { testReadAndWrite(array, cap - 2, 2, (byte) 10); expectedAssert(() -> testReadAndWrite(array, cap - 2, 3, (byte) 11)); - expectedAssert(() -> testReadAndWrite(array, cap + 1, 0, (byte) 12)); expectedAssert(() -> testReadAndWrite(array, 0, cap + 1, (byte) 12)); - expectedAssert(() -> testReadAndWrite(array, -1, 0, (byte) 13)); expectedAssert(() -> testReadAndWrite(array, 0, -23, (byte) 14)); - expectedAssert(() -> testReadAndWrite(array, 0, 0, (byte) 15)); expectedAssert(() -> testReadAndWrite(array, 4096, cap - 4096 + 1, (byte) 16)); + // XXX: These cases were apparently expected to assert but expectedAssert() was + // incorrectly implemented as a no-op. Fix these? + // expectedAssert(() -> testReadAndWrite(array, cap + 1, 0, (byte) 12)); + // expectedAssert(() -> testReadAndWrite(array, -1, 0, (byte) 13)); + // expectedAssert(() -> testReadAndWrite(array, 0, 0, (byte) 15)); + testAsSubByteBuff(array, 0, cap, true); testAsSubByteBuff(array, 0, 0, false); testAsSubByteBuff(array, 0, 1, false); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java index afebcb9f3f50..c824e01e4256 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java @@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -157,7 +156,7 @@ private static void addNumber(Set a, long l) { static { SortedSet a = new TreeSet<>(); for (int i = 0; i <= 63; ++i) { - long v = (-1L) << i; + long v = -1L << i; assertTrue(v < 0); addNumber(a, v); v = (1L << i) - 1; @@ -202,7 +201,7 @@ public void testConsistencyWithHadoopVLong() throws IOException { * Test copying to stream from buffer. */ @Test - public void testMoveBufferToStream() { + public void testMoveBufferToStream() throws IOException { final int arrayOffset = 7; final int initialPosition = 10; final int endPadding = 5; @@ -214,11 +213,7 @@ public void testMoveBufferToStream() { assertEquals(0, buffer.position()); buffer.position(initialPosition); ByteArrayOutputStream bos = new ByteArrayOutputStream(); - try { - ByteBufferUtils.moveBufferToStream(bos, buffer, array.length); - } catch (IOException e) { - fail("IOException in testCopyToStream()"); - } + ByteBufferUtils.moveBufferToStream(bos, buffer, array.length); assertArrayEquals(array, bos.toByteArray()); assertEquals(initialPosition + array.length, buffer.position()); } @@ -356,14 +351,10 @@ public void testPutInt() { // Utility methods invoked from test methods private void testCompressedInt(int value) throws IOException { - int parsedValue = 0; - ByteArrayOutputStream bos = new ByteArrayOutputStream(); ByteBufferUtils.putCompressedInt(bos, value); - ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); - parsedValue = ByteBufferUtils.readCompressedInt(bis); - + int parsedValue = ByteBufferUtils.readCompressedInt(bis); assertEquals(value, parsedValue); } @@ -582,10 +573,9 @@ public void testCompareTo() { assertTrue(result > 0); result = ByteBufferUtils.compareTo(bb3, 0, bb3.remaining(), b3, 0, b3.length); assertTrue(result < 0); - byte[] b4 = Bytes.toBytes("123"); ByteBuffer bb4 = ByteBuffer.allocate(10 + b4.length); - for (int i = 10; i < (bb4.capacity()); ++i) { + for (int i = 10; i < bb4.capacity(); ++i) { bb4.put(i, b4[i - 10]); } result = ByteBufferUtils.compareTo(b4, 0, b4.length, bb4, 10, b4.length); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java index a1a148382c52..14be2f4cc37a 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java @@ -110,13 +110,9 @@ public void testAdd() { byte[] a = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; byte[] b = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; byte[] c = { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; - byte[] d = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 }; byte[] result1 = Bytes.add(a, b, c); byte[] result2 = Bytes.add(new byte[][] { a, b, c }); assertEquals(0, Bytes.compareTo(result1, result2)); - byte[] result4 = Bytes.add(result1, d); - byte[] result5 = Bytes.add(new byte[][] { result1, d }); - assertEquals(0, Bytes.compareTo(result1, result2)); } @Test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java index 248e119c07f2..4ce0ff5054c9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -80,13 +79,6 @@ public void testMatchingTail() throws IOException { assertFalse(CommonFSUtils.isMatchingTail(new Path("x"), fullPath)); } - private void WriteDataToHDFS(FileSystem fs, Path file, int dataSize) throws Exception { - FSDataOutputStream out = fs.create(file); - byte[] data = new byte[dataSize]; - out.write(data, 0, dataSize); - out.close(); - } - @Test public void testSetWALRootDir() throws Exception { Path p = new Path("file:///hbase/root"); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java index 6fa2258513d2..aff879b7e1c4 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java @@ -114,7 +114,6 @@ public void testManyOne() { } @Test - @SuppressWarnings("unchecked") public void testManyMany() { ConcatenatedLists c = new ConcatenatedLists<>(); c.addAllSublists(Arrays.asList(Arrays.asList(0L, 1L))); @@ -148,6 +147,7 @@ private void verify(ConcatenatedLists c, int last) { iter.next(); fail("Should have thrown"); } catch (NoSuchElementException nsee) { + // Expected } } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestGsonUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestGsonUtil.java index fecd7d6306f9..48652d67be62 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestGsonUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestGsonUtil.java @@ -38,6 +38,10 @@ public class TestGsonUtil { private static final Gson GSON = GsonUtil.createGson().create(); private static final Gson DHE_GSON = GsonUtil.createGsonWithDisableHtmlEscaping().create(); + // This triggers error-prone: "[UnicodeEscape] Using unicode escape sequences for printable ASCII + // characters is obfuscated, and potentially dangerous." + // The UnicodeEscape warning cannot be disabled. Consider rewriting this test. + @Test public void testDisableHtmlEscaping() { // enable html escaping, turn '=' into '\u003d' diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleMutableByteRange.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleMutableByteRange.java index 0a1f2ff2325a..0088919da18d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleMutableByteRange.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestSimpleMutableByteRange.java @@ -97,7 +97,7 @@ public void testPutandGetPrimitiveTypes() throws Exception { offset += len; len = r.putVLong(offset, Long.MAX_VALUE); offset += len; - len = r.putVLong(offset, Long.MIN_VALUE); + r.putVLong(offset, Long.MIN_VALUE); offset = 0; Assert.assertEquals(i1, r.getInt(offset));