diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
index e3100bb88fcb..4ba995409f08 100644
--- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
+++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
@@ -46,6 +46,7 @@
org.apache.hbase:hbase-itorg.apache.hbase:hbase-loggingorg.apache.hbase:hbase-mapreduce
+ org.apache.hbase:hbase-diagnosticsorg.apache.hbase:hbase-metricsorg.apache.hbase:hbase-metrics-apiorg.apache.hbase:hbase-procedure
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
index f45b949079bb..ca0d5a7ff66a 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.security;
import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.AuthUtil;
@@ -172,23 +170,6 @@ public static void setSSLConfiguration(HBaseCommonTestingUtil utility, Class>
KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
}
- public static UserGroupInformation loginAndReturnUGI(Configuration conf, String username)
- throws IOException {
- String hostname = InetAddress.getLocalHost().getHostName();
- String keyTabFileConfKey = "hbase." + username + ".keytab.file";
- String keyTabFileLocation = conf.get(keyTabFileConfKey);
- String principalConfKey = "hbase." + username + ".kerberos.principal";
- String principal = org.apache.hadoop.security.SecurityUtil
- .getServerPrincipal(conf.get(principalConfKey), hostname);
- if (keyTabFileLocation == null || principal == null) {
- LOG.warn(
- "Principal or key tab file null for : " + principalConfKey + ", " + keyTabFileConfKey);
- }
- UserGroupInformation ugi =
- UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keyTabFileLocation);
- return ugi;
- }
-
public static UserGroupInformation loginKerberosPrincipal(String krbKeytab, String krbPrincipal)
throws Exception {
Configuration conf = new Configuration();
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/compress/CompressionTestBase.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/compress/CompressionTestBase.java
index 0134d52eebbc..93adc4cdb9b7 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/compress/CompressionTestBase.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/compress/CompressionTestBase.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.RandomDistribution;
+import org.apache.hadoop.hbase.util.RandomDistributionCopy;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
@@ -128,8 +128,8 @@ protected void codecSmallTest(final CompressionCodec codec) throws Exception {
* Test with a large input (1MB) divided into blocks of 4KB.
*/
protected void codecLargeTest(final CompressionCodec codec, final double sigma) throws Exception {
- RandomDistribution.DiscreteRNG rng =
- new RandomDistribution.Zipf(new Random(), 0, Byte.MAX_VALUE, sigma);
+ RandomDistributionCopy.DiscreteRNG rng =
+ new RandomDistributionCopy.Zipf(new Random(), 0, Byte.MAX_VALUE, sigma);
final byte[][] input = new byte[LARGE_SIZE / BLOCK_SIZE][BLOCK_SIZE];
fill(rng, input);
codecTest(codec, input);
@@ -140,20 +140,20 @@ protected void codecLargeTest(final CompressionCodec codec, final double sigma)
*/
protected void codecVeryLargeTest(final CompressionCodec codec, final double sigma)
throws Exception {
- RandomDistribution.DiscreteRNG rng =
- new RandomDistribution.Zipf(new Random(), 0, Byte.MAX_VALUE, sigma);
+ RandomDistributionCopy.DiscreteRNG rng =
+ new RandomDistributionCopy.Zipf(new Random(), 0, Byte.MAX_VALUE, sigma);
final byte[][] input = new byte[1][VERY_LARGE_SIZE];
fill(rng, input);
codecTest(codec, input);
}
- protected static void fill(RandomDistribution.DiscreteRNG rng, byte[][] input) {
+ protected static void fill(RandomDistributionCopy.DiscreteRNG rng, byte[][] input) {
for (int i = 0; i < input.length; i++) {
fill(rng, input[i]);
}
}
- protected static void fill(RandomDistribution.DiscreteRNG rng, byte[] input) {
+ protected static void fill(RandomDistributionCopy.DiscreteRNG rng, byte[] input) {
for (int i = 0; i < input.length; i++) {
input[i] = (byte) rng.nextInt();
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTestingCopy.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTestingCopy.java
new file mode 100644
index 000000000000..7df7aa6421ef
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTestingCopy.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import java.security.Key;
+import javax.crypto.spec.SecretKeySpec;
+
+/**
+ * Return a fixed secret key for AES for testing.
+ */
+public class KeyProviderForTestingCopy implements KeyProvider {
+
+ @Override
+ public void init(String parameters) {
+ }
+
+ @Override
+ public Key getKey(String name) {
+ return new SecretKeySpec(Encryption.hash128(name), "AES");
+ }
+
+ @Override
+ public Key[] getKeys(String[] aliases) {
+ Key[] result = new Key[aliases.length];
+ for (int i = 0; i < aliases.length; i++) {
+ result[i] = new SecretKeySpec(Encryption.hash128(aliases[i]), "AES");
+ }
+ return result;
+ }
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistributionCopy.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistributionCopy.java
new file mode 100644
index 000000000000..6961d027dc8a
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistributionCopy.java
@@ -0,0 +1,227 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Random;
+
+/**
+ * A class that generates random numbers that follow some distribution.
+ *
+ * Copied from hadoop-3315 tfile.
+ * Remove after tfile is committed and use the tfile version of this class instead.
+ *
+ */
+public class RandomDistributionCopy {
+ /**
+ * Interface for discrete (integer) random distributions.
+ */
+ public interface DiscreteRNG {
+ /**
+ * Get the next random number
+ * @return the next random number.
+ */
+ int nextInt();
+ }
+
+ /**
+ * P(i)=1/(max-min)
+ */
+ public static final class Flat implements DiscreteRNG {
+ private final Random random;
+ private final int min;
+ private final int max;
+
+ /**
+ * Generate random integers from min (inclusive) to max (exclusive) following even distribution.
+ * The basic random number generator. Minimum integer maximum integer (exclusive).
+ */
+ public Flat(Random random, int min, int max) {
+ if (min >= max) {
+ throw new IllegalArgumentException("Invalid range");
+ }
+ this.random = random;
+ this.min = min;
+ this.max = max;
+ }
+
+ /**
+ * @see DiscreteRNG#nextInt()
+ */
+ @Override
+ public int nextInt() {
+ return random.nextInt(max - min) + min;
+ }
+ }
+
+ /**
+ * Zipf distribution. The ratio of the probabilities of integer i and j is defined as follows:
+ * P(i)/P(j)=((j-min+1)/(i-min+1))^sigma.
+ */
+ public static final class Zipf implements DiscreteRNG {
+ private static final double DEFAULT_EPSILON = 0.001;
+ private final Random random;
+ private final ArrayList k;
+ private final ArrayList v;
+
+ /**
+ * Constructor The random number generator. minimum integer (inclusvie) maximum integer
+ * (exclusive) parameter sigma. (sigma > 1.0)
+ */
+ public Zipf(Random r, int min, int max, double sigma) {
+ this(r, min, max, sigma, DEFAULT_EPSILON);
+ }
+
+ /**
+ * Constructor. The random number generator. minimum integer (inclusvie) maximum integer
+ * (exclusive) parameter sigma. (sigma > 1.0) Allowable error percentage (0 < epsilon < 1.0).
+ */
+ public Zipf(Random r, int min, int max, double sigma, double epsilon) {
+ if ((max <= min) || (sigma <= 1) || (epsilon <= 0) || (epsilon >= 0.5)) {
+ throw new IllegalArgumentException("Invalid arguments");
+ }
+ random = r;
+ k = new ArrayList<>();
+ v = new ArrayList<>();
+
+ double sum = 0;
+ int last = -1;
+ for (int i = min; i < max; ++i) {
+ sum += Math.exp(-sigma * Math.log(i - min + 1));
+ if ((last == -1) || i * (1 - epsilon) > last) {
+ k.add(i);
+ v.add(sum);
+ last = i;
+ }
+ }
+
+ if (last != max - 1) {
+ k.add(max - 1);
+ v.add(sum);
+ }
+
+ v.set(v.size() - 1, 1.0);
+
+ for (int i = v.size() - 2; i >= 0; --i) {
+ v.set(i, v.get(i) / sum);
+ }
+ }
+
+ /**
+ * @see DiscreteRNG#nextInt()
+ */
+ @Override
+ public int nextInt() {
+ double d = random.nextDouble();
+ int idx = Collections.binarySearch(v, d);
+
+ if (idx > 0) {
+ ++idx;
+ } else {
+ idx = -(idx + 1);
+ }
+
+ if (idx >= v.size()) {
+ idx = v.size() - 1;
+ }
+
+ if (idx == 0) {
+ return k.get(0);
+ }
+
+ int ceiling = k.get(idx);
+ int lower = k.get(idx - 1);
+
+ return ceiling - random.nextInt(ceiling - lower);
+ }
+ }
+
+ /**
+ * Binomial distribution. P(k)=select(n, k)*p^k*(1-p)^(n-k) (k = 0, 1, ..., n)
+ * P(k)=select(max-min-1, k-min)*p^(k-min)*(1-p)^(k-min)*(1-p)^(max-k-1)
+ */
+ public static final class Binomial implements DiscreteRNG {
+ private final Random random;
+ private final int min;
+ private final int n;
+ private final double[] v;
+
+ private static double select(int n, int k) {
+ double ret = 1.0;
+ for (int i = k + 1; i <= n; ++i) {
+ ret *= (double) i / (i - k);
+ }
+ return ret;
+ }
+
+ private static double power(double p, int k) {
+ return Math.exp(k * Math.log(p));
+ }
+
+ /**
+ * Generate random integers from min (inclusive) to max (exclusive) following Binomial
+ * distribution. The basic random number generator. Minimum integer maximum integer (exclusive).
+ * parameter.
+ */
+ public Binomial(Random random, int min, int max, double p) {
+ if (min >= max) {
+ throw new IllegalArgumentException("Invalid range");
+ }
+ this.random = random;
+ this.min = min;
+ this.n = max - min - 1;
+ if (n > 0) {
+ v = new double[n + 1];
+ double sum = 0.0;
+ for (int i = 0; i <= n; ++i) {
+ sum += select(n, i) * power(p, i) * power(1 - p, n - i);
+ v[i] = sum;
+ }
+ for (int i = 0; i <= n; ++i) {
+ v[i] /= sum;
+ }
+ } else {
+ v = null;
+ }
+ }
+
+ /**
+ * @see DiscreteRNG#nextInt()
+ */
+ @Override
+ public int nextInt() {
+ if (v == null) {
+ return min;
+ }
+ double d = random.nextDouble();
+ int idx = Arrays.binarySearch(v, d);
+ if (idx > 0) {
+ ++idx;
+ } else {
+ idx = -(idx + 1);
+ }
+
+ if (idx >= v.length) {
+ idx = v.length - 1;
+ }
+ return idx + min;
+ }
+ }
+}
diff --git a/hbase-compression/hbase-compression-zstd/src/test/java/org/apache/hadoop/hbase/io/compress/zstd/TestZstdDictionary.java b/hbase-compression/hbase-compression-zstd/src/test/java/org/apache/hadoop/hbase/io/compress/zstd/TestZstdDictionary.java
index 2f5a9784ec4d..dd316263abf5 100644
--- a/hbase-compression/hbase-compression-zstd/src/test/java/org/apache/hadoop/hbase/io/compress/zstd/TestZstdDictionary.java
+++ b/hbase-compression/hbase-compression-zstd/src/test/java/org/apache/hadoop/hbase/io/compress/zstd/TestZstdDictionary.java
@@ -30,7 +30,7 @@
import org.apache.hadoop.hbase.io.compress.CompressionTestBase;
import org.apache.hadoop.hbase.io.compress.DictionaryCache;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.RandomDistribution;
+import org.apache.hadoop.hbase.util.RandomDistributionCopy;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
@@ -82,8 +82,8 @@ public static void main(String[] args) throws IOException {
System.err.println("Usage: TestZstdCodec ");
System.exit(-1);
}
- final RandomDistribution.DiscreteRNG rng =
- new RandomDistribution.Zipf(new Random(), 0, Byte.MAX_VALUE, 2);
+ final RandomDistributionCopy.DiscreteRNG rng =
+ new RandomDistributionCopy.Zipf(new Random(), 0, Byte.MAX_VALUE, 2);
final File outFile = new File(args[0]);
final byte[] buffer = new byte[1024];
System.out.println("Generating " + outFile);
diff --git a/hbase-diagnostics/pom.xml b/hbase-diagnostics/pom.xml
new file mode 100644
index 000000000000..9f07ed01079a
--- /dev/null
+++ b/hbase-diagnostics/pom.xml
@@ -0,0 +1,173 @@
+
+
+
+ 4.0.0
+
+ org.apache.hbase
+ hbase-build-configuration
+ ${revision}
+ ../hbase-build-configuration
+
+ hbase-diagnostics
+ Apache HBase - Diagnostics Tools
+ Diagnostics Tools for HBase
+
+
+
+ org.apache.hbase
+ hbase-server
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-server
+
+
+ org.apache.hbase
+ hbase-mapreduce
+
+
+ org.apache.hbase
+ hbase-logging
+
+
+ org.apache.hbase.thirdparty
+ hbase-shaded-miscellaneous
+
+
+
+ org.apache.hbase
+ hbase-common
+
+
+ org.apache.hbase
+ hbase-protocol-shaded
+
+
+ org.apache.hbase
+ hbase-client
+
+
+ org.apache.hbase
+ hbase-zookeeper
+
+
+ org.apache.hbase
+ hbase-balancer
+
+
+ org.apache.hbase.thirdparty
+ hbase-shaded-gson
+
+
+ org.apache.hbase
+ hbase-common
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-annotations
+ test-jar
+ test
+
+
+
+ io.opentelemetry
+ opentelemetry-context
+
+
+ io.opentelemetry
+ opentelemetry-api
+
+
+ io.dropwizard.metrics
+ metrics-core
+
+
+ commons-io
+ commons-io
+
+
+ org.apache.commons
+ commons-lang3
+
+
+ org.slf4j
+ slf4j-api
+
+
+ org.apache.commons
+ commons-math3
+
+
+ org.apache.zookeeper
+ zookeeper
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop-three.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop-three.version}
+
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop-three.version}
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-zookeeper
+ test-jar
+ test
+
+
+ org.apache.hadoop
+ hadoop-minicluster
+ ${hadoop-three.version}
+ test
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+
+
+ junit
+ junit
+ test
+
+
+
+
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
similarity index 100%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
similarity index 97%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
index 8a6347ce6056..4d278c639467 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
@@ -21,12 +21,14 @@
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Code shared by PE tests.
*/
+@InterfaceAudience.Private
public class PerformanceEvaluationCommons {
private static final Logger LOG =
LoggerFactory.getLogger(PerformanceEvaluationCommons.class.getName());
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
similarity index 100%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
similarity index 95%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
index a5650adad914..a9ae79ffb6ca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
@@ -20,7 +20,9 @@
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.yetus.audience.InterfaceAudience;
+@InterfaceAudience.Private
public class FilterAllFilter extends FilterBase {
public FilterAllFilter() {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
similarity index 94%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
index b795356d8bbf..a74596f47a60 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
@@ -19,12 +19,13 @@
import java.security.Key;
import javax.crypto.spec.SecretKeySpec;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* Return a fixed secret key for AES for testing.
*/
+@InterfaceAudience.Private
public class KeyProviderForTesting implements KeyProvider {
-
@Override
public void init(String parameters) {
}
diff --git a/hbase-balancer/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
similarity index 97%
rename from hbase-balancer/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
index 3a435e140989..0e3977dc31ab 100644
--- a/hbase-balancer/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
@@ -24,7 +24,7 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
@@ -54,8 +54,6 @@ public class LoadBalancerPerformanceEvaluation extends AbstractHBaseTool {
private static final Logger LOG =
LoggerFactory.getLogger(LoadBalancerPerformanceEvaluation.class.getName());
- protected static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
-
private static final int DEFAULT_NUM_REGIONS = 1000000;
private static Option NUM_REGIONS_OPT = new Option("regions", true,
"Number of regions to consider by load balancer. Default: " + DEFAULT_NUM_REGIONS);
@@ -177,7 +175,7 @@ protected int doWork() throws Exception {
public static void main(String[] args) throws IOException {
LoadBalancerPerformanceEvaluation tool = new LoadBalancerPerformanceEvaluation();
- tool.setConf(UTIL.getConfiguration());
+ tool.setConf(HBaseConfiguration.create());
tool.run(args);
}
}
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/DiagnosticToolsCommonUtils.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/DiagnosticToolsCommonUtils.java
new file mode 100644
index 000000000000..b4c65d812579
--- /dev/null
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/DiagnosticToolsCommonUtils.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@InterfaceAudience.Private
+public class DiagnosticToolsCommonUtils {
+ private static final Logger LOG = LoggerFactory.getLogger(DiagnosticToolsCommonUtils.class);
+
+ public static UserGroupInformation loginAndReturnUGI(Configuration conf, String username)
+ throws IOException {
+ String hostname = InetAddress.getLocalHost().getHostName();
+ String keyTabFileConfKey = "hbase." + username + ".keytab.file";
+ String keyTabFileLocation = conf.get(keyTabFileConfKey);
+ String principalConfKey = "hbase." + username + ".kerberos.principal";
+ String principal = org.apache.hadoop.security.SecurityUtil
+ .getServerPrincipal(conf.get(principalConfKey), hostname);
+ if (keyTabFileLocation == null || principal == null) {
+ LOG.warn(
+ "Principal or key tab file null for : " + principalConfKey + ", " + keyTabFileConfKey);
+ }
+ UserGroupInformation ugi =
+ UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keyTabFileLocation);
+ return ugi;
+ }
+
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
similarity index 96%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
index 77397f116ca0..c02eed483d33 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
@@ -18,10 +18,12 @@
package org.apache.hadoop.hbase.util;
import java.util.Arrays;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* A load test data generator for MOB
*/
+@InterfaceAudience.Private
public class LoadTestDataGeneratorWithMOB extends MultiThreadedAction.DefaultDataGenerator {
private byte[] mobColumnFamily;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
similarity index 100%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestTool.java
similarity index 98%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 72a73eab8311..a843df2ce7e5 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -29,7 +29,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
@@ -47,7 +46,6 @@
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.EncryptionUtil;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.Permission;
@@ -322,8 +320,7 @@ protected void addOptions() {
addOptWithArg(OPT_BLOOM, OPT_USAGE_BLOOM);
addOptWithArg(OPT_BLOOM_PARAM, "the parameter of bloom filter type");
addOptWithArg(OPT_COMPRESSION, OPT_USAGE_COMPRESSION);
- addOptWithArg(HFileTestUtil.OPT_DATA_BLOCK_ENCODING,
- HFileTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
+ addOptWithArg(LoadTestUtil.OPT_DATA_BLOCK_ENCODING, LoadTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
addOptWithArg(OPT_MAX_READ_ERRORS,
"The maximum number of read errors "
+ "to tolerate before terminating all reader threads. The default is "
@@ -409,7 +406,7 @@ protected void processOptions(CommandLine cmd) {
families[i] = Bytes.toBytes(list[i]);
}
} else {
- families = HFileTestUtil.DEFAULT_COLUMN_FAMILIES;
+ families = LoadTestUtil.DEFAULT_COLUMN_FAMILIES;
}
isVerbose = cmd.hasOption(OPT_VERBOSE);
@@ -522,7 +519,7 @@ protected void processOptions(CommandLine cmd) {
}
private void parseColumnFamilyOptions(CommandLine cmd) {
- String dataBlockEncodingStr = cmd.getOptionValue(HFileTestUtil.OPT_DATA_BLOCK_ENCODING);
+ String dataBlockEncodingStr = cmd.getOptionValue(LoadTestUtil.OPT_DATA_BLOCK_ENCODING);
dataBlockEncodingAlgo =
dataBlockEncodingStr == null ? null : DataBlockEncoding.valueOf(dataBlockEncodingStr);
@@ -554,7 +551,7 @@ public void initTestTable() throws IOException {
durability = Durability.ASYNC_WAL;
}
- HBaseTestingUtil.createPreSplitLoadTestTable(conf, tableName, getColumnFamilies(), compressAlgo,
+ LoadTestUtil.createPreSplitLoadTestTable(conf, tableName, getColumnFamilies(), compressAlgo,
dataBlockEncodingAlgo, numRegionsPerServer, regionReplication, durability);
applyColumnFamilyOptions(tableName, getColumnFamilies());
}
@@ -609,7 +606,7 @@ protected int loadTable() throws IOException {
LOG.error(exp.toString(), exp);
return EXIT_FAILURE;
}
- userOwner = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, superUser));
+ userOwner = User.create(DiagnosticToolsCommonUtils.loginAndReturnUGI(conf, superUser));
} else {
superUser = clazzAndArgs[1];
userNames = clazzAndArgs[2];
@@ -648,7 +645,7 @@ protected int loadTable() throws IOException {
User user = null;
for (String userStr : users) {
if (User.isHBaseSecurityEnabled(conf)) {
- user = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, userStr));
+ user = User.create(DiagnosticToolsCommonUtils.loginAndReturnUGI(conf, userStr));
} else {
user = User.createUserForTesting(conf, userStr, new String[0]);
}
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestUtil.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestUtil.java
new file mode 100644
index 000000000000..0d8e460fffdc
--- /dev/null
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestUtil.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Locale;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@InterfaceAudience.Private
+public class LoadTestUtil {
+ public static final String OPT_DATA_BLOCK_ENCODING_USAGE = "Encoding algorithm (e.g. prefix "
+ + "compression) to use for data blocks in the test column family, " + "one of "
+ + Arrays.toString(DataBlockEncoding.values()) + ".";
+ public static final String OPT_DATA_BLOCK_ENCODING =
+ ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
+ /** Column family used by the test */
+ public static byte[] DEFAULT_COLUMN_FAMILY = Bytes.toBytes("test_cf");
+ /** Column families used by the test */
+ public static final byte[][] DEFAULT_COLUMN_FAMILIES = { DEFAULT_COLUMN_FAMILY };
+
+ public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
+ /**
+ * The default number of regions per regionserver when creating a pre-split table.
+ */
+ public static final int DEFAULT_REGIONS_PER_SERVER = 3;
+ public static final String PRESPLIT_TEST_TABLE_KEY = "hbase.test.pre-split-table";
+ public static final boolean PRESPLIT_TEST_TABLE = true;
+ protected static final Logger LOG = LoggerFactory.getLogger(LoadTestUtil.class);
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
+ byte[] columnFamily, Algorithm compression, DataBlockEncoding dataBlockEncoding)
+ throws IOException {
+ return createPreSplitLoadTestTable(conf, tableName, columnFamily, compression,
+ dataBlockEncoding, DEFAULT_REGIONS_PER_SERVER, 1, Durability.USE_DEFAULT);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
+ byte[] columnFamily, Algorithm compression, DataBlockEncoding dataBlockEncoding,
+ int numRegionsPerServer, int regionReplication, Durability durability) throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
+ builder.setDurability(durability);
+ builder.setRegionReplication(regionReplication);
+ ColumnFamilyDescriptorBuilder cfBuilder =
+ ColumnFamilyDescriptorBuilder.newBuilder(columnFamily);
+ cfBuilder.setDataBlockEncoding(dataBlockEncoding);
+ cfBuilder.setCompressionType(compression);
+ return createPreSplitLoadTestTable(conf, builder.build(), cfBuilder.build(),
+ numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
+ byte[][] columnFamilies, Algorithm compression, DataBlockEncoding dataBlockEncoding,
+ int numRegionsPerServer, int regionReplication, Durability durability) throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
+ builder.setDurability(durability);
+ builder.setRegionReplication(regionReplication);
+ ColumnFamilyDescriptor[] hcds = new ColumnFamilyDescriptor[columnFamilies.length];
+ for (int i = 0; i < columnFamilies.length; i++) {
+ ColumnFamilyDescriptorBuilder cfBuilder =
+ ColumnFamilyDescriptorBuilder.newBuilder(columnFamilies[i]);
+ cfBuilder.setDataBlockEncoding(dataBlockEncoding);
+ cfBuilder.setCompressionType(compression);
+ hcds[i] = cfBuilder.build();
+ }
+ return createPreSplitLoadTestTable(conf, builder.build(), hcds, numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
+ ColumnFamilyDescriptor hcd) throws IOException {
+ return createPreSplitLoadTestTable(conf, desc, hcd, DEFAULT_REGIONS_PER_SERVER);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
+ ColumnFamilyDescriptor hcd, int numRegionsPerServer) throws IOException {
+ return createPreSplitLoadTestTable(conf, desc, new ColumnFamilyDescriptor[] { hcd },
+ numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
+ ColumnFamilyDescriptor[] hcds, int numRegionsPerServer) throws IOException {
+ return createPreSplitLoadTestTable(conf, desc, hcds, new RegionSplitter.HexStringSplit(),
+ numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor td,
+ ColumnFamilyDescriptor[] cds, SplitAlgorithm splitter, int numRegionsPerServer)
+ throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(td);
+ for (ColumnFamilyDescriptor cd : cds) {
+ if (!td.hasColumnFamily(cd.getName())) {
+ builder.setColumnFamily(cd);
+ }
+ }
+ td = builder.build();
+ int totalNumberOfRegions = 0;
+ Connection unmanagedConnection = ConnectionFactory.createConnection(conf);
+ Admin admin = unmanagedConnection.getAdmin();
+
+ try {
+ // create a table a pre-splits regions.
+ // The number of splits is set as:
+ // region servers * regions per region server).
+ int numberOfServers = admin.getRegionServers().size();
+ if (numberOfServers == 0) {
+ throw new IllegalStateException("No live regionservers");
+ }
+
+ totalNumberOfRegions = numberOfServers * numRegionsPerServer;
+ LOG.info("Number of live regionservers: " + numberOfServers + ", "
+ + "pre-splitting table into " + totalNumberOfRegions + " regions " + "(regions per server: "
+ + numRegionsPerServer + ")");
+
+ byte[][] splits = splitter.split(totalNumberOfRegions);
+
+ admin.createTable(td, splits);
+ } catch (MasterNotRunningException e) {
+ LOG.error("Master not running", e);
+ throw new IOException(e);
+ } catch (TableExistsException e) {
+ LOG.warn("Table " + td.getTableName() + " already exists, continuing");
+ } finally {
+ admin.close();
+ unmanagedConnection.close();
+ }
+ return totalNumberOfRegions;
+ }
+
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 2476fb388084..3914884fec95 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,6 +52,7 @@
/**
* Common base class for reader and writer parts of multi-thread HBase load test (See LoadTestTool).
*/
+@InterfaceAudience.Private
public abstract class MultiThreadedAction {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedAction.class);
@@ -78,7 +80,7 @@ public abstract class MultiThreadedAction {
* Default implementation of LoadTestDataGenerator that uses LoadTestKVGenerator, fixed set of
* column families, and random number of columns in range. The table for it can be created
* manually or, for example, via
- * {@link org.apache.hadoop.hbase.HBaseTestingUtil#createPreSplitLoadTestTable(Configuration, TableName, byte[], org.apache.hadoop.hbase.io.compress.Compression.Algorithm, org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)}
+ * {@link org.apache.hadoop.hbase.util.LoadTestUtil#createPreSplitLoadTestTable(Configuration, TableName, byte[], org.apache.hadoop.hbase.io.compress.Compression.Algorithm, org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)}
*/
public static class DefaultDataGenerator extends LoadTestDataGenerator {
private byte[][] columnFamilies = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
similarity index 99%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
index b69f67e2f184..e8e3bea66742 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
@@ -33,10 +33,12 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Creates multiple threads that read and verify previously written data */
+@InterfaceAudience.Private
public class MultiThreadedReader extends MultiThreadedAction {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReader.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
similarity index 96%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
index 23087ae3c0a7..d928ac214063 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
@@ -26,16 +26,17 @@
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A MultiThreadReader that helps to work with ACL
*/
+@InterfaceAudience.Private
public class MultiThreadedReaderWithACL extends MultiThreadedReader {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReaderWithACL.class);
@@ -123,7 +124,7 @@ public Object run() throws Exception {
UserGroupInformation realUserUgi;
if (!users.containsKey(userNames[mod])) {
if (User.isHBaseSecurityEnabled(conf)) {
- realUserUgi = HBaseKerberosUtils.loginAndReturnUGI(conf, userNames[mod]);
+ realUserUgi = DiagnosticToolsCommonUtils.loginAndReturnUGI(conf, userNames[mod]);
} else {
realUserUgi = UserGroupInformation.createRemoteUser(userNames[mod]);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
similarity index 99%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index ff70cef91f8a..99ce506c0c9e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -45,6 +45,7 @@
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -53,6 +54,7 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
/** Creates multiple threads that write key/values into the */
+@InterfaceAudience.Private
public class MultiThreadedUpdater extends MultiThreadedWriterBase {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdater.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
index a3a2c4946572..1018928e82d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
@@ -34,17 +34,18 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A MultiThreadUpdater that helps to work with ACL
*/
+@InterfaceAudience.Private
public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdaterWithACL.class);
private final static String COMMA = ",";
@@ -138,7 +139,7 @@ public Object run() throws Exception {
try {
if (!users.containsKey(userNames[mod])) {
if (User.isHBaseSecurityEnabled(conf)) {
- realUserUgi = HBaseKerberosUtils.loginAndReturnUGI(conf, userNames[mod]);
+ realUserUgi = DiagnosticToolsCommonUtils.loginAndReturnUGI(conf, userNames[mod]);
} else {
realUserUgi = UserGroupInformation.createRemoteUser(userNames[mod]);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
index 3a83446cddfe..f3b8b2033dcb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
@@ -32,10 +32,12 @@
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Creates multiple threads that write key/values into the */
+@InterfaceAudience.Private
public abstract class MultiThreadedWriterBase extends MultiThreadedAction {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterBase.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
index 96e2748012ea..a127b0230477 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
@@ -29,12 +29,14 @@
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* MultiThreadedWriter that helps in testing ACL
*/
+@InterfaceAudience.Private
public class MultiThreadedWriterWithACL extends MultiThreadedWriter {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterWithACL.class);
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistribution.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/RandomDistribution.java
similarity index 98%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistribution.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/RandomDistribution.java
index 6635accedbb0..e5d2a66ebe5b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistribution.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/RandomDistribution.java
@@ -21,6 +21,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* A class that generates random numbers that follow some distribution.
@@ -29,6 +30,7 @@
* Remove after tfile is committed and use the tfile version of this class instead.
*
*/
+@InterfaceAudience.Private
public class RandomDistribution {
/**
* Interface for discrete (integer) random distributions.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
similarity index 100%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
similarity index 97%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
index 3d549ed29137..f18bf2eb4987 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
@@ -43,10 +43,10 @@ public class TestKeyProvider {
@Test
public void testTestProvider() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
KeyProvider provider = Encryption.getKeyProvider(conf);
assertNotNull("Null returned for provider", provider);
- assertTrue("Provider is not the expected type", provider instanceof KeyProviderForTesting);
+ assertTrue("Provider is not the expected type", provider instanceof KeyProviderForTestingCopy);
Key key = provider.getKey("foo");
assertNotNull("Test provider did not return a key as expected", key);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
similarity index 97%
rename from hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
index 6f7be8315853..ec36c43c1d5d 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
@@ -28,7 +28,7 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -105,7 +105,7 @@ public void testWALKeyWrappingWithInvalidHashAlg() throws Exception {
public void testWALKeyWrappingWithIncorrectKey() throws Exception {
// set up the key provider for testing to resolve a key for our test subject
Configuration conf = new Configuration(); // we don't need HBaseConfiguration for this
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
// generate a test key
byte[] keyBytes = new byte[AES.KEY_LENGTH];
@@ -144,7 +144,7 @@ public void testHashAlgorithmMismatchShouldNotFailWithDefaultConfig() throws Exc
private void testKeyWrapping(String hashAlgorithm) throws Exception {
// set up the key provider for testing to resolve a key for our test subject
Configuration conf = new Configuration(); // we don't need HBaseConfiguration for this
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
if (!hashAlgorithm.equals(DEFAULT_HASH_ALGORITHM)) {
conf.set(Encryption.CRYPTO_KEY_HASH_ALGORITHM_CONF_KEY, hashAlgorithm);
}
@@ -180,7 +180,7 @@ private void testKeyWrapping(String hashAlgorithm) throws Exception {
private void testWALKeyWrapping(String hashAlgorithm) throws Exception {
// set up the key provider for testing to resolve a key for our test subject
Configuration conf = new Configuration(); // we don't need HBaseConfiguration for this
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
if (!hashAlgorithm.equals(DEFAULT_HASH_ALGORITHM)) {
conf.set(Encryption.CRYPTO_KEY_HASH_ALGORITHM_CONF_KEY, hashAlgorithm);
}
@@ -207,7 +207,7 @@ private void testWALKeyWrapping(String hashAlgorithm) throws Exception {
private void testKeyWrappingWithMismatchingAlgorithms(Configuration conf) throws Exception {
// we use MD5 to hash the encryption key during wrapping
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(Encryption.CRYPTO_KEY_HASH_ALGORITHM_CONF_KEY, "MD5");
// generate a test key
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
similarity index 93%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
index cdf9f6101e47..8ac6072a7097 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
@@ -77,7 +77,7 @@ private void loadData() throws IOException {
// start the writers
LoadTestDataGenerator dataGen = new MultiThreadedAction.DefaultDataGenerator(minColDataSize,
- maxColDataSize, minColsPerKey, maxColsPerKey, HFileTestUtil.DEFAULT_COLUMN_FAMILY);
+ maxColDataSize, minColsPerKey, maxColsPerKey, LoadTestUtil.DEFAULT_COLUMN_FAMILY);
MultiThreadedWriter writer = new MultiThreadedWriter(dataGen, conf, TABLE_NAME);
writer.setMultiPut(true);
writer.start(startKey, endKey, numThreads);
@@ -96,8 +96,8 @@ protected int doWork() throws Exception {
hbaseCluster.startHBase();
// create tables if needed
- HBaseTestingUtil.createPreSplitLoadTestTable(conf, TABLE_NAME,
- HFileTestUtil.DEFAULT_COLUMN_FAMILY, Compression.Algorithm.NONE, DataBlockEncoding.NONE);
+ LoadTestUtil.createPreSplitLoadTestTable(conf, TABLE_NAME, LoadTestUtil.DEFAULT_COLUMN_FAMILY,
+ Compression.Algorithm.NONE, DataBlockEncoding.NONE);
LOG.debug("Loading data....\n\n");
loadData();
@@ -135,8 +135,7 @@ protected int doWork() throws Exception {
@Override
protected void addOptions() {
addOptWithArg(OPT_NUM_RS, "Number of Region Servers");
- addOptWithArg(HFileTestUtil.OPT_DATA_BLOCK_ENCODING,
- HFileTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
+ addOptWithArg(LoadTestUtil.OPT_DATA_BLOCK_ENCODING, LoadTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
}
@Override
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
similarity index 100%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
index 7d0666886128..4980b292c08d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
@@ -148,7 +148,7 @@ protected void runLoadTestOnExistingTable() throws IOException {
protected void createPreSplitLoadTestTable(TableDescriptor tableDescriptor,
ColumnFamilyDescriptor familyDescriptor) throws IOException {
- HBaseTestingUtil.createPreSplitLoadTestTable(conf, tableDescriptor, familyDescriptor);
+ LoadTestUtil.createPreSplitLoadTestTable(conf, tableDescriptor, familyDescriptor);
TEST_UTIL.waitUntilAllRegionsAssigned(tableDescriptor.getTableName());
}
diff --git a/hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties b/hbase-diagnostics/src/test/resources/org/apache/hadoop/hbase/mapreduce/PerformanceEvaluation_Counter.properties
similarity index 100%
rename from hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
rename to hbase-diagnostics/src/test/resources/org/apache/hadoop/hbase/mapreduce/PerformanceEvaluation_Counter.properties
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 96033b151299..330a8844bd19 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -106,6 +106,17 @@
+
+ org.apache.hbase
+ hbase-diagnostics
+ test
+
+
+ org.apache.hbase
+ hbase-diagnostics
+ test-jar
+ test
+ org.bouncycastle
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
index 1a0446381aed..6b0fffd78411 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
@@ -50,6 +50,7 @@
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.LoadTestUtil;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Assert;
@@ -212,7 +213,7 @@ private void createTable(TableName tableName) throws Exception {
LOG.info("Creating table {} with {} splits.", tableName,
regionsCountPerServer * regionServerCount);
startTime = EnvironmentEdgeManager.currentTime();
- HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), desc, columns,
+ LoadTestUtil.createPreSplitLoadTestTable(util.getConfiguration(), desc, columns,
regionsCountPerServer);
util.waitTableAvailable(tableName);
endTime = EnvironmentEdgeManager.currentTime();
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
index 7bcb017cbb1b..43357304aa09 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
@@ -28,6 +28,7 @@
import org.apache.hadoop.hbase.regionserver.StripeStoreEngine;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.apache.hadoop.hbase.util.LoadTestUtil;
import org.apache.hadoop.util.ToolRunner;
import org.junit.experimental.categories.Category;
@@ -45,7 +46,7 @@ protected void initTable() throws IOException {
.setValue(HStore.BLOCKING_STOREFILES_KEY, "100").build();
ColumnFamilyDescriptor familyDescriptor =
ColumnFamilyDescriptorBuilder.of(HFileTestUtil.DEFAULT_COLUMN_FAMILY);
- HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), tableDescriptor,
+ LoadTestUtil.createPreSplitLoadTestTable(util.getConfiguration(), tableDescriptor,
familyDescriptor);
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
index e0dcb0c48582..86fc1d2fd531 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
@@ -24,7 +24,7 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.EncryptionTest;
@@ -48,7 +48,7 @@ public void setUpCluster() throws Exception {
if (!util.isDistributedCluster()) {
// Inject required configuration if we are not running in distributed mode
conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 0e259f5072ae..0cd88e463e11 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -40,7 +40,6 @@
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.IntegrationTestBase;
@@ -77,6 +76,7 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.LoadTestUtil;
import org.apache.hadoop.hbase.util.Random64;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.hbase.wal.WALEdit;
@@ -752,15 +752,14 @@ protected void createSchema() throws IOException {
// If we want to pre-split compute how many splits.
if (
- conf.getBoolean(HBaseTestingUtil.PRESPLIT_TEST_TABLE_KEY,
- HBaseTestingUtil.PRESPLIT_TEST_TABLE)
+ conf.getBoolean(LoadTestUtil.PRESPLIT_TEST_TABLE_KEY, LoadTestUtil.PRESPLIT_TEST_TABLE)
) {
int numberOfServers = admin.getRegionServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
- int regionsPerServer = conf.getInt(HBaseTestingUtil.REGIONS_PER_SERVER_KEY,
- HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
+ int regionsPerServer = conf.getInt(LoadTestUtil.REGIONS_PER_SERVER_KEY,
+ LoadTestUtil.DEFAULT_REGIONS_PER_SERVER);
int totalNumberOfRegions = numberOfServers * regionsPerServer;
LOG.info("Number of live regionservers: " + numberOfServers + ", "
+ "pre-splitting table into " + totalNumberOfRegions + " regions "
@@ -1899,9 +1898,9 @@ private void printCommands() {
System.err.println(" -D" + TABLE_NAME_KEY + "=");
System.err.println(
" Run using the as the tablename. Defaults to " + DEFAULT_TABLE_NAME);
- System.err.println(" -D" + HBaseTestingUtil.REGIONS_PER_SERVER_KEY + "=<# regions>");
+ System.err.println(" -D" + LoadTestUtil.REGIONS_PER_SERVER_KEY + "=<# regions>");
System.err.println(" Create table with presplit regions per server. Defaults to "
- + HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
+ + LoadTestUtil.DEFAULT_REGIONS_PER_SERVER);
System.err.println(" -DuseMob=");
System.err.println(
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
index fab08dbb2ec3..6455219ff41d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
@@ -49,7 +49,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -79,6 +78,7 @@
import org.apache.hadoop.hbase.test.util.warc.WARCWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.LoadTestUtil;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
@@ -479,15 +479,15 @@ void createSchema(final TableName tableName) throws IOException {
TableDescriptorBuilder.newBuilder(tableName).setColumnFamilies(families).build();
if (
- getConf().getBoolean(HBaseTestingUtil.PRESPLIT_TEST_TABLE_KEY,
- HBaseTestingUtil.PRESPLIT_TEST_TABLE)
+ getConf().getBoolean(LoadTestUtil.PRESPLIT_TEST_TABLE_KEY,
+ LoadTestUtil.PRESPLIT_TEST_TABLE)
) {
int numberOfServers = admin.getRegionServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
- int regionsPerServer = getConf().getInt(HBaseTestingUtil.REGIONS_PER_SERVER_KEY,
- HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
+ int regionsPerServer = getConf().getInt(LoadTestUtil.REGIONS_PER_SERVER_KEY,
+ LoadTestUtil.DEFAULT_REGIONS_PER_SERVER);
int totalNumberOfRegions = numberOfServers * regionsPerServer;
LOG.info("Creating test table: " + tableDescriptor);
LOG.info("Number of live regionservers: " + numberOfServers + ", "
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 0714f27e64d9..77206c839c7d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -64,7 +64,6 @@
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HadoopShims;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.StartTestingClusterOption;
@@ -137,7 +136,7 @@
/**
* Simple test for {@link HFileOutputFormat2}. Sets up and runs a mapreduce job that writes hfile
* output. Creates a few inner classes to implement splits and an inputformat that emits keys and
- * values like those of {@link PerformanceEvaluation}.
+ * values.
*/
@Category({ VerySlowMapReduceTests.class, LargeTests.class })
public class TestHFileOutputFormat2 {
@@ -554,18 +553,46 @@ private byte[][] generateRandomStartKeys(int numKeys) {
// first region start key is always empty
ret[0] = HConstants.EMPTY_BYTE_ARRAY;
for (int i = 1; i < numKeys; i++) {
- ret[i] =
- PerformanceEvaluation.generateData(random, PerformanceEvaluation.DEFAULT_VALUE_LENGTH);
+ ret[i] = generateData(random, DEFAULT_VALUE_LENGTH);
}
return ret;
}
+ // TODO: Copied from PerformanceEvaluation
+ public static final int DEFAULT_VALUE_LENGTH = 1000;
+
+ /*
+ * This method takes some time and is done inline uploading data. For example, doing the mapfile
+ * test, generation of the key and value consumes about 30% of CPU time.
+ * @return Generated random value to insert into a table cell.
+ */
+ public static byte[] generateData(final Random r, int length) {
+ byte[] b = new byte[length];
+ int i;
+
+ for (i = 0; i < (length - 8); i += 8) {
+ b[i] = (byte) (65 + r.nextInt(26));
+ b[i + 1] = b[i];
+ b[i + 2] = b[i];
+ b[i + 3] = b[i];
+ b[i + 4] = b[i];
+ b[i + 5] = b[i];
+ b[i + 6] = b[i];
+ b[i + 7] = b[i];
+ }
+
+ byte a = (byte) (65 + r.nextInt(26));
+ for (; i < length; i++) {
+ b[i] = a;
+ }
+ return b;
+ }
+
private byte[][] generateRandomSplitKeys(int numKeys) {
Random random = ThreadLocalRandom.current();
byte[][] ret = new byte[numKeys][];
for (int i = 0; i < numKeys; i++) {
- ret[i] =
- PerformanceEvaluation.generateData(random, PerformanceEvaluation.DEFAULT_VALUE_LENGTH);
+ ret[i] = generateData(random, DEFAULT_VALUE_LENGTH);
}
return ret;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java
index fd5b7dd729e0..2313ca730d49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java
@@ -183,15 +183,8 @@
@InterfaceStability.Evolving
public class HBaseTestingUtil extends HBaseZKTestingUtil {
- public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
- /**
- * The default number of regions per regionserver when creating a pre-split table.
- */
public static final int DEFAULT_REGIONS_PER_SERVER = 3;
- public static final String PRESPLIT_TEST_TABLE_KEY = "hbase.test.pre-split-table";
- public static final boolean PRESPLIT_TEST_TABLE = true;
-
private MiniDFSCluster dfsCluster = null;
private FsDatasetAsyncDiskServiceFixer dfsClusterFixer = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index f1c7d0770a08..2a4bb33c8cac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -60,7 +60,6 @@
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.filter.FilterAllFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -510,29 +509,30 @@ public void testHBase3583() throws IOException {
table.close();
}
- @Test
- public void testHBASE14489() throws IOException {
- final TableName tableName = TableName.valueOf(name.getMethodName());
- Table table = util.createTable(tableName, new byte[][] { A });
- Put put = new Put(ROW);
- put.addColumn(A, A, A);
- table.put(put);
-
- Scan s = new Scan();
- s.setFilter(new FilterAllFilter());
- ResultScanner scanner = table.getScanner(s);
- try {
- for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
- }
- } finally {
- scanner.close();
- }
- verifyMethodResult(SimpleRegionObserver.class, new String[] { "wasScannerFilterRowCalled" },
- tableName, new Boolean[] { true });
- util.deleteTable(tableName);
- table.close();
-
- }
+ // TODO: Below test seems to be using FilterAllFilter
+ // @Test
+ // public void testHBASE14489() throws IOException {
+ // final TableName tableName = TableName.valueOf(name.getMethodName());
+ // Table table = util.createTable(tableName, new byte[][] { A });
+ // Put put = new Put(ROW);
+ // put.addColumn(A, A, A);
+ // table.put(put);
+ //
+ // Scan s = new Scan();
+ // s.setFilter(new FilterAllFilter());
+ // ResultScanner scanner = table.getScanner(s);
+ // try {
+ // for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
+ // }
+ // } finally {
+ // scanner.close();
+ // }
+ // verifyMethodResult(SimpleRegionObserver.class, new String[] { "wasScannerFilterRowCalled" },
+ // tableName, new Boolean[] { true });
+ // util.deleteTable(tableName);
+ // table.close();
+ //
+ // }
@Test
// HBase-3758
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java
index 04be6064ef3f..83eafcc5bf94 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.io.hfile;
import java.util.Random;
-import org.apache.hadoop.hbase.util.RandomDistribution;
+import org.apache.hadoop.hbase.util.RandomDistributionCopy;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.WritableComparator;
@@ -33,13 +33,13 @@ class KVGenerator {
private final Random random;
private final byte[][] dict;
private final boolean sorted;
- private final RandomDistribution.DiscreteRNG keyLenRNG, valLenRNG;
+ private final RandomDistributionCopy.DiscreteRNG keyLenRNG, valLenRNG;
private BytesWritable lastKey;
private static final int MIN_KEY_LEN = 4;
private final byte prefix[] = new byte[MIN_KEY_LEN];
- public KVGenerator(Random random, boolean sorted, RandomDistribution.DiscreteRNG keyLenRNG,
- RandomDistribution.DiscreteRNG valLenRNG, RandomDistribution.DiscreteRNG wordLenRNG,
+ public KVGenerator(Random random, boolean sorted, RandomDistributionCopy.DiscreteRNG keyLenRNG,
+ RandomDistributionCopy.DiscreteRNG valLenRNG, RandomDistributionCopy.DiscreteRNG wordLenRNG,
int dictSize) {
this.random = random;
dict = new byte[dictSize][];
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java
index 116d6d7bcefe..137534591aeb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.io.hfile;
import java.util.Random;
-import org.apache.hadoop.hbase.util.RandomDistribution.DiscreteRNG;
+import org.apache.hadoop.hbase.util.RandomDistributionCopy.DiscreteRNG;
import org.apache.hadoop.io.BytesWritable;
/*
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index 663c0d540499..7ce3fe9c88b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -46,7 +46,7 @@
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -77,7 +77,7 @@ public static void setUp() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
// Disable block cache in this test.
conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setInt("hfile.format.version", 3);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
index 3018d321480d..3a77871e21c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.RandomDistribution;
+import org.apache.hadoop.hbase.util.RandomDistributionCopy;
import org.apache.hadoop.io.BytesWritable;
import org.junit.After;
import org.junit.Before;
@@ -76,7 +76,7 @@ public class TestHFileSeek {
private FileSystem fs;
private NanoTimer timer;
private Random rng;
- private RandomDistribution.DiscreteRNG keyLenGen;
+ private RandomDistributionCopy.DiscreteRNG keyLenGen;
private KVGenerator kvGen;
private static final Logger LOG = LoggerFactory.getLogger(TestHFileSeek.class);
@@ -99,11 +99,11 @@ public void setUp() throws IOException {
fs = path.getFileSystem(conf);
timer = new NanoTimer(false);
rng = new Random(options.seed);
- keyLenGen = new RandomDistribution.Zipf(new Random(rng.nextLong()), options.minKeyLen,
+ keyLenGen = new RandomDistributionCopy.Zipf(new Random(rng.nextLong()), options.minKeyLen,
options.maxKeyLen, 1.2);
- RandomDistribution.DiscreteRNG valLenGen = new RandomDistribution.Flat(
+ RandomDistributionCopy.DiscreteRNG valLenGen = new RandomDistributionCopy.Flat(
new Random(rng.nextLong()), options.minValLength, options.maxValLength);
- RandomDistribution.DiscreteRNG wordLenGen = new RandomDistribution.Flat(
+ RandomDistributionCopy.DiscreteRNG wordLenGen = new RandomDistributionCopy.Flat(
new Random(rng.nextLong()), options.minWordLen, options.maxWordLen);
kvGen = new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen, options.dictSize);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java
index 2d5b95028f6f..f696c34ef4e7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java
@@ -50,7 +50,7 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGeneratorCopy;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@@ -76,7 +76,8 @@ public class TestNettyRpcServer {
private static final int NUM_ROWS = 100;
private static final int MIN_LEN = 1000;
private static final int MAX_LEN = 1000000;
- protected static final LoadTestKVGenerator GENERATOR = new LoadTestKVGenerator(MIN_LEN, MAX_LEN);
+ protected static final LoadTestKVGeneratorCopy GENERATOR =
+ new LoadTestKVGeneratorCopy(MIN_LEN, MAX_LEN);
protected static HBaseTestingUtil TEST_UTIL;
@Rule
@@ -122,18 +123,18 @@ protected void doTest(TableName tableName) throws Exception {
TEST_UTIL.createTable(desc, new byte[][] { FAMILY }, TEST_UTIL.getConfiguration())) {
// put some test data
for (int i = 0; i < NUM_ROWS; i++) {
- final byte[] rowKey = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
+ final byte[] rowKey = Bytes.toBytes(LoadTestKVGeneratorCopy.md5PrefixedKey(i));
final byte[] v = GENERATOR.generateRandomSizeValue(rowKey, QUALIFIER);
table.put(new Put(rowKey).addColumn(FAMILY, QUALIFIER, v));
}
// read to verify it.
for (int i = 0; i < NUM_ROWS; i++) {
- final byte[] rowKey = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
+ final byte[] rowKey = Bytes.toBytes(LoadTestKVGeneratorCopy.md5PrefixedKey(i));
final Result r = table.get(new Get(rowKey).addColumn(FAMILY, QUALIFIER));
assertNotNull("Result was empty", r);
final byte[] v = r.getValue(FAMILY, QUALIFIER);
assertNotNull("Result did not contain expected value", v);
- assertTrue("Value was not verified", LoadTestKVGenerator.verify(v, rowKey, QUALIFIER));
+ assertTrue("Value was not verified", LoadTestKVGeneratorCopy.verify(v, rowKey, QUALIFIER));
}
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcServer.java
index f2420e028b1a..15b71efd8b59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcServer.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGeneratorCopy;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -54,7 +54,8 @@ public class TestSimpleRpcServer {
private static final int NUM_ROWS = 100;
private static final int MIN_LEN = 1000;
private static final int MAX_LEN = 1000000;
- protected static final LoadTestKVGenerator GENERATOR = new LoadTestKVGenerator(MIN_LEN, MAX_LEN);
+ protected static final LoadTestKVGeneratorCopy GENERATOR =
+ new LoadTestKVGeneratorCopy(MIN_LEN, MAX_LEN);
protected static HBaseTestingUtil TEST_UTIL;
@Rule
@@ -91,18 +92,18 @@ protected void doTest(TableName tableName) throws Exception {
TEST_UTIL.createTable(desc, new byte[][] { FAMILY }, TEST_UTIL.getConfiguration())) {
// put some test data
for (int i = 0; i < NUM_ROWS; i++) {
- final byte[] rowKey = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
+ final byte[] rowKey = Bytes.toBytes(LoadTestKVGeneratorCopy.md5PrefixedKey(i));
final byte[] v = GENERATOR.generateRandomSizeValue(rowKey, QUALIFIER);
table.put(new Put(rowKey).addColumn(FAMILY, QUALIFIER, v));
}
// read to verify it.
for (int i = 0; i < NUM_ROWS; i++) {
- final byte[] rowKey = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
+ final byte[] rowKey = Bytes.toBytes(LoadTestKVGeneratorCopy.md5PrefixedKey(i));
final Result r = table.get(new Get(rowKey).addColumn(FAMILY, QUALIFIER));
assertNotNull("Result was empty", r);
final byte[] v = r.getValue(FAMILY, QUALIFIER);
assertNotNull("Result did not contain expected value", v);
- assertTrue("Value was not verified", LoadTestKVGenerator.verify(v, rowKey, QUALIFIER));
+ assertTrue("Value was not verified", LoadTestKVGeneratorCopy.verify(v, rowKey, QUALIFIER));
}
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
index 158b8f0e70c2..a9b89ad278ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
@@ -61,7 +61,7 @@
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
+import org.apache.hadoop.hbase.util.LoadTestKVGeneratorCopy;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
@@ -365,7 +365,7 @@ private static List generateTestData(final TableName tableName,
private static void generateTestData(Region region, int numRows) throws IOException {
// generating 1Mb values
LOG.debug("writing {}mb to {}", numRows, region);
- LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(1024 * 1024, 1024 * 1024);
+ LoadTestKVGeneratorCopy dataGenerator = new LoadTestKVGeneratorCopy(1024 * 1024, 1024 * 1024);
for (int i = 0; i < numRows; ++i) {
byte[] key = Bytes.add(region.getRegionInfo().getStartKey(), Bytes.toBytes(i));
for (int j = 0; j < 1; ++j) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java
index 0506bd020726..6917ff418ac2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -56,7 +56,7 @@ public class TestEncryptionDisabled {
@BeforeClass
public static void setUp() throws Exception {
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.set(Encryption.CRYPTO_ENABLED_CONF_KEY, "false");
conf.set(TableDescriptorChecker.TABLE_SANITY_CHECKS, "true");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index 6040084ee4dc..683af78ad92e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -41,7 +41,7 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -86,7 +86,7 @@ public class TestEncryptionKeyRotation {
@BeforeClass
public static void setUp() throws Exception {
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
// Start the minicluster
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index a71415a16a61..78788185606f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -89,7 +89,7 @@ private static byte[] extractHFileKey(Path path) throws Exception {
@BeforeClass
public static void setUp() throws Exception {
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
// Create the table schema
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
index 0ec1f75e2690..43ede4ddfe3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
@@ -55,7 +55,7 @@
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.mob.MobConstants;
@@ -469,7 +469,7 @@ private static void flushStore(HMobStore store, long id) throws IOException {
public void testMOBStoreEncryption() throws Exception {
final Configuration conf = TEST_UTIL.getConfiguration();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
byte[] keyBytes = new byte[AES.KEY_LENGTH];
Bytes.secureRandom(keyBytes);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java
index b702ef394d52..879a2e534b53 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.junit.BeforeClass;
@@ -37,7 +37,7 @@ public class TestSecureAsyncWALReplay extends TestAsyncWALReplay {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Configuration conf = AbstractTestWALReplay.TEST_UTIL.getConfiguration();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
TestAsyncWALReplay.setUpBeforeClass();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
index 796996ce180a..5f3a72e5842a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.junit.BeforeClass;
@@ -37,7 +37,7 @@ public class TestSecureWALReplay extends TestWALReplay {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Configuration conf = AbstractTestWALReplay.TEST_UTIL.getConfiguration();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
AbstractTestWALReplay.setUpBeforeClass();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index d017db6eb9bd..8f5e660f1f7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -22,7 +22,6 @@
import static org.junit.Assert.fail;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Locale;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
@@ -51,10 +50,6 @@
* Utility class for HFile-related testing.
*/
public class HFileTestUtil {
-
- public static final String OPT_DATA_BLOCK_ENCODING_USAGE = "Encoding algorithm (e.g. prefix "
- + "compression) to use for data blocks in the test column family, " + "one of "
- + Arrays.toString(DataBlockEncoding.values()) + ".";
public static final String OPT_DATA_BLOCK_ENCODING =
ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
/** Column family used by the test */
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGeneratorCopy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGeneratorCopy.java
new file mode 100644
index 000000000000..e4df2eedca9b
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGeneratorCopy.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.Random;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A generator of random keys and values for load testing. Keys are generated by converting numeric
+ * indexes to strings and prefixing them with an MD5 hash. Values are generated by selecting value
+ * size in the configured range and generating a pseudo-random sequence of bytes seeded by key,
+ * column qualifier, and value size.
+ */
+@InterfaceAudience.Private
+public class LoadTestKVGeneratorCopy {
+
+ // TODO: What do we do about this class
+ private static final Logger LOG = LoggerFactory.getLogger(LoadTestKVGeneratorCopy.class);
+ private static int logLimit = 10;
+
+ /** A random number generator for determining value size */
+ private Random randomForValueSize = new Random(); // Seed may be set with Random#setSeed
+
+ private final int minValueSize;
+ private final int maxValueSize;
+
+ public LoadTestKVGeneratorCopy(int minValueSize, int maxValueSize) {
+ if (minValueSize <= 0 || maxValueSize <= 0) {
+ throw new IllegalArgumentException(
+ "Invalid min/max value sizes: " + minValueSize + ", " + maxValueSize);
+ }
+ this.minValueSize = minValueSize;
+ this.maxValueSize = maxValueSize;
+ }
+
+ /**
+ * Verifies that the given byte array is the same as what would be generated for the given seed
+ * strings (row/cf/column/...). We are assuming that the value size is correct, and only verify
+ * the actual bytes. However, if the min/max value sizes are set sufficiently high, an accidental
+ * match should be extremely improbable.
+ */
+ public static boolean verify(byte[] value, byte[]... seedStrings) {
+ byte[] expectedData = getValueForRowColumn(value.length, seedStrings);
+ boolean equals = Bytes.equals(expectedData, value);
+ if (!equals && LOG.isDebugEnabled() && logLimit > 0) {
+ LOG.debug("verify failed, expected value: " + Bytes.toStringBinary(expectedData)
+ + " actual value: " + Bytes.toStringBinary(value));
+ logLimit--; // this is not thread safe, but at worst we will have more logging
+ }
+ return equals;
+ }
+
+ /**
+ * Converts the given key to string, and prefixes it with the MD5 hash of the index's string
+ * representation.
+ */
+ public static String md5PrefixedKey(long key) {
+ String stringKey = Long.toString(key);
+ String md5hash = MD5Hash.getMD5AsHex(Bytes.toBytes(stringKey));
+
+ // flip the key to randomize
+ return md5hash + "-" + stringKey;
+ }
+
+ /**
+ * Generates a value for the given key index and column qualifier. Size is selected randomly in
+ * the configured range. The generated value depends only on the combination of the strings passed
+ * (key/cf/column/...) and the selected value size. This allows to verify the actual value bytes
+ * when reading, as done in {#verify(byte[], byte[]...)} This method is as thread-safe as Random
+ * class. It appears that the worst bug ever found with the latter is that multiple threads will
+ * get some duplicate values, which we don't care about.
+ */
+ public byte[] generateRandomSizeValue(byte[]... seedStrings) {
+ int dataSize = minValueSize;
+ if (minValueSize != maxValueSize) {
+ dataSize = minValueSize + randomForValueSize.nextInt(Math.abs(maxValueSize - minValueSize));
+ }
+ return getValueForRowColumn(dataSize, seedStrings);
+ }
+
+ /**
+ * Generates random bytes of the given size for the given row and column qualifier. The random
+ * seed is fully determined by these parameters.
+ */
+ private static byte[] getValueForRowColumn(int dataSize, byte[]... seedStrings) {
+ long seed = dataSize;
+ for (byte[] str : seedStrings) {
+ final String bytesString = Bytes.toString(str);
+ if (bytesString != null) {
+ seed += bytesString.hashCode();
+ }
+ }
+ Random seededRandom = new Random(seed);
+ byte[] randomBytes = new byte[dataSize];
+ seededRandom.nextBytes(randomBytes);
+ return randomBytes;
+ }
+
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
index f9459bc5cdd1..cbe3df0887c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
@@ -30,7 +30,7 @@
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProvider;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
@@ -47,7 +47,7 @@ public class TestEncryptionTest {
@Test
public void testTestKeyProvider() throws Exception {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
EncryptionTest.testKeyProvider(conf);
}
@@ -77,7 +77,7 @@ public void testBadCipherProvider() throws Exception {
@Test
public void testAESCipher() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
try {
EncryptionTest.testEncryption(conf, algorithm, null);
@@ -89,7 +89,7 @@ public void testAESCipher() {
@Test(expected = IOException.class)
public void testUnknownCipher() throws Exception {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
EncryptionTest.testEncryption(conf, "foobar", null);
fail("Test for bogus cipher should have failed");
}
@@ -97,7 +97,7 @@ public void testUnknownCipher() throws Exception {
@Test
public void testTestEnabledWithDefaultConfig() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
try {
EncryptionTest.testEncryption(conf, algorithm, null);
@@ -110,7 +110,7 @@ public void testTestEnabledWithDefaultConfig() {
@Test
public void testTestEnabledWhenCryptoIsExplicitlyEnabled() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
conf.setBoolean(Encryption.CRYPTO_ENABLED_CONF_KEY, true);
try {
@@ -124,7 +124,7 @@ public void testTestEnabledWhenCryptoIsExplicitlyEnabled() {
@Test(expected = IOException.class)
public void testTestEnabledWhenCryptoIsExplicitlyDisabled() throws Exception {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
conf.setBoolean(Encryption.CRYPTO_ENABLED_CONF_KEY, false);
EncryptionTest.testEncryption(conf, algorithm, null);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index aeed1a9a4837..5cedc00ef8b8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -38,7 +38,7 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -78,7 +78,7 @@ public class TestHBaseFsckEncryption {
public void setUp() throws Exception {
conf = TEST_UTIL.getConfiguration();
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTestingCopy.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
// Create the test encryption key
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
index 75e4b0921f30..215e5b7d4e83 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
@@ -38,7 +38,7 @@
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTestingCopy;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -82,7 +82,7 @@ public static Iterable
+
+ org.apache.hbase
+ hbase-diagnostics
+ ${project.version}
+ test-jar
+ test
+ org.apache.hbasehbase-endpoint
@@ -1361,6 +1369,11 @@
hbase-openssl${project.version}
+
+ org.apache.hbase
+ hbase-diagnostics
+ ${project.version}
+ com.github.stephenc.findbugs