diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 8fd2d5f7fb26..b230ce8948c5 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -36,6 +36,7 @@
 import java.util.Locale;
 import java.util.Map;
 import java.util.NoSuchElementException;
+import java.util.Properties;
 import java.util.Queue;
 import java.util.Random;
 import java.util.TreeMap;
@@ -360,7 +361,8 @@ static boolean checkTable(Admin admin, TestOptions opts) throws IOException {
     // {RegionSplitPolicy,replica count} does not match requested, or when the
     // number of column families does not match requested.
     if (
-      (exists && opts.presplitRegions != DEFAULT_OPTS.presplitRegions)
+      (exists && opts.presplitRegions != DEFAULT_OPTS.presplitRegions
+        && opts.presplitRegions != admin.getRegions(tableName).size())
         || (!isReadCmd && desc != null
           && !StringUtils.equals(desc.getRegionSplitPolicyClassName(), opts.splitPolicy))
         || (!isReadCmd && desc != null && desc.getRegionReplication() != opts.replicas)
@@ -728,6 +730,7 @@ static class TestOptions {
     boolean cacheBlocks = true;
     Scan.ReadType scanReadType = Scan.ReadType.DEFAULT;
     long bufferSize = 2l * 1024l * 1024l;
+    Properties commandProperties;
 
     public TestOptions() {
     }
@@ -784,6 +787,11 @@ public TestOptions(TestOptions that) {
       this.cacheBlocks = that.cacheBlocks;
       this.scanReadType = that.scanReadType;
       this.bufferSize = that.bufferSize;
+      this.commandProperties = that.commandProperties;
+    }
+
+    public Properties getCommandProperties() {
+      return commandProperties;
     }
 
     public int getCaching() {
@@ -1149,10 +1157,10 @@ private static long nextRandomSeed() {
 
     protected final Configuration conf;
     protected final TestOptions opts;
 
-    private final Status status;
+    protected final Status status;
     private String testName;
-    private Histogram latencyHistogram;
+    protected Histogram latencyHistogram;
     private Histogram replicaLatencyHistogram;
     private Histogram valueSizeHistogram;
     private Histogram rpcCallsHistogram;
@@ -2581,7 +2589,7 @@ protected static void printUsage(final String shortName, final String message) {
       System.err.println(message);
     }
     System.err.print("Usage: hbase " + shortName);
-    System.err.println("  [-D<property=value>]* <command> <nclients>");
+    System.err.println("  [-D<property=value>]* <command|class> <nclients>");
     System.err.println();
     System.err.println("General Options:");
     System.err.println(
@@ -2682,6 +2690,13 @@ protected static void printUsage(final String shortName, final String message) {
       System.err.println(String.format(" %-20s %s", command.getName(), command.getDescription()));
     }
     System.err.println();
+    System.err.println("Class:");
+    System.err.println("To run any custom implementation of PerformanceEvaluation.Test, "
+      + "provide the class name of the implementation class in place of "
+      + "a command name and it will be loaded at runtime from the classpath.");
+    System.err.println("Please consider contributing such a custom test implementation "
+      + "back as a built-in PE command for the benefit of the community.");
+    System.err.println();
     System.err.println("Args:");
     System.err.println(" nclients        Integer. Required. Total number of clients "
       + "(and HRegionServers) running. 1 <= value <= 500");
@@ -2976,6 +2991,20 @@ static TestOptions parseOpts(Queue<String> args) {
         continue;
       }
 
+      final String commandPropertiesFile = "--commandPropertiesFile=";
+      if (cmd.startsWith(commandPropertiesFile)) {
+        String fileName = cmd.substring(commandPropertiesFile.length());
+        Properties properties = new Properties();
+        try {
+          properties
+            .load(PerformanceEvaluation.class.getClassLoader().getResourceAsStream(fileName));
+          opts.commandProperties = properties;
+        } catch (IOException e) {
+          LOG.error("Failed to load command properties from file " + fileName, e);
+        }
+        continue;
+      }
+
       validateParsedOpts(opts);
 
       if (isCommandClass(cmd)) {
@@ -3089,7 +3118,20 @@ public int run(String[] args) throws Exception {
   }
 
   private static boolean isCommandClass(String cmd) {
-    return COMMANDS.containsKey(cmd);
+    return COMMANDS.containsKey(cmd) || isCustomTestClass(cmd);
+  }
+
+  private static boolean isCustomTestClass(String cmd) {
+    Class<? extends Test> cmdClass;
+    try {
+      cmdClass =
+        (Class<? extends Test>) PerformanceEvaluation.class.getClassLoader().loadClass(cmd);
+      addCommandDescriptor(cmdClass, cmd, "custom command");
+      return true;
+    } catch (Throwable th) {
+      LOG.info("No class found for command: " + cmd, th);
+      return false;
+    }
   }
 
   private static Class<? extends TestBase> determineCommandClass(String cmd) {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
index f02d30c3887b..cf11510a897a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
@@ -28,6 +28,8 @@
 import com.codahale.metrics.UniformReservoir;
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.reflect.Constructor;
@@ -35,6 +37,7 @@
 import java.nio.charset.StandardCharsets;
 import java.util.LinkedList;
 import java.util.NoSuchElementException;
+import java.util.Properties;
 import java.util.Queue;
 import java.util.Random;
 import java.util.concurrent.ThreadLocalRandom;
@@ -42,8 +45,10 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest;
+import org.apache.hadoop.hbase.PerformanceEvaluation.Status;
 import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -359,4 +364,46 @@ public void testParseOptsValueRandom() {
     assertEquals(true, options.valueRandom);
   }
 
+  @Test
+  public void testCustomTestClassOptions() throws IOException {
+    Queue<String> opts = new LinkedList<>();
+    // create custom properties that can be used for a custom test class
+    Properties commandProps = new Properties();
+    commandProps.put("prop1", "val1");
+    String cmdPropsFilePath =
+      this.getClass().getClassLoader().getResource("").getPath() + "cmd_properties.txt";
+    FileWriter writer = new FileWriter(new File(cmdPropsFilePath));
+    commandProps.store(writer, null);
+    // create opts for the custom test class - commandPropertiesFile, testClassName
+    opts.offer("--commandPropertiesFile=" + "cmd_properties.txt");
testClassName = "org.apache.hadoop.hbase.TestPerformanceEvaluation$PESampleTestImpl"; + opts.offer(testClassName); + opts.offer("1"); + PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts); + assertNotNull(options); + assertNotNull(options.getCmdName()); + assertEquals(testClassName, options.getCmdName()); + assertNotNull(options.getCommandProperties()); + assertEquals("val1", options.getCommandProperties().get("prop1")); + } + + class PESampleTestImpl extends PerformanceEvaluation.Test { + + PESampleTestImpl(Connection con, TestOptions options, Status status) { + super(con, options, status); + } + + @Override + void onStartup() throws IOException { + } + + @Override + void onTakedown() throws IOException { + } + + @Override + boolean testRow(int i, long startTime) throws IOException, InterruptedException { + return false; + } + } }