diff --git a/bin/hbase b/bin/hbase index 44bf5cd742e6..840d4959bbdc 100755 --- a/bin/hbase +++ b/bin/hbase @@ -656,6 +656,8 @@ elif [ "$COMMAND" = "pre-upgrade" ] ; then CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator' elif [ "$COMMAND" = "completebulkload" ] ; then CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool' +elif [ "$COMMAND" = "top" ] ; then + CLASS='org.apache.hadoop.hbase.hbtop.HBTop' else CLASS=$COMMAND fi diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index 61ea15f125f8..3fe8fc2b8452 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -309,6 +309,10 @@ org.apache.hbase hbase-zookeeper + + org.apache.hbase + hbase-hbtop + jline jline diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml new file mode 100644 index 000000000000..82543b3fbd18 --- /dev/null +++ b/hbase-hbtop/pom.xml @@ -0,0 +1,90 @@ + + + + 4.0.0 + + hbase + org.apache.hbase + 3.0.0-SNAPSHOT + + hbase-hbtop + Apache HBase - HBTop + A real-time monitoring tool for HBase like Unix top command + + + + + org.apache.maven.plugins + maven-source-plugin + + + + org.apache.maven.plugins + maven-shade-plugin + 3.2.0 + + + package + + shade + + + + + classworlds:classworlds + junit:junit + jmock:* + *:xml-apis + org.apache.maven:lib:tests + log4j:log4j:jar: + + + + + + + + + + + org.apache.hbase + hbase-server + provided + + + com.googlecode.lanterna + lanterna + 3.0.1 + + + org.mockito + mockito-core + test + + + org.apache.hbase + hbase-testing-util + test + + + diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Filter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Filter.java new file mode 100644 index 000000000000..cd33f4e63585 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Filter.java @@ -0,0 +1,343 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldValue; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents a filter that's filtering the metric {@link Record}s. 
+ */ +@InterfaceAudience.Private +public final class Filter { + + private enum Operator { + EQUAL("="), + DOUBLE_EQUALS("=="), + GREATER(">"), + GREATER_OR_EQUAL(">="), + LESS("<"), + LESS_OR_EQUAL("<="); + + private final String operator; + + Operator(String operator) { + this.operator = operator; + } + + @Override + public String toString() { + return operator; + } + } + + public static Filter parse(String filterString, boolean ignoreCase) { + return parse(filterString, Arrays.asList(Field.values()), ignoreCase); + } + + public static Filter parse(String filterString, List fields, boolean ignoreCase) { + int index = 0; + + boolean not = isNot(filterString); + if (not) { + index += 1; + } + + StringBuilder fieldString = new StringBuilder(); + while (filterString.length() > index && filterString.charAt(index) != '<' + && filterString.charAt(index) != '>' && filterString.charAt(index) != '=') { + fieldString.append(filterString.charAt(index++)); + } + + if (fieldString.length() == 0 || filterString.length() == index) { + return null; + } + + Field field = getField(fields, fieldString.toString()); + if (field == null) { + return null; + } + + StringBuilder operatorString = new StringBuilder(); + while (filterString.length() > index && (filterString.charAt(index) == '<' || + filterString.charAt(index) == '>' || filterString.charAt(index) == '=')) { + operatorString.append(filterString.charAt(index++)); + } + + Operator operator = getOperator(operatorString.toString()); + if (operator == null) { + return null; + } + + String value = filterString.substring(index); + FieldValue fieldValue = getFieldValue(field, value); + if (fieldValue == null) { + return null; + } + + return new Filter(ignoreCase, not, field, operator, fieldValue); + } + + private static FieldValue getFieldValue(Field field, String value) { + try { + return field.newValue(value); + } catch (Exception e) { + return null; + } + } + + private static boolean isNot(String filterString) { + return filterString.startsWith("!"); + } + + private static Field getField(List fields, String fieldString) { + for (Field f : fields) { + if (f.getHeader().equals(fieldString)) { + return f; + } + } + return null; + } + + private static Operator getOperator(String operatorString) { + for (Operator o : Operator.values()) { + if (operatorString.equals(o.toString())) { + return o; + } + } + return null; + } + + private final boolean ignoreCase; + private final boolean not; + private final Field field; + private final Operator operator; + private final FieldValue value; + + private Filter(boolean ignoreCase, boolean not, Field field, Operator operator, + FieldValue value) { + this.ignoreCase = ignoreCase; + this.not = not; + this.field = Objects.requireNonNull(field); + this.operator = Objects.requireNonNull(operator); + this.value = Objects.requireNonNull(value); + } + + public boolean execute(Record record) { + FieldValue fieldValue = record.get(field); + if (fieldValue == null) { + return false; + } + + if (operator == Operator.EQUAL) { + boolean ret; + if (ignoreCase) { + ret = fieldValue.asString().toLowerCase().contains(value.asString().toLowerCase()); + } else { + ret = fieldValue.asString().contains(value.asString()); + } + return not != ret; + } + + int compare = ignoreCase ? 
+ fieldValue.compareToIgnoreCase(value) : fieldValue.compareTo(value); + + boolean ret; + switch (operator) { + case DOUBLE_EQUALS: + ret = compare == 0; + break; + + case GREATER: + ret = compare > 0; + break; + + case GREATER_OR_EQUAL: + ret = compare >= 0; + break; + + case LESS: + ret = compare < 0; + break; + + case LESS_OR_EQUAL: + ret = compare <= 0; + break; + + default: + throw new AssertionError(); + } + return not != ret; + } + + @Override + public String toString() { + return (not ? "!" : "") + field.getHeader() + operator + value.asString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Filter filter = (Filter) o; + + return ignoreCase == filter.ignoreCase && not == filter.not && field == filter.field + && operator == filter.operator && value.equals(filter.value); + } + + @Override + public int hashCode() { + int result = (ignoreCase ? 1 : 0); + result = 31 * result + (not ? 1 : 0); + result = 31 * result + field.hashCode(); + result = 31 * result + operator.hashCode(); + result = 31 * result + value.hashCode(); + return result; + } + + /* + * For FilterBuilder + */ + public static FilterBuilder newBuilder(Field field) { + return new FilterBuilder(field, false); + } + + public static FilterBuilder newBuilder(Field field, boolean ignoreCase) { + return new FilterBuilder(field, ignoreCase); + } + + public static final class FilterBuilder { + private final Field field; + private final boolean ignoreCase; + + private FilterBuilder(Field field, boolean ignoreCase) { + this.field = Objects.requireNonNull(field); + this.ignoreCase = ignoreCase; + } + + public Filter equal(FieldValue value) { + return newFilter(false, Operator.EQUAL, value); + } + + public Filter equal(Object value) { + return equal(field.newValue(value)); + } + + public Filter notEqual(FieldValue value) { + return newFilter(true, Operator.EQUAL, value); + } + + public Filter notEqual(Object value) { + return notEqual(field.newValue(value)); + } + + public Filter doubleEquals(FieldValue value) { + return newFilter(false, Operator.DOUBLE_EQUALS, value); + } + + public Filter doubleEquals(Object value) { + return doubleEquals(field.newValue(value)); + } + + public Filter notDoubleEquals(FieldValue value) { + return newFilter(true, Operator.DOUBLE_EQUALS, value); + } + + public Filter notDoubleEquals(Object value) { + return notDoubleEquals(field.newValue(value)); + } + + public Filter greater(FieldValue value) { + return newFilter(false, Operator.GREATER, value); + } + + public Filter greater(Object value) { + return greater(field.newValue(value)); + } + + public Filter notGreater(FieldValue value) { + return newFilter(true, Operator.GREATER, value); + } + + public Filter notGreater(Object value) { + return notGreater(field.newValue(value)); + } + + public Filter greaterOrEqual(FieldValue value) { + return newFilter(false, Operator.GREATER_OR_EQUAL, value); + } + + public Filter greaterOrEqual(Object value) { + return greaterOrEqual(field.newValue(value)); + } + + public Filter notGreaterOrEqual(FieldValue value) { + return newFilter(true, Operator.GREATER_OR_EQUAL, value); + } + + public Filter notGreaterOrEqual(Object value) { + return notGreaterOrEqual(field.newValue(value)); + } + + public Filter less(FieldValue value) { + return newFilter(false, Operator.LESS, value); + } + + public Filter less(Object value) { + return less(field.newValue(value)); + } + + public Filter notLess(FieldValue value) { + 
return newFilter(true, Operator.LESS, value); + } + + public Filter notLess(Object value) { + return notLess(field.newValue(value)); + } + + public Filter lessOrEqual(FieldValue value) { + return newFilter(false, Operator.LESS_OR_EQUAL, value); + } + + public Filter lessOrEqual(Object value) { + return lessOrEqual(field.newValue(value)); + } + + public Filter notLessOrEqual(FieldValue value) { + return newFilter(true, Operator.LESS_OR_EQUAL, value); + } + + public Filter notLessOrEqual(Object value) { + return notLessOrEqual(field.newValue(value)); + } + + private Filter newFilter(boolean not, Operator operator, FieldValue value) { + return new Filter(ignoreCase, not, field, operator, value); + } + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/HBTop.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/HBTop.java new file mode 100644 index 000000000000..f7bd47fddf34 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/HBTop.java @@ -0,0 +1,141 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop; + +import java.io.InputStream; +import java.util.Objects; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; +import org.apache.log4j.PropertyConfigurator; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; +import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser; +import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter; +import org.apache.hbase.thirdparty.org.apache.commons.cli.Options; + + +/** + * A real-time monitoring tool for HBase like Unix top command. 
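As a rough usage sketch of the Filter API above (not part of this patch; the record variable below is a hypothetical Record), a filter can come either from a parsed expression or from the builder:
  // assumes imports of Filter, Record and Field from org.apache.hadoop.hbase.hbtop / .field
  Filter parsed = Filter.parse("NAMESPACE==default", false);   // returns null on malformed input
  Filter built = Filter.newBuilder(Field.NAMESPACE, true)      // ignoreCase = true
      .doubleEquals(Field.NAMESPACE.newValue("default"));
  boolean matches = parsed != null && parsed.execute(record);  // 'record' is a hypothetical Record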
+ */
+@InterfaceAudience.Private
+public class HBTop extends Configured implements Tool {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HBTop.class);
+
+  public HBTop() {
+    this(HBaseConfiguration.create());
+  }
+
+  public HBTop(Configuration conf) {
+    super(Objects.requireNonNull(conf));
+  }
+
+  @Override
+  public int run(String[] args) throws Exception {
+    // In order to change the log level forcibly, read a custom log4j configuration file
+    InputStream log4jConfigInputStream = this.getClass().getClassLoader()
+      .getResourceAsStream("log4j-hbtop.properties");
+    PropertyConfigurator.configure(log4jConfigInputStream);
+
+    long initialRefreshDelay = 3 * 1000;
+    Mode initialMode = Mode.REGION;
+    try {
+      // Command line options
+      Options opts = new Options();
+      opts.addOption("help", false,
+        "Print usage; for help while the tool is running press 'h'");
+      opts.addOption("delay", true,
+        "The refresh delay (in seconds); default is 3 seconds");
+      opts.addOption("mode", true,
+        "The mode; n (Namespace)|t (Table)|r (Region)|s (RegionServer)"
+          + ", default is r (Region)");
+
+      CommandLine commandLine = new DefaultParser().parse(opts, args);
+
+      if (commandLine.hasOption("help")) {
+        printUsage(opts);
+        return 0;
+      }
+
+      if (commandLine.hasOption("delay")) {
+        int delay = 0;
+        try {
+          delay = Integer.parseInt(commandLine.getOptionValue("delay"));
+        } catch (NumberFormatException ignored) {
+        }
+
+        if (delay < 1) {
+          LOGGER.warn("Delay set too low or invalid, using default");
+        } else {
+          initialRefreshDelay = delay * 1000;
+        }
+      }
+
+      if (commandLine.hasOption("mode")) {
+        String mode = commandLine.getOptionValue("mode");
+        switch (mode) {
+          case "n":
+            initialMode = Mode.NAMESPACE;
+            break;
+
+          case "t":
+            initialMode = Mode.TABLE;
+            break;
+
+          case "r":
+            initialMode = Mode.REGION;
+            break;
+
+          case "s":
+            initialMode = Mode.REGION_SERVER;
+            break;
+
+          default:
+            LOGGER.warn("Mode set invalid, using default");
+            break;
+        }
+      }
+    } catch (Exception e) {
+      LOGGER.error("Unable to parse options", e);
+      return 1;
+    }
+
+    try (Screen screen = new Screen(getConf(), initialRefreshDelay, initialMode)) {
+      screen.run();
+    }
+
+    return 0;
+  }
+
+  private void printUsage(Options opts) {
+    new HelpFormatter().printHelp("hbase top", opts);
+  }
+
+  public static void main(String[] args) throws Exception {
+    int res = ToolRunner.run(new HBTop(), args);
+    System.exit(res);
+  }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Record.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Record.java
new file mode 100644
index 000000000000..c3730e0f03b8
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/Record.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
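As a usage sketch (not part of this patch), the HBTop class above is what the new "top" command in bin/hbase runs; a roughly equivalent programmatic launch would be:
  // assumes org.apache.hadoop.util.ToolRunner and the HBTop class shown above
  // note: this starts the interactive screen, so it needs a real terminal
  int exitCode = ToolRunner.run(new HBTop(), new String[] {"-mode", "t", "-delay", "5"});
  System.exit(exitCode);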
+ */ +package org.apache.hadoop.hbase.hbtop; + +import java.util.Collection; +import java.util.Collections; +import java.util.EnumMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Stream; +import javax.validation.constraints.NotNull; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldValue; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents a record of the metrics in the top screen. + */ +@InterfaceAudience.Private +public class Record implements Map { + + private Map values; + + public static final class Entry implements Map.Entry { + private final Field key; + private FieldValue value; + + private Entry(Field key, FieldValue value) { + this.key = Objects.requireNonNull(key); + this.value = Objects.requireNonNull(value); + } + + @Override + public Field getKey() { + return key; + } + + @Override + public FieldValue getValue() { + return value; + } + + @Override + public FieldValue setValue(FieldValue value) { + FieldValue oldValue = this.value; + this.value = value; + return oldValue; + } + } + + public static Entry entry(Field field, Object value) { + return new Entry(field, field.newValue(value)); + } + + public static Entry entry(Field field, FieldValue value) { + return new Entry(field, value); + } + + public static Record ofEntries(List entries) { + return ofEntries(entries.stream()); + } + + public static Record ofEntries(Entry... entries) { + return ofEntries(Stream.of(entries)); + } + + public static Record ofEntries(Stream entries) { + return entries + .collect(Record::new, (r, e) -> r.put(e.getKey(), e.getValue()), (r1, r2) -> {}); + } + + public Record() { + this(new EnumMap<>(Field.class)); + } + + private Record(Map values) { + this.values = values; + } + + @Override + public int size() { + return values.size(); + } + + @Override + public boolean isEmpty() { + return values.isEmpty(); + } + + @Override + public boolean containsKey(Object key) { + return values.containsKey(key); + } + + @Override + public boolean containsValue(Object value) { + return values.containsValue(value); + } + + @Override + public FieldValue get(Object key) { + return values.get(key); + } + + @Override + public FieldValue put(Field key, FieldValue value) { + return values.put(key, value); + } + + public FieldValue put(Field key, Object value) { + return values.put(key, key.newValue(value)); + } + + @Override + public FieldValue remove(Object key) { + return values.remove(key); + } + + @Override + public void putAll(@NotNull Map m) { + values.putAll(m); + } + + @Override + public void clear() { + values.clear(); + } + + @Override + @NotNull + public Set keySet() { + return values.keySet(); + } + + @Override + @NotNull + public Collection values() { + return values.values(); + } + + @Override + @NotNull + public Set> entrySet() { + return values.entrySet(); + } + + public Record combine(Record o) { + return ofEntries(values.keySet().stream() + .map(k -> { + switch (k.getFieldValueType()) { + case STRING: + return entry(k, values.get(k)); + default: + return entry(k, values.get(k).plus(o.values.get(k))); + } + })); + } + + public Record toImmutable() { + return new Record(Collections.unmodifiableMap(values)); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/Field.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/Field.java new file mode 100644 index 000000000000..6e5f66f6244a --- /dev/null +++ 
b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/Field.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.field; + +import java.util.Objects; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents fields that are displayed in the top screen. + */ +@InterfaceAudience.Private +public enum Field { + REGION_NAME("RNAME", "Region Name", true, true, FieldValueType.STRING), + NAMESPACE("NAMESPACE", "Namespace Name", true, true, FieldValueType.STRING), + TABLE("TABLE", "Table Name", true, true, FieldValueType.STRING), + START_CODE("SCODE", "Start Code", false, true, FieldValueType.STRING), + REPLICA_ID("REPID", "Replica ID", false, false, FieldValueType.STRING), + REGION("REGION", "Encoded Region Name", false, true, FieldValueType.STRING), + REGION_SERVER("RS", "Short Region Server Name", true, true, FieldValueType.STRING), + LONG_REGION_SERVER("LRS", "Long Region Server Name", true, true, FieldValueType.STRING), + REQUEST_COUNT_PER_SECOND("#REQ/S", "Request Count per second", false, false, + FieldValueType.LONG), + READ_REQUEST_COUNT_PER_SECOND("#READ/S", "Read Request Count per second", false, false, + FieldValueType.LONG), + FILTERED_READ_REQUEST_COUNT_PER_SECOND("#FREAD/S", "Filtered Read Request Count per second", + false, false, FieldValueType.LONG), + WRITE_REQUEST_COUNT_PER_SECOND("#WRITE/S", "Write Request Count per second", false, false, + FieldValueType.LONG), + STORE_FILE_SIZE("SF", "StoreFile Size", false, false, FieldValueType.SIZE), + UNCOMPRESSED_STORE_FILE_SIZE("USF", "Uncompressed StoreFile Size", false, false, + FieldValueType.SIZE), + NUM_STORE_FILES("#SF", "Number of StoreFiles", false, false, FieldValueType.INTEGER), + MEM_STORE_SIZE("MEMSTORE", "MemStore Size", false, false, FieldValueType.SIZE), + LOCALITY("LOCALITY", "Block Locality", false, false, FieldValueType.FLOAT), + START_KEY("SKEY", "Start Key", true, true, FieldValueType.STRING), + COMPACTING_CELL_COUNT("#COMPingCELL", "Compacting Cell Count", false, false, + FieldValueType.LONG), + COMPACTED_CELL_COUNT("#COMPedCELL", "Compacted Cell Count", false, false, FieldValueType.LONG), + COMPACTION_PROGRESS("%COMP", "Compaction Progress", false, false, FieldValueType.PERCENT), + LAST_MAJOR_COMPACTION_TIME("LASTMCOMP", "Last Major Compaction Time", false, true, + FieldValueType.STRING), + REGION_COUNT("#REGION", "Region Count", false, false, FieldValueType.INTEGER), + USED_HEAP_SIZE("UHEAP", "Used Heap Size", false, false, FieldValueType.SIZE), + MAX_HEAP_SIZE("MHEAP", "Max Heap Size", false, false, FieldValueType.SIZE); + + private final String header; + private final String description; + private final boolean autoAdjust; + private final boolean leftJustify; + private final 
FieldValueType fieldValueType; + + Field(String header, String description, boolean autoAdjust, boolean leftJustify, + FieldValueType fieldValueType) { + this.header = Objects.requireNonNull(header); + this.description = Objects.requireNonNull(description); + this.autoAdjust = autoAdjust; + this.leftJustify = leftJustify; + this.fieldValueType = Objects.requireNonNull(fieldValueType); + } + + public FieldValue newValue(Object value) { + return new FieldValue(value, fieldValueType); + } + + public String getHeader() { + return header; + } + + public String getDescription() { + return description; + } + + public boolean isAutoAdjust() { + return autoAdjust; + } + + public boolean isLeftJustify() { + return leftJustify; + } + + public FieldValueType getFieldValueType() { + return fieldValueType; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldInfo.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldInfo.java new file mode 100644 index 000000000000..3f0e5f7ad1d3 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldInfo.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.field; + +import java.util.Objects; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Information about a field. + * + * This has a {@link Field} itself and additional information (e.g. {@code defaultLength} and + * {@code displayByDefault}). This additional information is different between the + * {@link org.apache.hadoop.hbase.hbtop.mode.Mode}s even when the field is the same. That's why the + * additional information is separated from {@link Field}. + */ +@InterfaceAudience.Private +public class FieldInfo { + private final Field field; + private final int defaultLength; + private final boolean displayByDefault; + + public FieldInfo(Field field, int defaultLength, boolean displayByDefault) { + this.field = Objects.requireNonNull(field); + this.defaultLength = defaultLength; + this.displayByDefault = displayByDefault; + } + + public Field getField() { + return field; + } + + public int getDefaultLength() { + return defaultLength; + } + + public boolean isDisplayByDefault() { + return displayByDefault; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValue.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValue.java new file mode 100644 index 000000000000..70770fc29c66 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValue.java @@ -0,0 +1,256 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
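To illustrate how Record, Field and FieldValue fit together (a sketch, not part of this patch), two records for the same namespace can be merged with combine(), which sums every non-STRING value:
  // assumes imports of Record and Field from org.apache.hadoop.hbase.hbtop / .field
  Record first = Record.ofEntries(
      Record.entry(Field.NAMESPACE, "default"),
      Record.entry(Field.REGION_COUNT, 3));
  Record second = Record.ofEntries(
      Record.entry(Field.NAMESPACE, "default"),
      Record.entry(Field.REGION_COUNT, 5));
  Record merged = first.combine(second);   // REGION_COUNT becomes 8, NAMESPACE keeps "default"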
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.field; + +import java.util.Objects; +import javax.validation.constraints.NotNull; +import org.apache.hadoop.hbase.Size; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents a value of a field. + * + * The type of a value is defined by {@link FieldValue}. + */ +@InterfaceAudience.Private +public final class FieldValue implements Comparable { + + private final Object value; + private final FieldValueType type; + + FieldValue(Object value, FieldValueType type) { + Objects.requireNonNull(value); + this.type = Objects.requireNonNull(type); + + switch (type) { + case STRING: + if (value instanceof String) { + this.value = value; + break; + } + throw new IllegalArgumentException("invalid type"); + + case INTEGER: + if (value instanceof Integer) { + this.value = value; + break; + } else if (value instanceof String) { + this.value = Integer.valueOf((String) value); + break; + } + throw new IllegalArgumentException("invalid type"); + + case LONG: + if (value instanceof Long) { + this.value = value; + break; + } else if (value instanceof String) { + this.value = Long.valueOf((String) value); + break; + } + throw new IllegalArgumentException("invalid type"); + + case FLOAT: + if (value instanceof Float) { + this.value = value; + break; + } else if (value instanceof String) { + this.value = Float.valueOf((String) value); + break; + } + throw new IllegalArgumentException("invalid type"); + + case SIZE: + if (value instanceof Size) { + this.value = value; + break; + } else if (value instanceof String) { + this.value = parseSizeString((String) value); + break; + } + throw new IllegalArgumentException("invalid type"); + + case PERCENT: + if (value instanceof Float) { + this.value = value; + break; + } else if (value instanceof String) { + this.value = parsePercentString((String) value); + break; + } + throw new IllegalArgumentException("invalid type"); + + default: + throw new AssertionError(); + } + } + + private Size parseSizeString(String sizeString) { + if (sizeString.length() < 3) { + throw new IllegalArgumentException("invalid size"); + } + + String valueString = sizeString.substring(0, sizeString.length() - 2); + String unitSimpleName = sizeString.substring(sizeString.length() - 2, sizeString.length()); + return new Size(Double.valueOf(valueString), convertToUnit(unitSimpleName)); + } + + private Size.Unit convertToUnit(String unitSimpleName) { + for (Size.Unit unit: Size.Unit.values()) { + if (unitSimpleName.equals(unit.getSimpleName())) { + return unit; + } + } + throw new IllegalArgumentException("invalid size"); + } + + private Float parsePercentString(String percentString) { + if (percentString.endsWith("%")) { + percentString = percentString.substring(0, percentString.length() - 1); + } + return Float.valueOf(percentString); + } + + public 
String asString() { + return toString(); + } + + public int asInt() { + return (Integer) value; + } + + public long asLong() { + return (Long) value; + } + + public float asFloat() { + return (Float) value; + } + + public Size asSize() { + return (Size) value; + } + + @Override + public String toString() { + switch (type) { + case STRING: + case INTEGER: + case LONG: + case FLOAT: + case SIZE: + return value.toString(); + + case PERCENT: + return String.format("%.2f", (Float) value) + "%"; + + default: + throw new AssertionError(); + } + } + + @Override + public int compareTo(@NotNull FieldValue o) { + if (type != o.type) { + throw new IllegalArgumentException("invalid type"); + } + + switch (type) { + case STRING: + return ((String) value).compareTo((String) o.value); + + case INTEGER: + return ((Integer) value).compareTo((Integer) o.value); + + case LONG: + return ((Long) value).compareTo((Long) o.value); + + case FLOAT: + case PERCENT: + return ((Float) value).compareTo((Float) o.value); + + case SIZE: + return ((Size) value).compareTo((Size) o.value); + + default: + throw new AssertionError(); + } + } + + @Override + public boolean equals(Object obj) { + return obj instanceof FieldValue && compareTo((FieldValue) obj) == 0; + } + + @Override + public int hashCode() { + return value.hashCode(); + } + + public FieldValue plus(FieldValue o) { + if (type != o.type) { + throw new IllegalArgumentException("invalid type"); + } + + switch (type) { + case STRING: + return new FieldValue(((String) value).concat((String) o.value), type); + + case INTEGER: + return new FieldValue(((Integer) value) + ((Integer) o.value), type); + + case LONG: + return new FieldValue(((Long) value) + ((Long) o.value), type); + + case FLOAT: + case PERCENT: + return new FieldValue(((Float) value) + ((Float) o.value), type); + + case SIZE: + Size size = (Size) value; + Size oSize = (Size) o.value; + Size.Unit unit = size.getUnit(); + return new FieldValue(new Size(size.get(unit) + oSize.get(unit), unit), type); + + default: + throw new AssertionError(); + } + } + + public int compareToIgnoreCase(FieldValue o) { + if (type != o.type) { + throw new IllegalArgumentException("invalid type"); + } + + switch (type) { + case STRING: + return ((String) value).compareToIgnoreCase((String) o.value); + + case INTEGER: + case LONG: + case FLOAT: + case SIZE: + case PERCENT: + return compareTo(o); + + default: + throw new AssertionError(); + } + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValueType.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValueType.java new file mode 100644 index 000000000000..e2edae87b800 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/field/FieldValueType.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.field; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents the type of a {@link FieldValue}. + */ +@InterfaceAudience.Private +public enum FieldValueType { + STRING, INTEGER, LONG, FLOAT, SIZE, PERCENT +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/DrillDownInfo.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/DrillDownInfo.java new file mode 100644 index 000000000000..0f5cafaa0d4a --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/DrillDownInfo.java @@ -0,0 +1,51 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Information about drilling down. + * + * When drilling down, going to next {@link Mode} with initial {@link Filter}s. + */ +@InterfaceAudience.Private +public class DrillDownInfo { + private final Mode nextMode; + private final List initialFilters; + + public DrillDownInfo(Mode nextMode, List initialFilters) { + this.nextMode = Objects.requireNonNull(nextMode); + this.initialFilters = Collections.unmodifiableList(new ArrayList<>(initialFilters)); + } + + public Mode getNextMode() { + return nextMode; + } + + public List getInitialFilters() { + return initialFilters; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/Mode.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/Mode.java new file mode 100644 index 000000000000..1290e6916cb8 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/Mode.java @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
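A short sketch of FieldValue parsing and arithmetic (not part of this patch; it assumes the usual Size.Unit simple names such as "MB"):
  // assumes imports of Field and FieldValue from org.apache.hadoop.hbase.hbtop.field
  FieldValue tenMb = Field.STORE_FILE_SIZE.newValue("10.0MB");     // SIZE, parsed from the unit suffix
  FieldValue fiveMb = Field.STORE_FILE_SIZE.newValue("5.0MB");
  FieldValue total = tenMb.plus(fiveMb);                           // 15.0MB
  boolean bigger = tenMb.compareTo(fiveMb) > 0;                    // true
  FieldValue progress = Field.COMPACTION_PROGRESS.newValue(42.5f); // PERCENT, rendered as "42.50%"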
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; +import java.util.Objects; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents a display mode in the top screen. + */ +@InterfaceAudience.Private +public enum Mode { + NAMESPACE("Namespace", "Record per Namespace", new NamespaceModeStrategy()), + TABLE("Table", "Record per Table", new TableModeStrategy()), + REGION("Region", "Record per Region", new RegionModeStrategy()), + REGION_SERVER("RegionServer", "Record per RegionServer", new RegionServerModeStrategy()); + + private final String header; + private final String description; + private final ModeStrategy modeStrategy; + + Mode(String header, String description, ModeStrategy modeStrategy) { + this.header = Objects.requireNonNull(header); + this.description = Objects.requireNonNull(description); + this.modeStrategy = Objects.requireNonNull(modeStrategy); + } + + public String getHeader() { + return header; + } + + public String getDescription() { + return description; + } + + public List getRecords(ClusterMetrics clusterMetrics) { + return modeStrategy.getRecords(clusterMetrics); + } + + public List getFieldInfos() { + return modeStrategy.getFieldInfos(); + } + + public Field getDefaultSortField() { + return modeStrategy.getDefaultSortField(); + } + + @Nullable + public DrillDownInfo drillDown(Record currentRecord) { + return modeStrategy.drillDown(currentRecord); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/ModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/ModeStrategy.java new file mode 100644 index 000000000000..09fa297e3033 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/ModeStrategy.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * An interface for strategy logic for {@link Mode}. 
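For orientation, a sketch of how the screen layer might consume a Mode (illustrative only; clusterMetrics is a hypothetical ClusterMetrics, e.g. from Admin.getClusterMetrics()):
  // assumes imports of Mode, DrillDownInfo, Record, Field and ClusterMetrics
  List<Record> records = Mode.REGION_SERVER.getRecords(clusterMetrics);
  Field sortField = Mode.REGION_SERVER.getDefaultSortField();       // REQUEST_COUNT_PER_SECOND
  DrillDownInfo drill = Mode.REGION_SERVER.drillDown(records.get(0));
  // drill.getNextMode() is Mode.REGION, pre-filtered to the selected region server,
  // as implemented by the RegionServer strategy later in this patch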
+ */ +@InterfaceAudience.Private +interface ModeStrategy { + List getFieldInfos(); + Field getDefaultSortField(); + List getRecords(ClusterMetrics clusterMetrics); + @Nullable DrillDownInfo drillDown(Record selectedRecord); +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeStrategy.java new file mode 100644 index 000000000000..8f649575da06 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeStrategy.java @@ -0,0 +1,97 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Implementation for {@link ModeStrategy} for Namespace Mode. 
+ */ +@InterfaceAudience.Private +public final class NamespaceModeStrategy implements ModeStrategy { + + private final List fieldInfos = Arrays.asList( + new FieldInfo(Field.NAMESPACE, 0, true), + new FieldInfo(Field.REGION_COUNT, 7, true), + new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.STORE_FILE_SIZE, 13, true), + new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 15, false), + new FieldInfo(Field.NUM_STORE_FILES, 7, true), + new FieldInfo(Field.MEM_STORE_SIZE, 11, true) + ); + + private final RegionModeStrategy regionModeStrategy = new RegionModeStrategy(); + + NamespaceModeStrategy(){ + } + + @Override + public List getFieldInfos() { + return fieldInfos; + } + + @Override + public Field getDefaultSortField() { + return Field.REQUEST_COUNT_PER_SECOND; + } + + @Override + public List getRecords(ClusterMetrics clusterMetrics) { + // Get records from RegionModeStrategy and add REGION_COUNT field + List records = regionModeStrategy.getRecords(clusterMetrics).stream() + .map(record -> + Record.ofEntries(fieldInfos.stream() + .filter(fi -> record.containsKey(fi.getField())) + .map(fi -> Record.entry(fi.getField(), record.get(fi.getField()))))) + .peek(r -> r.put(Field.REGION_COUNT, 1)) + .collect(Collectors.toList()); + + // Aggregation by NAMESPACE field + return records.stream() + .collect(Collectors.groupingBy(r -> r.get(Field.NAMESPACE).asString())) + .entrySet().stream() + .flatMap( + e -> e.getValue().stream() + .reduce(Record::combine) + .map(Stream::of) + .orElse(Stream.empty())) + .collect(Collectors.toList()); + } + + @Override + public DrillDownInfo drillDown(Record selectedRecord) { + List initialFilters = + Collections.singletonList(Filter.newBuilder(Field.NAMESPACE) + .doubleEquals(selectedRecord.get(Field.NAMESPACE))); + return new DrillDownInfo(Mode.TABLE, initialFilters); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeStrategy.java new file mode 100644 index 000000000000..3c3ed4d44bc8 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeStrategy.java @@ -0,0 +1,183 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.commons.lang3.time.FastDateFormat; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.RegionMetrics; +import org.apache.hadoop.hbase.ServerMetrics; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.RegionInfo; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Implementation for {@link ModeStrategy} for Region Mode. + */ +@InterfaceAudience.Private +public final class RegionModeStrategy implements ModeStrategy { + + private final List fieldInfos = Arrays.asList( + new FieldInfo(Field.REGION_NAME, 0, false), + new FieldInfo(Field.NAMESPACE, 0, true), + new FieldInfo(Field.TABLE, 0, true), + new FieldInfo(Field.START_CODE, 13, false), + new FieldInfo(Field.REPLICA_ID, 5, false), + new FieldInfo(Field.REGION, 32, true), + new FieldInfo(Field.REGION_SERVER, 0, true), + new FieldInfo(Field.LONG_REGION_SERVER, 0, false), + new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.STORE_FILE_SIZE, 10, true), + new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 12, false), + new FieldInfo(Field.NUM_STORE_FILES,4, true), + new FieldInfo(Field.MEM_STORE_SIZE, 8, true), + new FieldInfo(Field.LOCALITY, 8, true), + new FieldInfo(Field.START_KEY, 0, false), + new FieldInfo(Field.COMPACTING_CELL_COUNT, 12, false), + new FieldInfo(Field.COMPACTED_CELL_COUNT, 12, false), + new FieldInfo(Field.COMPACTION_PROGRESS, 7, false), + new FieldInfo(Field.LAST_MAJOR_COMPACTION_TIME, 19, false) + ); + + private final Map requestCountPerSecondMap = new HashMap<>(); + + RegionModeStrategy() { + } + + @Override + public List getFieldInfos() { + return fieldInfos; + } + + @Override + public Field getDefaultSortField() { + return Field.REQUEST_COUNT_PER_SECOND; + } + + @Override + public List getRecords(ClusterMetrics clusterMetrics) { + List ret = new ArrayList<>(); + for (ServerMetrics sm : clusterMetrics.getLiveServerMetrics().values()) { + long lastReportTimestamp = sm.getLastReportTimestamp(); + for (RegionMetrics rm : sm.getRegionMetrics().values()) { + ret.add(createRecord(sm, rm, lastReportTimestamp)); + } + } + return ret; + } + + private Record createRecord(ServerMetrics serverMetrics, RegionMetrics regionMetrics, + long lastReportTimestamp) { + + Record ret = new Record(); + + String regionName = regionMetrics.getNameAsString(); + ret.put(Field.REGION_NAME, regionName); + + String namespaceName = ""; + String tableName = ""; + String region = ""; + String startKey = ""; + String startCode = ""; + String replicaId = ""; + try { + byte[][] elements = RegionInfo.parseRegionName(regionMetrics.getRegionName()); + TableName tn = TableName.valueOf(elements[0]); + namespaceName = tn.getNamespaceAsString(); + tableName = tn.getQualifierAsString(); + startKey = Bytes.toStringBinary(elements[1]); + startCode = Bytes.toString(elements[2]); + replicaId = elements.length 
== 4 ? + Integer.valueOf(Bytes.toString(elements[3])).toString() : ""; + region = RegionInfo.encodeRegionName(regionMetrics.getRegionName()); + } catch (IOException ignored) { + } + + ret.put(Field.NAMESPACE, namespaceName); + ret.put(Field.TABLE, tableName); + ret.put(Field.START_CODE, startCode); + ret.put(Field.REPLICA_ID, replicaId); + ret.put(Field.REGION, region); + ret.put(Field.START_KEY, startKey); + ret.put(Field.REGION_SERVER, serverMetrics.getServerName().toShortString()); + ret.put(Field.LONG_REGION_SERVER, serverMetrics.getServerName().getServerName()); + + RequestCountPerSecond requestCountPerSecond = requestCountPerSecondMap.get(regionName); + if (requestCountPerSecond == null) { + requestCountPerSecond = new RequestCountPerSecond(); + requestCountPerSecondMap.put(regionName, requestCountPerSecond); + } + requestCountPerSecond.refresh(lastReportTimestamp, regionMetrics.getReadRequestCount(), + regionMetrics.getFilteredReadRequestCount(), regionMetrics.getWriteRequestCount()); + + ret.put(Field.READ_REQUEST_COUNT_PER_SECOND, + requestCountPerSecond.getReadRequestCountPerSecond()); + ret.put(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, + requestCountPerSecond.getFilteredReadRequestCountPerSecond()); + ret.put(Field.WRITE_REQUEST_COUNT_PER_SECOND, + requestCountPerSecond.getWriteRequestCountPerSecond()); + ret.put(Field.REQUEST_COUNT_PER_SECOND, + requestCountPerSecond.getRequestCountPerSecond()); + + ret.put(Field.STORE_FILE_SIZE, regionMetrics.getStoreFileSize()); + ret.put(Field.UNCOMPRESSED_STORE_FILE_SIZE, regionMetrics.getUncompressedStoreFileSize()); + ret.put(Field.NUM_STORE_FILES, regionMetrics.getStoreFileCount()); + ret.put(Field.MEM_STORE_SIZE, regionMetrics.getMemStoreSize()); + ret.put(Field.LOCALITY, regionMetrics.getDataLocality()); + + long compactingCellCount = regionMetrics.getCompactingCellCount(); + long compactedCellCount = regionMetrics.getCompactedCellCount(); + float compactionProgress = 0; + if (compactedCellCount > 0) { + compactionProgress = 100 * ((float) compactedCellCount / compactingCellCount); + } + + ret.put(Field.COMPACTING_CELL_COUNT, compactingCellCount); + ret.put(Field.COMPACTED_CELL_COUNT, compactedCellCount); + ret.put(Field.COMPACTION_PROGRESS, compactionProgress); + + FastDateFormat df = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss"); + long lastMajorCompactionTimestamp = regionMetrics.getLastMajorCompactionTimestamp(); + + ret.put(Field.LAST_MAJOR_COMPACTION_TIME, + lastMajorCompactionTimestamp == 0 ? "" : df.format(lastMajorCompactionTimestamp)); + + return ret; + } + + @Nullable + @Override + public DrillDownInfo drillDown(Record selectedRecord) { + // do nothing + return null; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeStrategy.java new file mode 100644 index 000000000000..599988969326 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeStrategy.java @@ -0,0 +1,115 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.ServerMetrics; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Implementation for {@link ModeStrategy} for RegionServer Mode. + */ +@InterfaceAudience.Private +public final class RegionServerModeStrategy implements ModeStrategy { + + private final List fieldInfos = Arrays.asList( + new FieldInfo(Field.REGION_SERVER, 0, true), + new FieldInfo(Field.LONG_REGION_SERVER, 0, false), + new FieldInfo(Field.REGION_COUNT, 7, true), + new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.STORE_FILE_SIZE, 13, true), + new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 15, false), + new FieldInfo(Field.NUM_STORE_FILES, 7, true), + new FieldInfo(Field.MEM_STORE_SIZE, 11, true), + new FieldInfo(Field.USED_HEAP_SIZE, 11, true), + new FieldInfo(Field.MAX_HEAP_SIZE, 11, true) + ); + + private final RegionModeStrategy regionModeStrategy = new RegionModeStrategy(); + + RegionServerModeStrategy(){ + } + + @Override + public List getFieldInfos() { + return fieldInfos; + } + + @Override + public Field getDefaultSortField() { + return Field.REQUEST_COUNT_PER_SECOND; + } + + @Override + public List getRecords(ClusterMetrics clusterMetrics) { + // Get records from RegionModeStrategy and add REGION_COUNT field + List records = regionModeStrategy.getRecords(clusterMetrics).stream() + .map(record -> + Record.ofEntries(fieldInfos.stream() + .filter(fi -> record.containsKey(fi.getField())) + .map(fi -> Record.entry(fi.getField(), record.get(fi.getField()))))) + .peek(r -> r.put(Field.REGION_COUNT, 1)) + .collect(Collectors.toList()); + + // Aggregation by LONG_REGION_SERVER field + Map retMap = records.stream() + .collect(Collectors.groupingBy(r -> r.get(Field.LONG_REGION_SERVER).asString())) + .entrySet().stream() + .flatMap( + e -> e.getValue().stream() + .reduce(Record::combine) + .map(Stream::of) + .orElse(Stream.empty())) + .collect(Collectors.toMap(r -> r.get(Field.LONG_REGION_SERVER).asString(), r -> r)); + + // Add USED_HEAP_SIZE field and MAX_HEAP_SIZE field + for (ServerMetrics sm : clusterMetrics.getLiveServerMetrics().values()) { + Record record = retMap.get(sm.getServerName().getServerName()); + if (record == null) { + continue; + } + record.put(Field.USED_HEAP_SIZE, sm.getUsedHeapSize()); + record.put(Field.MAX_HEAP_SIZE, sm.getMaxHeapSize()); + } + + return new ArrayList<>(retMap.values()); + } + + @Override + public 
DrillDownInfo drillDown(Record selectedRecord) { + List initialFilters = Collections.singletonList(Filter.newBuilder(Field.REGION_SERVER) + .doubleEquals(selectedRecord.get(Field.REGION_SERVER))); + return new DrillDownInfo(Mode.REGION, initialFilters); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecond.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecond.java new file mode 100644 index 000000000000..508cf829bc9d --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecond.java @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Utility class for calculating request counts per second. + */ +@InterfaceAudience.Private +public class RequestCountPerSecond { + private long previousLastReportTimestamp; + private long previousReadRequestCount; + private long previousFilteredReadRequestCount; + private long previousWriteRequestCount; + private long readRequestCountPerSecond; + private long filteredReadRequestCountPerSecond; + private long writeRequestCountPerSecond; + + public void refresh(long lastReportTimestamp, long readRequestCount, + long filteredReadRequestCount, long writeRequestCount) { + if (previousLastReportTimestamp == 0) { + previousLastReportTimestamp = lastReportTimestamp; + previousReadRequestCount = readRequestCount; + previousFilteredReadRequestCount = filteredReadRequestCount; + previousWriteRequestCount = writeRequestCount; + } else if (previousLastReportTimestamp != lastReportTimestamp) { + readRequestCountPerSecond = (readRequestCount - previousReadRequestCount) / + ((lastReportTimestamp - previousLastReportTimestamp) / 1000); + filteredReadRequestCountPerSecond = + (filteredReadRequestCount - previousFilteredReadRequestCount) / + ((lastReportTimestamp - previousLastReportTimestamp) / 1000); + writeRequestCountPerSecond = (writeRequestCount - previousWriteRequestCount) / + ((lastReportTimestamp - previousLastReportTimestamp) / 1000); + + previousLastReportTimestamp = lastReportTimestamp; + previousReadRequestCount = readRequestCount; + previousFilteredReadRequestCount = filteredReadRequestCount; + previousWriteRequestCount = writeRequestCount; + } + } + + public long getReadRequestCountPerSecond() { + return readRequestCountPerSecond < 0 ? 0 : readRequestCountPerSecond; + } + + public long getFilteredReadRequestCountPerSecond() { + return filteredReadRequestCountPerSecond < 0 ? 0 : filteredReadRequestCountPerSecond; + } + + public long getWriteRequestCountPerSecond() { + return writeRequestCountPerSecond < 0 ? 
0 : writeRequestCountPerSecond; + } + + public long getRequestCountPerSecond() { + return getReadRequestCountPerSecond() + getWriteRequestCountPerSecond(); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/TableModeStrategy.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/TableModeStrategy.java new file mode 100644 index 000000000000..4e1f0e2340a1 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/mode/TableModeStrategy.java @@ -0,0 +1,103 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Implementation for {@link ModeStrategy} for Table Mode. 
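+ *
+ * Records are built by {@link RegionModeStrategy}, given a REGION_COUNT of 1 per region, and then
+ * aggregated by the NAMESPACE and TABLE fields; drilling down on a row switches to Region mode
+ * filtered by that namespace and table.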
+ */ +@InterfaceAudience.Private +public final class TableModeStrategy implements ModeStrategy { + + private final List fieldInfos = Arrays.asList( + new FieldInfo(Field.NAMESPACE, 0, true), + new FieldInfo(Field.TABLE, 0, true), + new FieldInfo(Field.REGION_COUNT, 7, true), + new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.READ_REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND, 8, true), + new FieldInfo(Field.WRITE_REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.STORE_FILE_SIZE, 13, true), + new FieldInfo(Field.UNCOMPRESSED_STORE_FILE_SIZE, 15, false), + new FieldInfo(Field.NUM_STORE_FILES, 7, true), + new FieldInfo(Field.MEM_STORE_SIZE, 11, true) + ); + + private final RegionModeStrategy regionModeStrategy = new RegionModeStrategy(); + + TableModeStrategy() { + } + + @Override + public List getFieldInfos() { + return fieldInfos; + } + + @Override + public Field getDefaultSortField() { + return Field.REQUEST_COUNT_PER_SECOND; + } + + @Override + public List getRecords(ClusterMetrics clusterMetrics) { + // Get records from RegionModeStrategy and add REGION_COUNT field + List records = regionModeStrategy.getRecords(clusterMetrics).stream() + .map(record -> + Record.ofEntries(fieldInfos.stream() + .filter(fi -> record.containsKey(fi.getField())) + .map(fi -> Record.entry(fi.getField(), record.get(fi.getField()))))) + .peek(r -> r.put(Field.REGION_COUNT, 1)) + .collect(Collectors.toList()); + + // Aggregation by NAMESPACE field and TABLE field + return records.stream() + .collect(Collectors.groupingBy(r -> { + String namespace = r.get(Field.NAMESPACE).asString(); + String table = r.get(Field.TABLE).asString(); + return TableName.valueOf(namespace, table); + })) + .entrySet().stream() + .flatMap( + e -> e.getValue().stream() + .reduce(Record::combine) + .map(Stream::of) + .orElse(Stream.empty())) + .collect(Collectors.toList()); + } + + @Override + public DrillDownInfo drillDown(Record selectedRecord) { + List initialFilters = Arrays.asList( + Filter.newBuilder(Field.NAMESPACE).doubleEquals(selectedRecord.get(Field.NAMESPACE)), + Filter.newBuilder(Field.TABLE).doubleEquals(selectedRecord.get(Field.TABLE))); + return new DrillDownInfo(Mode.REGION, initialFilters); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/AbstractScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/AbstractScreenView.java new file mode 100644 index 000000000000..a3ad5f998a69 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/AbstractScreenView.java @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen; + +import java.util.Objects; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * An abstract class for {@link ScreenView} that has the common useful methods and the default + * implementations for the abstract methods. + */ +@InterfaceAudience.Private +public abstract class AbstractScreenView implements ScreenView { + + protected final Screen screen; + protected final Terminal terminal; + + public AbstractScreenView(Screen screen, Terminal terminal) { + this.screen = Objects.requireNonNull(screen); + this.terminal = Objects.requireNonNull(terminal); + } + + @Override + public void init() { + } + + @Override + public ScreenView handleKeyPress(KeyEvent keyEvent) { + return this; + } + + @Override + public ScreenView handleTimer() { + return this; + } + + protected void setTimer(long delay) { + screen.setTimer(delay); + } + + protected void cancelTimer() { + screen.cancelTimer(); + } + + public void clearTerminal() { + terminal.clear(); + } + + public void refreshTerminal() { + terminal.refresh(); + } + + public void hideCursor() { + terminal.setCursorPosition(null); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/Screen.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/Screen.java new file mode 100644 index 000000000000..cf2c82f93098 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/Screen.java @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen; + +import java.io.Closeable; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.impl.LanternaTerminal; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * This dispatches key press events and time events to the current {@link ScreenView}. 
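+ *
+ * The loop in {@link #run()} polls the terminal for key events and otherwise sleeps in 100 ms
+ * slices until the next timer fires, handing each event to the current view and switching views
+ * based on the returned {@link ScreenView}. A rough usage sketch (the caller shown here and the
+ * 3000 ms refresh delay are illustrative, not part of this patch):
+ *
+ * <pre>
+ * try (Screen screen = new Screen(HBaseConfiguration.create(), 3000, Mode.REGION)) {
+ *   screen.run();
+ * }
+ * </pre>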
+ */ +@InterfaceAudience.Private +public class Screen implements Closeable { + private static final Logger LOGGER = LoggerFactory.getLogger(Screen.class); + private static final long SLEEP_TIMEOUT_MILLISECONDS = 100; + + private final Connection connection; + private final Admin admin; + private final Terminal terminal; + + private ScreenView currentScreenView; + private Long timerTimestamp; + + public Screen(Configuration conf, long initialRefreshDelay, Mode initialMode) + throws IOException { + connection = ConnectionFactory.createConnection(conf); + admin = connection.getAdmin(); + + // Check connection + admin.getClusterMetrics(); + + // The first screen is the top screen + this.terminal = new LanternaTerminal(); + currentScreenView = new TopScreenView(this, terminal, initialRefreshDelay, admin, + initialMode); + } + + @Override + public void close() throws IOException { + try { + admin.close(); + } finally { + try { + connection.close(); + } finally { + terminal.close(); + } + } + } + + public void run() { + currentScreenView.init(); + while (true) { + try { + KeyEvent keyEvent = terminal.pollKeyEvent(); + + ScreenView nextScreenView; + if (keyEvent != null) { + // Dispatch the key event to the current screen + nextScreenView = currentScreenView.handleKeyPress(keyEvent); + } else { + if (timerTimestamp != null) { + long now = System.currentTimeMillis(); + if (timerTimestamp <= now) { + // Dispatch the timer event to the current screen + timerTimestamp = null; + nextScreenView = currentScreenView.handleTimer(); + } else { + if (timerTimestamp - now < SLEEP_TIMEOUT_MILLISECONDS) { + TimeUnit.MILLISECONDS.sleep(timerTimestamp - now); + } else { + TimeUnit.MILLISECONDS.sleep(SLEEP_TIMEOUT_MILLISECONDS); + } + continue; + } + } else { + TimeUnit.MILLISECONDS.sleep(SLEEP_TIMEOUT_MILLISECONDS); + continue; + } + } + + // If the next screen is null, then exit + if (nextScreenView == null) { + return; + } + + // If the next screen is not the previous, then go to the next screen + if (nextScreenView != currentScreenView) { + currentScreenView = nextScreenView; + currentScreenView.init(); + } + } catch (Exception e) { + LOGGER.error("Caught an exception", e); + } + } + } + + public void setTimer(long delay) { + timerTimestamp = System.currentTimeMillis() + delay; + } + + public void cancelTimer() { + timerTimestamp = null; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/ScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/ScreenView.java new file mode 100644 index 000000000000..bbc325df5d06 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/ScreenView.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hadoop.hbase.hbtop.screen;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * An interface for a screen view.
+ *
+ * A screen view handles key press events and timer events.
+ */
+@InterfaceAudience.Private
+public interface ScreenView {
+  void init();
+  @Nullable ScreenView handleKeyPress(KeyEvent keyEvent);
+  @Nullable ScreenView handleTimer();
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenter.java
new file mode 100644
index 000000000000..059719e3733e
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenter.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.field;
+
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Presentation logic for the field screen.
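+ *
+ * Tracks the sort field, the field order and the per-field display flag: arrow keys move the
+ * cursor (or, in move mode, the selected field itself), the display flag and the sort field can
+ * be switched, and the resulting state is handed back through {@link ResultListener} when the
+ * screen is left.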
+ */ +@InterfaceAudience.Private +public class FieldScreenPresenter { + + @FunctionalInterface + public interface ResultListener { + void accept(Field sortField, List fields, EnumMap fieldDisplayMap); + } + + private final FieldScreenView fieldScreenView; + private Field sortField; + private final List fields; + private final EnumMap fieldDisplayMap; + private final ResultListener resultListener; + private final ScreenView nextScreenView; + + private final int headerMaxLength; + private final int descriptionMaxLength; + + private int currentPosition; + private boolean moveMode; + + public FieldScreenPresenter(FieldScreenView fieldScreenView, Field sortField, List fields, + EnumMap fieldDisplayMap, ResultListener resultListener, + ScreenView nextScreenView) { + this.fieldScreenView = Objects.requireNonNull(fieldScreenView); + this.sortField = Objects.requireNonNull(sortField); + this.fields = new ArrayList<>(Objects.requireNonNull(fields)); + this.fieldDisplayMap = new EnumMap<>(Objects.requireNonNull(fieldDisplayMap)); + this.resultListener = Objects.requireNonNull(resultListener); + this.nextScreenView = Objects.requireNonNull(nextScreenView); + + int headerLength = 0; + int descriptionLength = 0; + for (int i = 0; i < fields.size(); i ++) { + Field field = fields.get(i); + + if (field == sortField) { + currentPosition = i; + } + + if (headerLength < field.getHeader().length()) { + headerLength = field.getHeader().length(); + } + + if (descriptionLength < field.getDescription().length()) { + descriptionLength = field.getDescription().length(); + } + } + + headerMaxLength = headerLength; + descriptionMaxLength = descriptionLength; + } + + public void init() { + fieldScreenView.hideCursor(); + fieldScreenView.clearTerminal(); + fieldScreenView.showFieldScreen(sortField.getHeader(), fields, fieldDisplayMap, + currentPosition, headerMaxLength, descriptionMaxLength, moveMode); + fieldScreenView.refreshTerminal(); + } + + public void arrowUp() { + if (currentPosition > 0) { + currentPosition -= 1; + + if (moveMode) { + Field tmp = fields.remove(currentPosition); + fields.add(currentPosition + 1, tmp); + } + + showField(currentPosition); + showField(currentPosition + 1); + fieldScreenView.refreshTerminal(); + } + } + + public void arrowDown() { + if (currentPosition < fields.size() - 1) { + currentPosition += 1; + + if (moveMode) { + Field tmp = fields.remove(currentPosition - 1); + fields.add(currentPosition, tmp); + } + + showField(currentPosition); + showField(currentPosition - 1); + fieldScreenView.refreshTerminal(); + } + } + + public void pageUp() { + if (currentPosition > 0 && !moveMode) { + int previousPosition = currentPosition; + currentPosition = 0; + showField(previousPosition); + showField(currentPosition); + fieldScreenView.refreshTerminal(); + } + } + + public void pageDown() { + if (currentPosition < fields.size() - 1 && !moveMode) { + int previousPosition = currentPosition; + currentPosition = fields.size() - 1; + showField(previousPosition); + showField(currentPosition); + fieldScreenView.refreshTerminal(); + } + } + + public void turnOnMoveMode() { + moveMode = true; + showField(currentPosition); + fieldScreenView.refreshTerminal(); + } + + public void turnOffMoveMode() { + moveMode = false; + showField(currentPosition); + fieldScreenView.refreshTerminal(); + } + + public void switchFieldDisplay() { + if (!moveMode) { + Field field = fields.get(currentPosition); + fieldDisplayMap.put(field, !fieldDisplayMap.get(field)); + showField(currentPosition); + 
fieldScreenView.refreshTerminal(); + } + } + + private void showField(int pos) { + Field field = fields.get(pos); + fieldScreenView.showField(pos, field, fieldDisplayMap.get(field), pos == currentPosition, + headerMaxLength, descriptionMaxLength, moveMode); + } + + public void setSortField() { + if (!moveMode) { + Field newSortField = fields.get(currentPosition); + if (newSortField != this.sortField) { + this.sortField = newSortField; + fieldScreenView.showScreenDescription(sortField.getHeader()); + fieldScreenView.refreshTerminal(); + } + } + } + + public ScreenView transitionToNextScreen() { + resultListener.accept(sortField, fields, fieldDisplayMap); + return nextScreenView; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenView.java new file mode 100644 index 000000000000..030b820cfb45 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenView.java @@ -0,0 +1,185 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.field; + +import java.util.EnumMap; +import java.util.List; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The screen where we can change the displayed fields, the sort key and the order of the fields. 
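+ *
+ * Key bindings: up/down and page-up/page-down (or Home/End) move the cursor, the right arrow
+ * starts moving the selected field, the left arrow or Enter commits the move, 'd' or space
+ * toggles whether the field is displayed, 's' sets the sort field, and 'q' or Escape returns to
+ * the previous screen.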
+ */
+@InterfaceAudience.Private
+public class FieldScreenView extends AbstractScreenView {
+
+  private static final int SCREEN_DESCRIPTION_START_ROW = 0;
+  private static final int FIELD_START_ROW = 5;
+
+  private final FieldScreenPresenter fieldScreenPresenter;
+
+  public FieldScreenView(Screen screen, Terminal terminal, Field sortField, List<Field> fields,
+    EnumMap<Field, Boolean> fieldDisplayMap, FieldScreenPresenter.ResultListener resultListener,
+    ScreenView nextScreenView) {
+    super(screen, terminal);
+    this.fieldScreenPresenter = new FieldScreenPresenter(this, sortField, fields, fieldDisplayMap,
+      resultListener, nextScreenView);
+  }
+
+  @Override
+  public void init() {
+    fieldScreenPresenter.init();
+  }
+
+  @Override
+  public ScreenView handleKeyPress(KeyEvent keyEvent) {
+    switch (keyEvent.getType()) {
+      case Escape:
+        return fieldScreenPresenter.transitionToNextScreen();
+
+      case ArrowUp:
+        fieldScreenPresenter.arrowUp();
+        return this;
+
+      case ArrowDown:
+        fieldScreenPresenter.arrowDown();
+        return this;
+
+      case PageUp:
+      case Home:
+        fieldScreenPresenter.pageUp();
+        return this;
+
+      case PageDown:
+      case End:
+        fieldScreenPresenter.pageDown();
+        return this;
+
+      case ArrowRight:
+        fieldScreenPresenter.turnOnMoveMode();
+        return this;
+
+      case ArrowLeft:
+      case Enter:
+        fieldScreenPresenter.turnOffMoveMode();
+        return this;
+    }
+
+    if (keyEvent.getType() != KeyEvent.Type.Character) {
+      return this;
+    }
+
+    assert keyEvent.getCharacter() != null;
+    switch (keyEvent.getCharacter()) {
+      case 'd':
+      case ' ':
+        fieldScreenPresenter.switchFieldDisplay();
+        break;
+
+      case 's':
+        fieldScreenPresenter.setSortField();
+        break;
+
+      case 'q':
+        return fieldScreenPresenter.transitionToNextScreen();
+    }
+
+    return this;
+  }
+
+  public void showFieldScreen(String sortFieldHeader, List<Field> fields,
+    EnumMap<Field, Boolean> fieldDisplayMap, int currentPosition, int headerMaxLength,
+    int descriptionMaxLength, boolean moveMode) {
+    showScreenDescription(sortFieldHeader);
+
+    for (int i = 0; i < fields.size(); i++) {
+      Field field = fields.get(i);
+      showField(i, field, fieldDisplayMap.get(field), i == currentPosition, headerMaxLength,
+        descriptionMaxLength, moveMode);
+    }
+  }
+
+  public void showScreenDescription(String sortKeyHeader) {
+    TerminalPrinter printer = terminal.getTerminalPrinter(SCREEN_DESCRIPTION_START_ROW);
+    printer.startBold().print("Fields Management").stopBold().endOfLine();
+    printer.print("Current Sort Field: ").startBold().print(sortKeyHeader).stopBold().endOfLine();
+    printer.print("Navigate with up/down, Right selects for move then <Enter> or Left commits,")
+      .endOfLine();
+    printer.print("'d' or <Space> toggles display, 's' sets sort. Use 'q' or <Esc> to end!")
+      .endOfLine();
+  }
+
+  public void showField(int pos, Field field, boolean display, boolean selected,
+    int fieldHeaderMaxLength, int fieldDescriptionMaxLength, boolean moveMode) {
+
+    String fieldHeader = String.format("%-" + fieldHeaderMaxLength + "s", field.getHeader());
+    String fieldDescription = String.format("%-" + fieldDescriptionMaxLength + "s",
+      field.getDescription());
+
+    int row = FIELD_START_ROW + pos;
+    TerminalPrinter printer = terminal.getTerminalPrinter(row);
+    if (selected) {
+      String prefix = display ?
"* " : " "; + if (moveMode) { + printer.print(prefix); + + if (display) { + printer.startBold(); + } + + printer.startHighlight() + .printFormat("%s = %s", fieldHeader, fieldDescription).stopHighlight(); + + if (display) { + printer.stopBold(); + } + + printer.endOfLine(); + } else { + printer.print(prefix); + + if (display) { + printer.startBold(); + } + + printer.startHighlight().print(fieldHeader).stopHighlight() + .printFormat(" = %s", fieldDescription); + + if (display) { + printer.stopBold(); + } + + printer.endOfLine(); + } + } else { + if (display) { + printer.print("* ").startBold().printFormat("%s = %s", fieldHeader, fieldDescription) + .stopBold().endOfLine(); + } else { + printer.printFormat(" %s = %s", fieldHeader, fieldDescription).endOfLine(); + } + } + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/CommandDescription.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/CommandDescription.java new file mode 100644 index 000000000000..5002ab8f6c18 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/CommandDescription.java @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.help; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents a description of a command that we can execute in the top screen. + */ +@InterfaceAudience.Private +public class CommandDescription { + + private final List keys; + private final String description; + + public CommandDescription(String key, String description) { + this(Collections.singletonList(Objects.requireNonNull(key)), description); + } + + public CommandDescription(List keys, String description) { + this.keys = Collections.unmodifiableList(new ArrayList<>(Objects.requireNonNull(keys))); + this.description = Objects.requireNonNull(description); + } + + public List getKeys() { + return keys; + } + + public String getDescription() { + return description; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenter.java new file mode 100644 index 000000000000..b3596e7a5674 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenter.java @@ -0,0 +1,73 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.help;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The presentation logic for the help screen.
+ */
+@InterfaceAudience.Private
+public class HelpScreenPresenter {
+
+  private static final CommandDescription[] COMMAND_DESCRIPTIONS = new CommandDescription[] {
+    new CommandDescription("f", "Add/Remove/Order/Sort the fields"),
+    new CommandDescription("R", "Toggle the sort order (ascending/descending)"),
+    new CommandDescription("m", "Select mode"),
+    new CommandDescription("o", "Add a filter with ignoring case"),
+    new CommandDescription("O", "Add a filter with case sensitive"),
+    new CommandDescription("p", "Show the current filters"),
+    new CommandDescription("=", "Clear the current filters"),
+    new CommandDescription("i", "Drill down"),
+    new CommandDescription(
+      Arrays.asList("up", "down", "left", "right", "pageUp", "pageDown", "home", "end"),
+      "Scroll the metrics"),
+    new CommandDescription("d", "Change the refresh delay"),
+    new CommandDescription("X", "Adjust the field length"),
+    new CommandDescription("<Enter>", "Refresh the display"),
+    new CommandDescription("h", "Display this screen"),
+    new CommandDescription(Arrays.asList("q", "<Esc>"), "Quit")
+  };
+
+  private final HelpScreenView helpScreenView;
+  private final long refreshDelay;
+  private final ScreenView nextScreenView;
+
+  public HelpScreenPresenter(HelpScreenView helpScreenView, long refreshDelay,
+    ScreenView nextScreenView) {
+    this.helpScreenView = Objects.requireNonNull(helpScreenView);
+    this.refreshDelay = refreshDelay;
+    this.nextScreenView = Objects.requireNonNull(nextScreenView);
+  }
+
+  public void init() {
+    helpScreenView.hideCursor();
+    helpScreenView.clearTerminal();
+    helpScreenView.showHelpScreen(refreshDelay, COMMAND_DESCRIPTIONS);
+    helpScreenView.refreshTerminal();
+  }
+
+  public ScreenView transitionToNextScreen() {
+    return nextScreenView;
+  }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenView.java
new file mode 100644
index 000000000000..1e8a3e97e6b0
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenView.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.help; + +import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The help screen. + */ +@InterfaceAudience.Private +public class HelpScreenView extends AbstractScreenView { + + private static final int SCREEN_DESCRIPTION_START_ROW = 0; + private static final int COMMAND_DESCRIPTION_START_ROW = 3; + + private final HelpScreenPresenter helpScreenPresenter; + + public HelpScreenView(Screen screen, Terminal terminal, long refreshDelay, + ScreenView nextScreenView) { + super(screen, terminal); + this.helpScreenPresenter = new HelpScreenPresenter(this, refreshDelay, nextScreenView); + } + + @Override + public void init() { + helpScreenPresenter.init(); + } + + @Override + public ScreenView handleKeyPress(KeyEvent keyEvent) { + return helpScreenPresenter.transitionToNextScreen(); + } + + public void showHelpScreen(long refreshDelay, CommandDescription[] commandDescriptions) { + showScreenDescription(refreshDelay); + + TerminalPrinter printer = terminal.getTerminalPrinter(COMMAND_DESCRIPTION_START_ROW); + for (CommandDescription commandDescription : commandDescriptions) { + showCommandDescription(printer, commandDescription); + } + + printer.endOfLine(); + printer.print("Press any key to continue").endOfLine(); + } + + private void showScreenDescription(long refreshDelay) { + TerminalPrinter printer = terminal.getTerminalPrinter(SCREEN_DESCRIPTION_START_ROW); + printer.startBold().print("Help for Interactive Commands").stopBold().endOfLine(); + printer.print("Refresh delay: ").startBold() + .print((double) refreshDelay / 1000).stopBold().endOfLine(); + } + + private void showCommandDescription(TerminalPrinter terminalPrinter, + CommandDescription commandDescription) { + terminalPrinter.print(" "); + boolean first = true; + for (String key : commandDescription.getKeys()) { + if (first) { + first = false; + } else { + terminalPrinter.print(","); + } + terminalPrinter.startBold().print(key).stopBold(); + } + + terminalPrinter.printFormat(": %s", commandDescription.getDescription()).endOfLine(); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenter.java new file mode 100644 index 000000000000..8cd9879b0ede --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenter.java @@ -0,0 +1,132 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.mode; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The presentation logic for the mode screen. + */ +@InterfaceAudience.Private +public class ModeScreenPresenter { + + private final ModeScreenView modeScreenView; + private final Mode currentMode; + private final Consumer resultListener; + private final ScreenView nextScreenView; + + private final int modeHeaderMaxLength; + private final int modeDescriptionMaxLength; + private final List modes = Arrays.asList(Mode.values()); + + private int currentPosition; + + public ModeScreenPresenter(ModeScreenView modeScreenView, Mode currentMode, + Consumer resultListener, ScreenView nextScreenView) { + this.modeScreenView = Objects.requireNonNull(modeScreenView); + this.currentMode = Objects.requireNonNull(currentMode); + this.resultListener = Objects.requireNonNull(resultListener); + this.nextScreenView = Objects.requireNonNull(nextScreenView); + + int modeHeaderLength = 0; + int modeDescriptionLength = 0; + for (int i = 0; i < modes.size(); i++) { + Mode mode = modes.get(i); + if (mode == currentMode) { + currentPosition = i; + } + + if (modeHeaderLength < mode.getHeader().length()) { + modeHeaderLength = mode.getHeader().length(); + } + + if (modeDescriptionLength < mode.getDescription().length()) { + modeDescriptionLength = mode.getDescription().length(); + } + } + + modeHeaderMaxLength = modeHeaderLength; + modeDescriptionMaxLength = modeDescriptionLength; + } + + public void init() { + modeScreenView.hideCursor(); + modeScreenView.clearTerminal(); + modeScreenView.showModeScreen(currentMode, modes, currentPosition, modeHeaderMaxLength, + modeDescriptionMaxLength); + modeScreenView.refreshTerminal(); + } + + public void arrowUp() { + if (currentPosition > 0) { + currentPosition -= 1; + showMode(currentPosition); + showMode(currentPosition + 1); + modeScreenView.refreshTerminal(); + } + } + + public void arrowDown() { + if (currentPosition < modes.size() - 1) { + currentPosition += 1; + showMode(currentPosition); + showMode(currentPosition - 1); + modeScreenView.refreshTerminal(); + } + } + + public void pageUp() { + if (currentPosition > 0) { + int previousPosition = currentPosition; + currentPosition = 0; + showMode(previousPosition); + showMode(currentPosition); + modeScreenView.refreshTerminal(); + } + } + + public void pageDown() { + if (currentPosition < modes.size() - 1) { + int previousPosition = currentPosition; + currentPosition = modes.size() - 1; + showMode(previousPosition); + showMode(currentPosition); + modeScreenView.refreshTerminal(); + } + } + + private void showMode(int pos) { + modeScreenView.showMode(pos, modes.get(pos), pos == currentPosition, 
modeHeaderMaxLength, + modeDescriptionMaxLength); + } + + public ScreenView transitionToNextScreen(boolean changeMode) { + Mode selectedMode = modes.get(currentPosition); + if (changeMode && currentMode != selectedMode) { + resultListener.accept(selectedMode); + } + return nextScreenView; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenView.java new file mode 100644 index 000000000000..f9f724619b92 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenView.java @@ -0,0 +1,130 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.mode; + +import java.util.List; +import java.util.function.Consumer; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The screen where we can choose the {@link Mode} in the top screen. 
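+ *
+ * Up/down and page-up/page-down (or Home/End) move the cursor, Enter switches to the highlighted
+ * mode, and 'q' or Escape leaves the screen without changing the mode.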
+ */
+@InterfaceAudience.Private
+public class ModeScreenView extends AbstractScreenView {
+
+  private static final int SCREEN_DESCRIPTION_START_ROW = 0;
+  private static final int MODE_START_ROW = 4;
+
+  private final ModeScreenPresenter modeScreenPresenter;
+
+  public ModeScreenView(Screen screen, Terminal terminal, Mode currentMode,
+    Consumer<Mode> resultListener, ScreenView nextScreenView) {
+    super(screen, terminal);
+    this.modeScreenPresenter = new ModeScreenPresenter(this, currentMode, resultListener,
+      nextScreenView);
+  }
+
+  @Override
+  public void init() {
+    modeScreenPresenter.init();
+  }
+
+  @Override
+  public ScreenView handleKeyPress(KeyEvent keyEvent) {
+    switch (keyEvent.getType()) {
+      case Escape:
+        return modeScreenPresenter.transitionToNextScreen(false);
+
+      case Enter:
+        return modeScreenPresenter.transitionToNextScreen(true);
+
+      case ArrowUp:
+        modeScreenPresenter.arrowUp();
+        return this;
+
+      case ArrowDown:
+        modeScreenPresenter.arrowDown();
+        return this;
+
+      case PageUp:
+      case Home:
+        modeScreenPresenter.pageUp();
+        return this;
+
+      case PageDown:
+      case End:
+        modeScreenPresenter.pageDown();
+        return this;
+    }
+
+    if (keyEvent.getType() != KeyEvent.Type.Character) {
+      return this;
+    }
+
+    assert keyEvent.getCharacter() != null;
+    switch (keyEvent.getCharacter()) {
+      case 'q':
+        return modeScreenPresenter.transitionToNextScreen(false);
+    }
+
+    return this;
+  }
+
+  public void showModeScreen(Mode currentMode, List<Mode> modes, int currentPosition,
+    int modeHeaderMaxLength, int modeDescriptionMaxLength) {
+    showScreenDescription(currentMode);
+
+    for (int i = 0; i < modes.size(); i++) {
+      showMode(i, modes.get(i), i == currentPosition,
+        modeHeaderMaxLength, modeDescriptionMaxLength);
+    }
+  }
+
+  private void showScreenDescription(Mode currentMode) {
+    TerminalPrinter printer = terminal.getTerminalPrinter(SCREEN_DESCRIPTION_START_ROW);
+    printer.startBold().print("Mode Management").stopBold().endOfLine();
+    printer.print("Current mode: ")
+      .startBold().print(currentMode.getHeader()).stopBold().endOfLine();
+    printer.print("Select mode followed by <Enter>").endOfLine();
+  }
+
+  public void showMode(int pos, Mode mode, boolean selected, int modeHeaderMaxLength,
+    int modeDescriptionMaxLength) {
+
+    String modeHeader = String.format("%-" + modeHeaderMaxLength + "s", mode.getHeader());
+    String modeDescription = String.format("%-" + modeDescriptionMaxLength + "s",
+      mode.getDescription());
+
+    int row = MODE_START_ROW + pos;
+    TerminalPrinter printer = terminal.getTerminalPrinter(row);
+    if (selected) {
+      printer.startHighlight().print(modeHeader).stopHighlight()
+        .printFormat(" = %s", modeDescription).endOfLine();
+    } else {
+      printer.printFormat("%s = %s", modeHeader, modeDescription).endOfLine();
+    }
+  }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenter.java
new file mode 100644
index 000000000000..7282fd9f3f27
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenter.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The presentation logic for the filter display mode. + */ +@InterfaceAudience.Private +public class FilterDisplayModeScreenPresenter { + + private final FilterDisplayModeScreenView filterDisplayModeScreenView; + private final List filters; + private final ScreenView nextScreenView; + + public FilterDisplayModeScreenPresenter(FilterDisplayModeScreenView filterDisplayModeScreenView, + List filters, ScreenView nextScreenView) { + this.filterDisplayModeScreenView = Objects.requireNonNull(filterDisplayModeScreenView); + this.filters = Collections.unmodifiableList(new ArrayList<>(Objects.requireNonNull(filters))); + this.nextScreenView = Objects.requireNonNull(nextScreenView); + } + + public void init() { + filterDisplayModeScreenView.showFilters(filters); + filterDisplayModeScreenView.refreshTerminal(); + } + + public ScreenView returnToNextScreen() { + return nextScreenView; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenView.java new file mode 100644 index 000000000000..8938123858a8 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenView.java @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.hbtop.Filter;
+import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView;
+import org.apache.hadoop.hbase.hbtop.screen.Screen;
+import org.apache.hadoop.hbase.hbtop.screen.ScreenView;
+import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent;
+import org.apache.hadoop.hbase.hbtop.terminal.Terminal;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * The filter display mode in the top screen.
+ *
+ * Exit if Enter key is pressed.
+ */
+@InterfaceAudience.Private
+public class FilterDisplayModeScreenView extends AbstractScreenView {
+
+  private final int row;
+  private final FilterDisplayModeScreenPresenter filterDisplayModeScreenPresenter;
+
+  public FilterDisplayModeScreenView(Screen screen, Terminal terminal, int row,
+    List<Filter> filters, ScreenView nextScreenView) {
+    super(screen, terminal);
+    this.row = row;
+    this.filterDisplayModeScreenPresenter =
+      new FilterDisplayModeScreenPresenter(this, filters, nextScreenView);
+  }
+
+  @Override
+  public void init() {
+    filterDisplayModeScreenPresenter.init();
+  }
+
+  @Override
+  public ScreenView handleKeyPress(KeyEvent keyEvent) {
+    switch (keyEvent.getType()) {
+      case Enter:
+        return filterDisplayModeScreenPresenter.returnToNextScreen();
+    }
+    return this;
+  }
+
+  public void showFilters(List<Filter> filters) {
+    String filtersString = "none";
+    if (!filters.isEmpty()) {
+      filtersString = String.join(" + ",
+        filters.stream().map(f -> String.format("'%s'", f)).collect(Collectors.toList()));
+    }
+
+    terminal.getTerminalPrinter(row).startBold()
+      .print("<Enter> to resume, filters: " + filtersString).stopBold().endOfLine();
+  }
+}
diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Header.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Header.java
new file mode 100644
index 000000000000..df672e9695d9
--- /dev/null
+++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Header.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.hbtop.screen.top;
+
+import java.util.Objects;
+import org.apache.hadoop.hbase.hbtop.field.Field;
+import org.apache.yetus.audience.InterfaceAudience;
+
+
+/**
+ * Represents headers for the metrics in the top screen.
+ */
+@InterfaceAudience.Private
+public class Header {
+  private final Field field;
+  private final int length;
+
+  public Header(Field field, int length) {
+    this.field = Objects.requireNonNull(field);
+    this.length = length;
+  }
+
+  public String format() {
+    return "%" + (field.isLeftJustify() ?
"-" : "") + length + "s"; + } + + public Field getField() { + return field; + } + + public int getLength() { + return length; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenter.java new file mode 100644 index 000000000000..8ab858b995f3 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenter.java @@ -0,0 +1,165 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The presentation logic for the input mode. + */ +@InterfaceAudience.Private +public class InputModeScreenPresenter { + private final InputModeScreenView inputModeScreenView; + private final String message; + private final List histories; + private final Function resultListener; + + private StringBuilder inputString = new StringBuilder(); + private int cursorPosition; + private int historyPosition = -1; + + public InputModeScreenPresenter(InputModeScreenView inputModeScreenView, String message, + @Nullable List histories, Function resultListener) { + this.inputModeScreenView = Objects.requireNonNull(inputModeScreenView); + this.message = Objects.requireNonNull(message); + + if (histories != null) { + this.histories = Collections.unmodifiableList(new ArrayList<>(histories)); + } else { + this.histories = Collections.emptyList(); + } + + this.resultListener = Objects.requireNonNull(resultListener); + } + + public void init() { + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public ScreenView returnToNextScreen() { + inputModeScreenView.hideCursor(); + String result = inputString.toString(); + + return resultListener.apply(result); + } + + public void character(Character character) { + inputString.insert(cursorPosition, character); + cursorPosition += 1; + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void backspace() { + if (cursorPosition == 0) { + return; + } + + inputString.deleteCharAt(cursorPosition - 1); + cursorPosition -= 1; + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void delete() { + if (inputString.length() == 0 || cursorPosition > 
inputString.length() - 1) { + return; + } + + inputString.deleteCharAt(cursorPosition); + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void arrowLeft() { + if (cursorPosition == 0) { + return; + } + + cursorPosition -= 1; + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void arrowRight() { + if (cursorPosition > inputString.length() - 1) { + return; + } + + cursorPosition += 1; + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void home() { + cursorPosition = 0; + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void end() { + cursorPosition = inputString.length(); + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void arrowUp() { + if (historyPosition == 0 || histories.isEmpty()) { + return; + } + + if (historyPosition == -1) { + historyPosition = histories.size() - 1; + } else { + historyPosition -= 1; + } + + inputString = new StringBuilder(histories.get(historyPosition)); + + cursorPosition = inputString.length(); + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } + + public void arrowDown() { + if (historyPosition == -1 || histories.isEmpty()) { + return; + } + + if (historyPosition == histories.size() - 1) { + historyPosition = -1; + inputString = new StringBuilder(); + } else { + historyPosition += 1; + inputString = new StringBuilder(histories.get(historyPosition)); + } + + cursorPosition = inputString.length(); + inputModeScreenView.showInput(message, inputString.toString(), cursorPosition); + inputModeScreenView.refreshTerminal(); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenView.java new file mode 100644 index 000000000000..b867f5179836 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenView.java @@ -0,0 +1,109 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import java.util.List; +import java.util.function.Function; +import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.CursorPosition; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The input mode in the top screen. + */ +@InterfaceAudience.Private +public class InputModeScreenView extends AbstractScreenView { + + private final int row; + private final InputModeScreenPresenter inputModeScreenPresenter; + + public InputModeScreenView(Screen screen, Terminal terminal, int row, String message, + List histories, Function resultListener) { + super(screen, terminal); + this.row = row; + this.inputModeScreenPresenter = new InputModeScreenPresenter(this, message, histories, + resultListener); + } + + @Override + public void init() { + inputModeScreenPresenter.init(); + } + + @Override + public ScreenView handleKeyPress(KeyEvent keyEvent) { + + switch (keyEvent.getType()) { + case Enter: + return inputModeScreenPresenter.returnToNextScreen(); + + case Character: + inputModeScreenPresenter.character(keyEvent.getCharacter()); + break; + + case Backspace: + inputModeScreenPresenter.backspace(); + break; + + case Delete: + inputModeScreenPresenter.delete(); + break; + + case ArrowLeft: + inputModeScreenPresenter.arrowLeft(); + break; + + case ArrowRight: + inputModeScreenPresenter.arrowRight(); + break; + + case Home: + inputModeScreenPresenter.home(); + break; + + case End: + inputModeScreenPresenter.end(); + break; + + case ArrowUp: + inputModeScreenPresenter.arrowUp(); + break; + + case ArrowDown: + inputModeScreenPresenter.arrowDown(); + break; + + default: + break; + } + return this; + } + + public void showInput(String message, String inputString, int cursorPosition) { + terminal.getTerminalPrinter(row).startBold().print(message) + .stopBold().print(" ").print(inputString).endOfLine(); + terminal.setCursorPosition( + new CursorPosition(message.length() + 1 + cursorPosition, row)); + terminal.refresh(); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenter.java new file mode 100644 index 000000000000..174a15a48432 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenter.java @@ -0,0 +1,52 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import java.util.Objects; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The presentation logic for the message mode. + * + * Exit after 2 seconds or if any key is pressed. + */ +@InterfaceAudience.Private +public class MessageModeScreenPresenter { + + private final MessageModeScreenView messageModeScreenView; + private final String message; + private final ScreenView nextScreenView; + + public MessageModeScreenPresenter(MessageModeScreenView messageModeScreenView, String message, + ScreenView nextScreenView) { + this.messageModeScreenView = Objects.requireNonNull(messageModeScreenView); + this.message = Objects.requireNonNull(message); + this.nextScreenView = Objects.requireNonNull(nextScreenView); + } + + public void init() { + messageModeScreenView.showMessage(message); + messageModeScreenView.refreshTerminal(); + } + + public ScreenView returnToNextScreen() { + return nextScreenView; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenView.java new file mode 100644 index 000000000000..b46d6b42ff73 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenView.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The message mode in the top screen. 
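+ * + * <p>It displays a highlighted one-line message on the given row and returns to the next screen + * after 2 seconds or as soon as any key is pressed (see {@link MessageModeScreenPresenter}).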
+ */ +@InterfaceAudience.Private +public class MessageModeScreenView extends AbstractScreenView { + + private final int row; + private final MessageModeScreenPresenter messageModeScreenPresenter; + + public MessageModeScreenView(Screen screen, Terminal terminal, int row, String message, + ScreenView nextScreenView) { + super(screen, terminal); + this.row = row; + this.messageModeScreenPresenter = + new MessageModeScreenPresenter(this, message, nextScreenView); + } + + @Override + public void init() { + messageModeScreenPresenter.init(); + setTimer(2000); + } + + @Override + public ScreenView handleTimer() { + return messageModeScreenPresenter.returnToNextScreen(); + } + + @Override + public ScreenView handleKeyPress(KeyEvent keyEvent) { + cancelTimer(); + return messageModeScreenPresenter.returnToNextScreen(); + } + + public void showMessage(String message) { + terminal.getTerminalPrinter(row).startHighlight().print(" ") + .print(message).print(" ").stopHighlight().endOfLine(); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Paging.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Paging.java new file mode 100644 index 000000000000..b95e6f480e6e --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Paging.java @@ -0,0 +1,152 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Utility class for paging for the metrics. 
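+ * + * <p>For example (illustrative only): with pageSize = 10 and recordsSize = 25, init() starts at position 0 + * with the visible window [0, 10); pageDown() then moves the current position to 10 and the window + * to [10, 20), and keepConsistent() re-clamps the window whenever the current position falls outside it.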
+ */ +@InterfaceAudience.Private +public class Paging { + private int currentPosition; + private int pageStartPosition; + private int pageEndPosition; + + private int pageSize; + private int recordsSize; + + public void init() { + currentPosition = 0; + pageStartPosition = 0; + pageEndPosition = Math.min(pageSize, recordsSize); + } + + public void updatePageSize(int pageSize) { + this.pageSize = pageSize; + + if (pageSize == 0) { + pageStartPosition = 0; + pageEndPosition = 0; + } else { + pageEndPosition = pageStartPosition + pageSize; + keepConsistent(); + } + } + + public void updateRecordsSize(int recordsSize) { + if (this.recordsSize == 0) { + currentPosition = 0; + pageStartPosition = 0; + pageEndPosition = Math.min(pageSize, recordsSize); + this.recordsSize = recordsSize; + } else if (recordsSize == 0) { + currentPosition = 0; + pageStartPosition = 0; + pageEndPosition = 0; + this.recordsSize = recordsSize; + } else { + this.recordsSize = recordsSize; + if (pageSize > 0) { + pageEndPosition = pageStartPosition + pageSize; + keepConsistent(); + } + } + } + + public void arrowUp() { + if (currentPosition > 0) { + currentPosition -= 1; + if (pageSize > 0) { + keepConsistent(); + } + } + } + + public void arrowDown() { + if (currentPosition < recordsSize - 1) { + currentPosition += 1; + if (pageSize > 0) { + keepConsistent(); + } + } + } + + public void pageUp() { + if (pageSize > 0 && currentPosition > 0) { + currentPosition -= pageSize; + if (currentPosition < 0) { + currentPosition = 0; + } + keepConsistent(); + } + } + + public void pageDown() { + if (pageSize > 0 && currentPosition < recordsSize - 1) { + + currentPosition = currentPosition + pageSize; + if (currentPosition >= recordsSize) { + currentPosition = recordsSize - 1; + } + + pageStartPosition = currentPosition; + pageEndPosition = pageStartPosition + pageSize; + keepConsistent(); + } + } + + private void keepConsistent() { + if (currentPosition < pageStartPosition) { + pageStartPosition = currentPosition; + pageEndPosition = pageStartPosition + pageSize; + } else if (currentPosition > recordsSize - 1) { + currentPosition = recordsSize - 1; + pageEndPosition = recordsSize; + pageStartPosition = pageEndPosition - pageSize; + } else if (currentPosition > pageEndPosition - 1) { + pageEndPosition = currentPosition + 1; + pageStartPosition = pageEndPosition - pageSize; + } + + if (pageStartPosition < 0) { + pageStartPosition = 0; + } + + if (pageEndPosition > recordsSize) { + pageEndPosition = recordsSize; + pageStartPosition = pageEndPosition - pageSize; + if (pageStartPosition < 0) { + pageStartPosition = 0; + } + } + } + + public int getCurrentPosition() { + return currentPosition; + } + + public int getPageStartPosition() { + return pageStartPosition; + } + + public int getPageEndPosition() { + return pageEndPosition; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Summary.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Summary.java new file mode 100644 index 000000000000..03598f66fb48 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/Summary.java @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import java.util.Objects; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents the summary of the metrics. + */ +@InterfaceAudience.Private +public class Summary { + private final String currentTime; + private final String version; + private final String clusterId; + private final int servers; + private final int liveServers; + private final int deadServers; + private final int regionCount; + private final int ritCount; + private final double averageLoad; + private final long aggregateRequestPerSecond; + + public Summary(String currentTime, String version, String clusterId, int servers, + int liveServers, int deadServers, int regionCount, int ritCount, double averageLoad, + long aggregateRequestPerSecond) { + this.currentTime = Objects.requireNonNull(currentTime); + this.version = Objects.requireNonNull(version); + this.clusterId = Objects.requireNonNull(clusterId); + this.servers = servers; + this.liveServers = liveServers; + this.deadServers = deadServers; + this.regionCount = regionCount; + this.ritCount = ritCount; + this.averageLoad = averageLoad; + this.aggregateRequestPerSecond = aggregateRequestPerSecond; + } + + public String getCurrentTime() { + return currentTime; + } + + public String getVersion() { + return version; + } + + public String getClusterId() { + return clusterId; + } + + public int getServers() { + return servers; + } + + public int getLiveServers() { + return liveServers; + } + + public int getDeadServers() { + return deadServers; + } + + public int getRegionCount() { + return regionCount; + } + + public int getRitCount() { + return ritCount; + } + + public double getAverageLoad() { + return averageLoad; + } + + public long getAggregateRequestPerSecond() { + return aggregateRequestPerSecond; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModel.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModel.java new file mode 100644 index 000000000000..66e5e9f2fa64 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModel.java @@ -0,0 +1,202 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import org.apache.commons.lang3.time.DateFormatUtils; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.hbtop.field.FieldValue; +import org.apache.hadoop.hbase.hbtop.mode.DrillDownInfo; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * The data and business logic for the top screen. + */ +@InterfaceAudience.Private +public class TopScreenModel { + + private static final Logger LOGGER = LoggerFactory.getLogger(TopScreenModel.class); + + private final Admin admin; + + private Mode currentMode; + private Field currentSortField; + private List<FieldInfo> fieldInfos; + private List<Field> fields; + + private Summary summary; + private List<Record> records; + + private final List<Filter> filters = new ArrayList<>(); + private final List<String> filterHistories = new ArrayList<>(); + + private boolean ascendingSort; + + public TopScreenModel(Admin admin, Mode initialMode) { + this.admin = Objects.requireNonNull(admin); + switchMode(Objects.requireNonNull(initialMode), null, false); + } + + public void switchMode(Mode nextMode, List<Filter> initialFilters, + boolean keepSortFieldAndSortOrderIfPossible) { + + currentMode = nextMode; + fieldInfos = Collections.unmodifiableList(new ArrayList<>(currentMode.getFieldInfos())); + fields = Collections.unmodifiableList(new ArrayList<>(currentMode.getFieldInfos().stream() + .map(FieldInfo::getField).collect(Collectors.toList()))); + + if (keepSortFieldAndSortOrderIfPossible) { + boolean match = fields.stream().anyMatch(f -> f == currentSortField); + if (!match) { + currentSortField = nextMode.getDefaultSortField(); + ascendingSort = false; + } + } else { + currentSortField = nextMode.getDefaultSortField(); + ascendingSort = false; + } + + clearFilters(); + if (initialFilters != null) { + filters.addAll(initialFilters); + } + } + + public void setSortFieldAndFields(Field sortField, List<Field> fields) { + this.currentSortField = sortField; + this.fields = Collections.unmodifiableList(new ArrayList<>(fields)); + } + + public void refreshMetricsData() { + ClusterMetrics clusterMetrics; + try { + clusterMetrics = admin.getClusterMetrics(); + } catch (Exception e) { + LOGGER.error("Unable to get cluster metrics", e); + return; + } + + refreshSummary(clusterMetrics); + refreshRecords(clusterMetrics); + } + + private void refreshSummary(ClusterMetrics clusterMetrics) { + String currentTime = DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT + .format(System.currentTimeMillis()); + String version = clusterMetrics.getHBaseVersion(); + String clusterId = clusterMetrics.getClusterId(); + int liveServers = clusterMetrics.getLiveServerMetrics().size(); + int deadServers = clusterMetrics.getDeadServerNames().size(); + int regionCount = clusterMetrics.getRegionCount(); + int ritCount = clusterMetrics.getRegionStatesInTransition().size(); + double averageLoad = clusterMetrics.getAverageLoad(); + long aggregateRequestPerSecond =
clusterMetrics.getLiveServerMetrics().entrySet().stream() + .mapToLong(e -> e.getValue().getRequestCountPerSecond()).sum(); + + summary = new Summary(currentTime, version, clusterId, liveServers + deadServers, + liveServers, deadServers, regionCount, ritCount, averageLoad, aggregateRequestPerSecond); + } + + private void refreshRecords(ClusterMetrics clusterMetrics) { + List<Record> records = currentMode.getRecords(clusterMetrics); + + // Filter and sort + records = records.stream() + .filter(r -> filters.stream().allMatch(f -> f.execute(r))) + .sorted((recordLeft, recordRight) -> { + FieldValue left = recordLeft.get(currentSortField); + FieldValue right = recordRight.get(currentSortField); + return (ascendingSort ? 1 : -1) * left.compareTo(right); + }).collect(Collectors.toList()); + + this.records = Collections.unmodifiableList(records); + } + + public void switchSortOrder() { + ascendingSort = !ascendingSort; + } + + public boolean addFilter(String filterString, boolean ignoreCase) { + Filter filter = Filter.parse(filterString, fields, ignoreCase); + if (filter == null) { + return false; + } + + filters.add(filter); + filterHistories.add(filterString); + return true; + } + + public void clearFilters() { + filters.clear(); + } + + public boolean drillDown(Record selectedRecord) { + DrillDownInfo drillDownInfo = currentMode.drillDown(selectedRecord); + if (drillDownInfo == null) { + return false; + } + switchMode(drillDownInfo.getNextMode(), drillDownInfo.getInitialFilters(), true); + return true; + } + + public Mode getCurrentMode() { + return currentMode; + } + + public Field getCurrentSortField() { + return currentSortField; + } + + public List<FieldInfo> getFieldInfos() { + return fieldInfos; + } + + public List<Field> getFields() { + return fields; + } + + public Summary getSummary() { + return summary; + } + + public List<Record> getRecords() { + return records; + } + + public List<Filter> getFilters() { + return Collections.unmodifiableList(filters); + } + + public List<String> getFilterHistories() { + return Collections.unmodifiableList(filterHistories); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenter.java new file mode 100644 index 000000000000..6e7cba0576db --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenter.java @@ -0,0 +1,331 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import java.util.ArrayList; +import java.util.EnumMap; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.screen.field.FieldScreenView; +import org.apache.hadoop.hbase.hbtop.screen.help.HelpScreenView; +import org.apache.hadoop.hbase.hbtop.screen.mode.ModeScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The presentation logic for the top screen. + */ +@InterfaceAudience.Private +public class TopScreenPresenter { + private final TopScreenView topScreenView; + private final AtomicLong refreshDelay; + private long lastRefreshTimestamp; + + private final AtomicBoolean adjustFieldLength = new AtomicBoolean(true); + private final TopScreenModel topScreenModel; + private int terminalLength; + private int horizontalScroll; + private final Paging paging = new Paging(); + + private final EnumMap fieldDisplayMap = new EnumMap<>(Field.class); + private final EnumMap fieldLengthMap = new EnumMap<>(Field.class); + + public TopScreenPresenter(TopScreenView topScreenView, long initialRefreshDelay, + TopScreenModel topScreenModel) { + this.topScreenView = Objects.requireNonNull(topScreenView); + this.refreshDelay = new AtomicLong(initialRefreshDelay); + this.topScreenModel = Objects.requireNonNull(topScreenModel); + + initFieldDisplayMapAndFieldLengthMap(); + } + + public void init() { + terminalLength = topScreenView.getTerminalSize().getColumns(); + paging.updatePageSize(topScreenView.getPageSize()); + topScreenView.hideCursor(); + } + + public long refresh(boolean force) { + if (!force) { + long delay = System.currentTimeMillis() - lastRefreshTimestamp; + if (delay < refreshDelay.get()) { + return refreshDelay.get() - delay; + } + } + + TerminalSize newTerminalSize = topScreenView.doResizeIfNecessary(); + if (newTerminalSize != null) { + terminalLength = newTerminalSize.getColumns(); + paging.updatePageSize(topScreenView.getPageSize()); + topScreenView.clearTerminal(); + } + + topScreenModel.refreshMetricsData(); + paging.updateRecordsSize(topScreenModel.getRecords().size()); + + adjustFieldLengthIfNeeded(); + + topScreenView.showTopScreen(topScreenModel.getSummary(), getDisplayedHeaders(), + getDisplayedRecords(), getSelectedRecord()); + + topScreenView.refreshTerminal(); + + lastRefreshTimestamp = System.currentTimeMillis(); + return refreshDelay.get(); + } + + public void adjustFieldLength() { + adjustFieldLength.set(true); + refresh(true); + } + + private void adjustFieldLengthIfNeeded() { + if (adjustFieldLength.get()) { + adjustFieldLength.set(false); + + for (Field f : topScreenModel.getFields()) { + if (f.isAutoAdjust()) { + int maxLength = topScreenModel.getRecords().stream() + .map(r -> r.get(f).asString().length()) + .max(Integer::compareTo).orElse(0); + fieldLengthMap.put(f, Math.max(maxLength, f.getHeader().length())); + } + } + } + } + + private List
getDisplayedHeaders() { + List<Field> displayFields = + topScreenModel.getFields().stream() + .filter(fieldDisplayMap::get).collect(Collectors.toList()); + + if (displayFields.isEmpty()) { + horizontalScroll = 0; + } else if (horizontalScroll > displayFields.size() - 1) { + horizontalScroll = displayFields.size() - 1; + } + + List<Header>
ret = new ArrayList<>(); + + int length = 0; + for (int i = horizontalScroll; i < displayFields.size(); i++) { + Field field = displayFields.get(i); + int fieldLength = fieldLengthMap.get(field); + + length += fieldLength + 1; + if (length > terminalLength) { + break; + } + ret.add(new Header(field, fieldLength)); + } + + return ret; + } + + private List getDisplayedRecords() { + List ret = new ArrayList<>(); + for (int i = paging.getPageStartPosition(); i < paging.getPageEndPosition(); i++) { + ret.add(topScreenModel.getRecords().get(i)); + } + return ret; + } + + private Record getSelectedRecord() { + if (topScreenModel.getRecords().isEmpty()) { + return null; + } + return topScreenModel.getRecords().get(paging.getCurrentPosition()); + } + + public void arrowUp() { + paging.arrowUp(); + refresh(true); + } + + public void arrowDown() { + paging.arrowDown(); + refresh(true); + } + + public void pageUp() { + paging.pageUp(); + refresh(true); + } + + public void pageDown() { + paging.pageDown(); + refresh(true); + } + + public void arrowLeft() { + if (horizontalScroll > 0) { + horizontalScroll -= 1; + } + refresh(true); + } + + public void arrowRight() { + if (horizontalScroll < getHeaderSize() - 1) { + horizontalScroll += 1; + } + refresh(true); + } + + public void home() { + if (horizontalScroll > 0) { + horizontalScroll = 0; + } + refresh(true); + } + + public void end() { + int headerSize = getHeaderSize(); + horizontalScroll = headerSize == 0 ? 0 : headerSize - 1; + refresh(true); + } + + private int getHeaderSize() { + return (int) topScreenModel.getFields().stream() + .filter(fieldDisplayMap::get).count(); + } + + public void switchSortOrder() { + topScreenModel.switchSortOrder(); + refresh(true); + } + + public ScreenView transitionToHelpScreen(Screen screen, Terminal terminal) { + return new HelpScreenView(screen, terminal, refreshDelay.get(), topScreenView); + } + + public ScreenView transitionToModeScreen(Screen screen, Terminal terminal) { + return new ModeScreenView(screen, terminal, topScreenModel.getCurrentMode(), this::switchMode, + topScreenView); + } + + public ScreenView transitionToFieldScreen(Screen screen, Terminal terminal) { + return new FieldScreenView(screen, terminal, + topScreenModel.getCurrentSortField(), topScreenModel.getFields(), + fieldDisplayMap, + (sortKey, fields, fieldDisplayMap) -> { + topScreenModel.setSortFieldAndFields(sortKey, fields); + this.fieldDisplayMap.clear(); + this.fieldDisplayMap.putAll(fieldDisplayMap); + } + , topScreenView); + } + + private void switchMode(Mode nextMode) { + topScreenModel.switchMode(nextMode, null, false); + reset(); + } + + public void drillDown() { + Record selectedRecord = getSelectedRecord(); + if (selectedRecord == null) { + return; + } + if (topScreenModel.drillDown(selectedRecord)) { + reset(); + refresh(true); + } + } + + private void reset() { + initFieldDisplayMapAndFieldLengthMap(); + adjustFieldLength.set(true); + paging.init(); + horizontalScroll = 0; + topScreenView.clearTerminal(); + } + + private void initFieldDisplayMapAndFieldLengthMap() { + fieldDisplayMap.clear(); + fieldLengthMap.clear(); + for (FieldInfo fieldInfo : topScreenModel.getFieldInfos()) { + fieldDisplayMap.put(fieldInfo.getField(), fieldInfo.isDisplayByDefault()); + fieldLengthMap.put(fieldInfo.getField(), fieldInfo.getDefaultLength()); + } + } + + public ScreenView goToMessageMode(Screen screen, Terminal terminal, int row, String message) { + return new MessageModeScreenView(screen, terminal, row, message, topScreenView); + } + + public 
ScreenView goToInputModeForRefreshDelay(Screen screen, Terminal terminal, int row) { + return new InputModeScreenView(screen, terminal, row, + "Change refresh delay from " + (double) refreshDelay.get() / 1000 + " to", null, + (inputString) -> { + if (inputString.isEmpty()) { + return topScreenView; + } + + double delay; + try { + delay = Double.valueOf(inputString); + } catch (NumberFormatException e) { + return goToMessageMode(screen, terminal, row, "Unacceptable floating point"); + } + + refreshDelay.set((long) (delay * 1000)); + return topScreenView; + }); + } + + public ScreenView goToInputModeForFilter(Screen screen, Terminal terminal, int row, + boolean ignoreCase) { + return new InputModeScreenView(screen, terminal, row, + "add filter #" + (topScreenModel.getFilters().size() + 1) + + " (" + (ignoreCase ? "ignoring case" : "case sensitive") + ") as: [!]FLD?VAL", + topScreenModel.getFilterHistories(), + (inputString) -> { + if (inputString.isEmpty()) { + return topScreenView; + } + + if (!topScreenModel.addFilter(inputString, ignoreCase)) { + return goToMessageMode(screen, terminal, row, "Unacceptable filter expression"); + } + + paging.init(); + return topScreenView; + }); + } + + public void clearFilters() { + topScreenModel.clearFilters(); + paging.init(); + refresh(true); + } + + public ScreenView goToFilterDisplayMode(Screen screen, Terminal terminal, int row) { + return new FilterDisplayModeScreenView(screen, terminal, row, topScreenModel.getFilters(), + topScreenView); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenView.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenView.java new file mode 100644 index 000000000000..91a741ef1acc --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenView.java @@ -0,0 +1,298 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.AbstractScreenView; +import org.apache.hadoop.hbase.hbtop.screen.Screen; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalPrinter; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The screen that provides a dynamic real-time view for the HBase metrics. + * + * This shows the metric {@link Summary} and the metric {@link Record}s. The summary and the + * metrics are updated periodically (3 seconds by default). + */ +@InterfaceAudience.Private +public class TopScreenView extends AbstractScreenView { + + private static final int SUMMARY_START_ROW = 0; + private static final int SUMMARY_ROW_NUM = 7; + private static final int MESSAGE_ROW = 7; + private static final int RECORD_HEADER_ROW = 8; + private static final int RECORD_START_ROW = 9; + + private final TopScreenPresenter topScreenPresenter; + private int pageSize; + + public TopScreenView(Screen screen, Terminal terminal, long initialRefreshDelay, Admin admin, + Mode initialMode) { + super(screen, terminal); + this.topScreenPresenter = new TopScreenPresenter(this, initialRefreshDelay, + new TopScreenModel(admin, initialMode)); + } + + @Override + public void init() { + topScreenPresenter.init(); + long delay = topScreenPresenter.refresh(true); + setTimer(delay); + } + + @Override + public ScreenView handleTimer() { + long delay = topScreenPresenter.refresh(false); + setTimer(delay); + return this; + } + + @Nullable + @Override + public ScreenView handleKeyPress(KeyEvent keyEvent) { + switch (keyEvent.getType()) { + case Enter: + topScreenPresenter.refresh(true); + return this; + + case ArrowUp: + topScreenPresenter.arrowUp(); + return this; + + case ArrowDown: + topScreenPresenter.arrowDown(); + return this; + + case ArrowLeft: + topScreenPresenter.arrowLeft(); + return this; + + case ArrowRight: + topScreenPresenter.arrowRight(); + return this; + + case PageUp: + topScreenPresenter.pageUp(); + return this; + + case PageDown: + topScreenPresenter.pageDown(); + return this; + + case Home: + topScreenPresenter.home(); + return this; + + case End: + topScreenPresenter.end(); + return this; + + case Escape: + return null; + } + + if (keyEvent.getType() != KeyEvent.Type.Character) { + return unknownCommandMessage(); + } + + assert keyEvent.getCharacter() != null; + switch (keyEvent.getCharacter()) { + case 'R': + topScreenPresenter.switchSortOrder(); + break; + + case 'f': + cancelTimer(); + return topScreenPresenter.transitionToFieldScreen(screen, terminal); + + case 'm': + cancelTimer(); + return topScreenPresenter.transitionToModeScreen(screen, terminal); + + case 'h': + cancelTimer(); + return topScreenPresenter.transitionToHelpScreen(screen, terminal); + + case 'd': + cancelTimer(); + return topScreenPresenter.goToInputModeForRefreshDelay(screen, terminal, MESSAGE_ROW); + + case 'o': + cancelTimer(); + return topScreenPresenter.goToInputModeForFilter(screen, terminal, MESSAGE_ROW, true); + + case 'O': + 
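// case-sensitive filter input: same prompt as 'o' above, but ignoreCase is passed as false +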
cancelTimer(); + return topScreenPresenter.goToInputModeForFilter(screen, terminal, MESSAGE_ROW, false); + + case 'p': + cancelTimer(); + return topScreenPresenter.goToFilterDisplayMode(screen, terminal, MESSAGE_ROW); + + case '=': + topScreenPresenter.clearFilters(); + break; + + case 'X': + topScreenPresenter.adjustFieldLength(); + break; + + case 'i': + topScreenPresenter.drillDown(); + break; + + case 'q': + return null; + + default: + return unknownCommandMessage(); + } + return this; + } + + public TerminalSize getTerminalSize() { + TerminalSize terminalSize = terminal.getSize(); + updatePageSize(terminalSize); + return terminalSize; + } + + public TerminalSize doResizeIfNecessary() { + TerminalSize terminalSize = terminal.doResizeIfNecessary(); + if (terminalSize == null) { + return null; + } + updatePageSize(terminalSize); + return terminalSize; + } + + private void updatePageSize(TerminalSize terminalSize) { + pageSize = terminalSize.getRows() - SUMMARY_ROW_NUM - 2; + if (pageSize < 0) { + pageSize = 0; + } + } + + public int getPageSize() { + return pageSize; + } + + public void showTopScreen(Summary summary, List
headers, List<Record> records, + Record selectedRecord) { + showSummary(summary); + clearMessage(); + showHeaders(headers); + showRecords(headers, records, selectedRecord); + } + + private void showSummary(Summary summary) { + TerminalPrinter printer = terminal.getTerminalPrinter(SUMMARY_START_ROW); + printer.print(String.format("HBase top - %s", summary.getCurrentTime())).endOfLine(); + printer.print(String.format("Version: %s", summary.getVersion())).endOfLine(); + printer.print(String.format("Cluster ID: %s", summary.getClusterId())).endOfLine(); + printer.print("RegionServer(s): ") + .startBold().print(Integer.toString(summary.getServers())).stopBold() + .print(" total, ") + .startBold().print(Integer.toString(summary.getLiveServers())).stopBold() + .print(" live, ") + .startBold().print(Integer.toString(summary.getDeadServers())).stopBold() + .print(" dead").endOfLine(); + printer.print("RegionCount: ") + .startBold().print(Integer.toString(summary.getRegionCount())).stopBold() + .print(" total, ") + .startBold().print(Integer.toString(summary.getRitCount())).stopBold() + .print(" rit").endOfLine(); + printer.print("Average Cluster Load: ") + .startBold().print(String.format("%.2f", summary.getAverageLoad())).stopBold().endOfLine(); + printer.print("Aggregate Request/s: ") + .startBold().print(Long.toString(summary.getAggregateRequestPerSecond())).stopBold() + .endOfLine(); + } + + private void showRecords(List<Header>
headers, List<Record> records, Record selectedRecord) { + TerminalPrinter printer = terminal.getTerminalPrinter(RECORD_START_ROW); + List<String> buf = new ArrayList<>(headers.size()); + for (int i = 0; i < pageSize; i++) { + if (i < records.size()) { + Record record = records.get(i); + buf.clear(); + for (Header header : headers) { + String value = ""; + if (record.containsKey(header.getField())) { + value = record.get(header.getField()).asString(); + } + + buf.add(limitLineLength(String.format(header.format(), value), header.getLength())); + } + + String recordString = String.join(" ", buf); + if (!recordString.isEmpty()) { + recordString += " "; + } + + if (record == selectedRecord) { + printer.startHighlight().print(recordString).stopHighlight().endOfLine(); + } else { + printer.print(recordString).endOfLine(); + } + } else { + printer.endOfLine(); + } + } + } + + private void showHeaders(List<Header>
headers) { + String header = headers.stream() + .map(h -> String.format(h.format(), h.getField().getHeader())) + .collect(Collectors.joining(" ")); + + if (!header.isEmpty()) { + header += " "; + } + + terminal.getTerminalPrinter(RECORD_HEADER_ROW).startHighlight() + .print(header).stopHighlight().endOfLine(); + } + + private String limitLineLength(String line, int length) { + if (line.length() > length) { + return line.substring(0, length - 1) + "+"; + } + return line; + } + + private void clearMessage() { + terminal.getTerminalPrinter(MESSAGE_ROW).print("").endOfLine(); + } + + private ScreenView unknownCommandMessage() { + cancelTimer(); + return topScreenPresenter.goToMessageMode(screen, terminal, MESSAGE_ROW, + "Unknown command - try 'h' for help"); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/CursorPosition.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/CursorPosition.java new file mode 100644 index 000000000000..c88a237f291f --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/CursorPosition.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * A 2-d position in 'terminal space'. + */ +@InterfaceAudience.Private +public class CursorPosition { + private final int column; + private final int row; + + public CursorPosition(int column, int row) { + this.column = column; + this.row = row; + } + + public int getColumn() { + return column; + } + + public int getRow() { + return row; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/KeyEvent.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/KeyEvent.java new file mode 100644 index 000000000000..b616cb8d5f65 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/KeyEvent.java @@ -0,0 +1,110 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Represents the user pressing a key on the keyboard. + */ +@InterfaceAudience.Private +public class KeyEvent { + public enum Type { + Character, + Escape, + Backspace, + ArrowLeft, + ArrowRight, + ArrowUp, + ArrowDown, + Insert, + Delete, + Home, + End, + PageUp, + PageDown, + Tab, + ReverseTab, + Enter, + F1, + F2, + F3, + F4, + F5, + F6, + F7, + F8, + F9, + F10, + F11, + F12, + F13, + F14, + F15, + F16, + F17, + F18, + F19, + Unknown + } + + private final Type type; + private final Character character; + private final boolean ctrlDown; + private final boolean altDown; + private final boolean shiftDown; + private final long eventTime; + + public KeyEvent(Type type, @Nullable Character character, boolean ctrlDown, boolean altDown, + boolean shiftDown, long eventTime) { + this.type = Objects.requireNonNull(type); + this.character = character; + this.ctrlDown = ctrlDown; + this.altDown = altDown; + this.shiftDown = shiftDown; + this.eventTime = eventTime; + } + + public Type getType() { + return type; + } + + @Nullable + public Character getCharacter() { + return character; + } + + public boolean isCtrlDown() { + return ctrlDown; + } + + public boolean isAltDown() { + return altDown; + } + + public boolean isShiftDown() { + return shiftDown; + } + + public long getEventTime() { + return eventTime; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/SGR.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/SGR.java new file mode 100644 index 000000000000..f944f6cbcaad --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/SGR.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Select Graphic Rendition. + */ +@InterfaceAudience.Private +public enum SGR { + BOLD, REVERSE, UNDERLINE, BLINK, BORDERED, FRAKTUR, CROSSED_OUT, CIRCLED, ITALIC +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Terminal.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Terminal.java new file mode 100644 index 000000000000..dd8134acf8b7 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/Terminal.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Closeable; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * The terminal interface that is an abstraction of terminal screen. + */ +@InterfaceAudience.Private +public interface Terminal extends Closeable { + void clear(); + void refresh(); + TerminalSize getSize(); + @Nullable TerminalSize doResizeIfNecessary(); + KeyEvent readKeyEvent(); + @Nullable KeyEvent pollKeyEvent(); + CursorPosition getCursorPosition(); + void setCursorPosition(@Nullable CursorPosition cursorPosition); + Terminal print(int column, int row, String value); + Terminal setForegroundColor(TextColor textColor); + Terminal setBackgroundColor(TextColor textColor); + Terminal enableModifiers(SGR... modifiers); + Terminal disableModifiers(SGR... modifiers); + + default TerminalPrinter getTerminalPrinter(int startRow) { + return new TerminalPrinter(this, getSize().getColumns(), startRow); + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinter.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinter.java new file mode 100644 index 000000000000..e9ccdef4cbbe --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalPrinter.java @@ -0,0 +1,140 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import java.util.Objects; +import org.apache.commons.lang3.StringUtils; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Utility class for printing to terminal. 
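+ * + * <p>Typical usage (illustrative only): + * {@code terminal.getTerminalPrinter(0).startBold().print("RegionServer(s): ").stopBold().print(3).endOfLine();} + * writes a partly bold line on row 0, and endOfLine() pads the rest of the row with spaces.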
+ */ +@InterfaceAudience.Private +public class TerminalPrinter { + private final Terminal terminal; + private final int terminalColumns; + private int row; + private int column; + + TerminalPrinter(Terminal terminal, int terminalColumns, int startRow) { + this.terminal = Objects.requireNonNull(terminal); + this.terminalColumns = terminalColumns; + this.row = startRow; + } + + public TerminalPrinter print(Object value) { + String string = value.toString(); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter print(char value) { + String string = Character.toString(value); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter print(short value) { + String string = Short.toString(value); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter print(int value) { + String string = Integer.toString(value); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter print(long value) { + String string = Long.toString(value); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter print(float value) { + String string = Float.toString(value); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter print(double value) { + String string = Double.toString(value); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter printFormat(String format, Object... args) { + String string = String.format(format, args); + terminal.print(column, row, string); + column += string.length(); + return this; + } + + public TerminalPrinter setForegroundColor(TextColor textColor) { + terminal.setForegroundColor(textColor); + return this; + } + + public TerminalPrinter setBackgroundColor(TextColor textColor) { + terminal.setBackgroundColor(textColor); + return this; + } + + public TerminalPrinter startHighlight() { + setForegroundColor(TextColor.BLACK); + return setBackgroundColor(TextColor.WHITE); + } + + public TerminalPrinter stopHighlight() { + setForegroundColor(TextColor.DEFAULT); + return setBackgroundColor(TextColor.DEFAULT); + } + + public TerminalPrinter enableModifiers(SGR... modifiers) { + terminal.enableModifiers(modifiers); + return this; + } + + public TerminalPrinter disableModifiers(SGR... modifiers) { + terminal.disableModifiers(modifiers); + return this; + } + + public TerminalPrinter startBold() { + return enableModifiers(SGR.BOLD); + } + + public TerminalPrinter stopBold() { + return disableModifiers(SGR.BOLD); + } + + public void endOfLine() { + terminal.print(column, row, StringUtils.repeat(" ", terminalColumns - column)); + row += 1; + column = 0; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalSize.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalSize.java new file mode 100644 index 000000000000..51467454dafb --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TerminalSize.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Terminal dimensions in 2-d space, measured in number of rows and columns. + */ +@InterfaceAudience.Private +public class TerminalSize { + private final int columns; + private final int rows; + + public TerminalSize(int columns, int rows) { + this.columns = columns; + this.rows = rows; + } + + public int getColumns() { + return columns; + } + + public int getRows() { + return rows; + } +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TextColor.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TextColor.java new file mode 100644 index 000000000000..d3796d1154d8 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/TextColor.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal; + +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Terminal color definitions. + */ +@InterfaceAudience.Private +public enum TextColor { + BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, DEFAULT +} diff --git a/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/LanternaTerminal.java b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/LanternaTerminal.java new file mode 100644 index 000000000000..2b0b57541369 --- /dev/null +++ b/hbase-hbtop/src/main/java/org/apache/hadoop/hbase/hbtop/terminal/impl/LanternaTerminal.java @@ -0,0 +1,236 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.terminal.impl; + +import com.googlecode.lanterna.TerminalPosition; +import com.googlecode.lanterna.graphics.TextGraphics; +import com.googlecode.lanterna.input.KeyStroke; +import com.googlecode.lanterna.input.KeyType; +import com.googlecode.lanterna.screen.TerminalScreen; +import com.googlecode.lanterna.terminal.DefaultTerminalFactory; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.hbtop.terminal.CursorPosition; +import org.apache.hadoop.hbase.hbtop.terminal.KeyEvent; +import org.apache.hadoop.hbase.hbtop.terminal.SGR; +import org.apache.hadoop.hbase.hbtop.terminal.Terminal; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize; +import org.apache.hadoop.hbase.hbtop.terminal.TextColor; +import org.apache.yetus.audience.InterfaceAudience; + + +/** + * Implementation of {@link Terminal} using Lanterna library. + */ +@InterfaceAudience.Private +public class LanternaTerminal implements Terminal { + + private final com.googlecode.lanterna.screen.Screen screen; + private final TextGraphics textGraphics; + + public LanternaTerminal() { + try { + DefaultTerminalFactory defaultTerminalFactory = new DefaultTerminalFactory(); + com.googlecode.lanterna.terminal.Terminal terminal = defaultTerminalFactory.createTerminal(); + screen = new TerminalScreen(terminal); + screen.startScreen(); + textGraphics = screen.newTextGraphics(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + public void close() { + try { + screen.stopScreen(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + public void clear() { + screen.clear(); + } + + @Override + public void refresh() { + try { + screen.refresh(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + public TerminalSize getSize() { + com.googlecode.lanterna.TerminalSize terminalSize = screen.getTerminalSize(); + return new TerminalSize(terminalSize.getColumns(), terminalSize.getRows()); + } + + @Nullable + @Override + public TerminalSize doResizeIfNecessary() { + com.googlecode.lanterna.TerminalSize terminalSize = screen.doResizeIfNecessary(); + return terminalSize == null ? 
null : + new TerminalSize(terminalSize.getColumns(), terminalSize.getRows()); + } + + @Override + public KeyEvent readKeyEvent() { + KeyStroke keyStroke; + try { + keyStroke = screen.readInput(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + return convertToKeyEvent(keyStroke); + } + + @Nullable + @Override + public KeyEvent pollKeyEvent() { + KeyStroke keyStroke; + try { + keyStroke = screen.pollInput(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + if (keyStroke == null) { + return null; + } + + return convertToKeyEvent(keyStroke); + } + + private KeyEvent convertToKeyEvent(KeyStroke keyStroke) { + KeyEvent.Type type; + switch (keyStroke.getKeyType()) { + case Character: + case Escape: + case Backspace: + case ArrowLeft: + case ArrowRight: + case ArrowUp: + case ArrowDown: + case Insert: + case Delete: + case Home: + case End: + case PageUp: + case PageDown: + case Tab: + case ReverseTab: + case Enter: + case F1: + case F2: + case F3: + case F4: + case F5: + case F6: + case F7: + case F8: + case F9: + case F10: + case F11: + case F12: + case F13: + case F14: + case F15: + case F16: + case F17: + case F18: + case F19: + type = KeyEvent.Type.valueOf(keyStroke.getKeyType().toString()); + break; + + case EOF: + throw new UncheckedIOException(new IOException("EOF")); + + default: + type = KeyEvent.Type.Unknown; + break; + } + + Character character = null; + if (keyStroke.getKeyType() == KeyType.Character) { + character = keyStroke.getCharacter(); + } + + return new KeyEvent(type, character, keyStroke.isCtrlDown(), keyStroke.isAltDown(), + keyStroke.isShiftDown(), keyStroke.getEventTime()); + } + + @Override + public CursorPosition getCursorPosition() { + TerminalPosition terminalPosition = screen.getCursorPosition(); + return new CursorPosition(terminalPosition.getColumn(), terminalPosition.getRow()); + } + + @Override + public void setCursorPosition(@Nullable CursorPosition cursorPosition) { + screen.setCursorPosition(cursorPosition == null ? null : + new TerminalPosition(cursorPosition.getColumn(), cursorPosition.getRow())); + } + + @Override + public Terminal print(int column, int row, String value) { + textGraphics.putString(column, row, value); + return this; + } + + @Override + public Terminal setForegroundColor(TextColor textColor) { + textGraphics.setForegroundColor(convertTextColor(textColor)); + return this; + } + + @Override + public Terminal setBackgroundColor(TextColor textColor) { + textGraphics.setBackgroundColor(convertTextColor(textColor)); + return this; + } + + private com.googlecode.lanterna.TextColor.ANSI convertTextColor(TextColor textColor) { + return com.googlecode.lanterna.TextColor.ANSI.valueOf(textColor.name()); + } + + @Override + public Terminal enableModifiers(SGR... modifiers) { + textGraphics.enableModifiers(convertModifiers(modifiers)); + // return this (not null) so calls can be chained like the other fluent Terminal methods + return this; + } + + @Override + public Terminal disableModifiers(SGR... modifiers) { + textGraphics.disableModifiers(convertModifiers(modifiers)); + // return this (not null) so calls can be chained like the other fluent Terminal methods + return this; + } + + private com.googlecode.lanterna.SGR[] convertModifiers(SGR[] modifiers) { + return Stream.of(modifiers) + .map(m -> com.googlecode.lanterna.SGR.valueOf(m.name())) + .collect(Collectors.toList()) + .toArray(new com.googlecode.lanterna.SGR[modifiers.length]); + } +} diff --git a/hbase-hbtop/src/main/resources/log4j-hbtop.properties b/hbase-hbtop/src/main/resources/log4j-hbtop.properties new file mode 100644 index 000000000000..e40f600b9d36 --- /dev/null +++ b/hbase-hbtop/src/main/resources/log4j-hbtop.properties @@ -0,0 +1,8 @@ +log4j.rootLogger=WARN,console +log4j.threshold=WARN + +# console +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/FilterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/FilterTest.java new file mode 100644 index 000000000000..46c47bdfa2b6 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/FilterTest.java @@ -0,0 +1,206 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hbase.hbtop; + +import static org.hamcrest.CoreMatchers.hasItems; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.junit.Assert.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.Size; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class FilterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(FilterTest.class); + + @Test + public void testParseAndBuilder() { + testParseAndBuilder("REGION=region1", false, + Filter.newBuilder(Field.REGION).equal("region1")); + + testParseAndBuilder("REGION=", false, + Filter.newBuilder(Field.REGION).equal("")); + + testParseAndBuilder("!REGION=region1", false, + Filter.newBuilder(Field.REGION).notEqual("region1")); + + testParseAndBuilder("REGION==region2", true, + Filter.newBuilder(Field.REGION, true).doubleEquals("region2")); + + testParseAndBuilder("!REGION==region2", true, + Filter.newBuilder(Field.REGION, true).notDoubleEquals("region2")); + + testParseAndBuilder("#REQ/S>100", false, + Filter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).greater(100L)); + + testParseAndBuilder("!#REQ/S>100", false, + Filter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).notGreater(100L)); + + testParseAndBuilder("SF>=50MB", true, + Filter.newBuilder(Field.STORE_FILE_SIZE, true).greaterOrEqual("50MB")); + + testParseAndBuilder("!SF>=50MB", true, + Filter.newBuilder(Field.STORE_FILE_SIZE, true).notGreaterOrEqual("50MB")); + + testParseAndBuilder("#REQ/S<20", false, + Filter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).less(20L)); + + testParseAndBuilder("!#REQ/S<20", false, + Filter.newBuilder(Field.REQUEST_COUNT_PER_SECOND).notLess(20L)); + + testParseAndBuilder("%COMP<=50%", true, + Filter.newBuilder(Field.COMPACTION_PROGRESS, true).lessOrEqual("50%")); + + testParseAndBuilder("!%COMP<=50%", true, + Filter.newBuilder(Field.COMPACTION_PROGRESS, true).notLessOrEqual("50%")); + } + + private void testParseAndBuilder(String filterString, boolean ignoreCase, Filter expected) { + Filter actual = Filter.parse(filterString, ignoreCase); + assertThat(expected, is(actual)); + } + + @Test + public void testParseFailure() { + Filter filter = Filter.parse("REGIO=region1", false); + assertThat(filter, is(nullValue())); + + filter = Filter.parse("", false); + assertThat(filter, is(nullValue())); + + filter = Filter.parse("#REQ/S==aaa", false); + assertThat(filter, is(nullValue())); + + filter = Filter.parse("SF>=50", false); + assertThat(filter, is(nullValue())); + } + + @Test + public void testToString() { + testToString("REGION=region1"); + testToString("!REGION=region1"); + testToString("REGION==region2"); + testToString("!REGION==region2"); + testToString("#REQ/S>100"); + testToString("!#REQ/S>100"); + testToString("SF>=50.0MB"); + testToString("!SF>=50.0MB"); + testToString("#REQ/S<20"); + testToString("!#REQ/S<20"); + testToString("%COMP<=50.00%"); + testToString("!%COMP<=50.00%"); + } + + private void testToString(String filterString) { + Filter filter = Filter.parse(filterString, 
false); + assertThat(filter, is(notNullValue())); + assertThat(filterString, is(filter.toString())); + } + + @Test + public void testFilters() { + List records = createTestRecords(); + + testFilter(records, "REGION=region", false, + "region1", "region2", "region3", "region4", "region5"); + testFilter(records, "!REGION=region", false); + testFilter(records, "REGION=Region", false); + + testFilter(records, "REGION==region", false); + testFilter(records, "REGION==region1", false, "region1"); + testFilter(records, "!REGION==region1", false, "region2", "region3", "region4", "region5"); + + testFilter(records, "#REQ/S==100", false, "region1"); + testFilter(records, "#REQ/S>100", false, "region2", "region5"); + testFilter(records, "SF>=100MB", false, "region1", "region2", "region4", "region5"); + testFilter(records, "!#SF>=10", false, "region1", "region4"); + testFilter(records, "LOCALITY<0.5", false, "region5"); + testFilter(records, "%COMP<=50%", false, "region2", "region3", "region4", "region5"); + + testFilters(records, Arrays.asList("SF>=100MB", "#REQ/S>100"), false, + "region2", "region5"); + testFilters(records, Arrays.asList("%COMP<=50%", "!#SF>=10"), false, "region4"); + testFilters(records, Arrays.asList("!REGION==region1", "LOCALITY<0.5", "#REQ/S>100"), false, + "region5"); + } + + @Test + public void testFiltersIgnoreCase() { + List records = createTestRecords(); + + testFilter(records, "REGION=Region", true, + "region1", "region2", "region3", "region4", "region5"); + testFilter(records, "REGION=REGION", true, + "region1", "region2", "region3", "region4", "region5"); + } + + private List createTestRecords() { + List ret = new ArrayList<>(); + ret.add(createTestRecord("region1", 100L, new Size(100, Size.Unit.MEGABYTE), 2, 1.0f, 80f)); + ret.add(createTestRecord("region2", 120L, new Size(100, Size.Unit.GIGABYTE), 10, 0.5f, 20f)); + ret.add(createTestRecord("region3", 50L, new Size(500, Size.Unit.KILOBYTE), 15, 0.8f, 50f)); + ret.add(createTestRecord("region4", 90L, new Size(10, Size.Unit.TERABYTE), 5, 0.9f, 30f)); + ret.add(createTestRecord("region5", 200L, new Size(1, Size.Unit.PETABYTE), 13, 0.1f, 40f)); + return ret; + } + + private Record createTestRecord(String region, long requestCountPerSecond, + Size storeFileSize, int numStoreFiles, float locality, float compactionProgress) { + Record ret = new Record(); + ret.put(Field.REGION, region); + ret.put(Field.REQUEST_COUNT_PER_SECOND, requestCountPerSecond); + ret.put(Field.STORE_FILE_SIZE, storeFileSize); + ret.put(Field.NUM_STORE_FILES, numStoreFiles); + ret.put(Field.LOCALITY, locality); + ret.put(Field.COMPACTION_PROGRESS, compactionProgress); + return ret; + } + + private void testFilter(List records, String filterString, boolean ignoreCase, + String... expectedRegions) { + testFilters(records, Collections.singletonList(filterString), ignoreCase, expectedRegions); + } + + private void testFilters(List records, List filterStrings, boolean ignoreCase, + String... 
expectedRegions) { + List actual = + records.stream().filter(r -> filterStrings.stream() + .map(f -> Filter.parse(f, ignoreCase)) + .allMatch(f -> f.execute(r))) + .map(r -> r.get(Field.REGION).asString()) + .collect(Collectors.toList()); + assertThat(actual, hasItems(expectedRegions)); + assertThat(actual.size(), is(expectedRegions.length)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordTest.java new file mode 100644 index 000000000000..a7930cc19aeb --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/RecordTest.java @@ -0,0 +1,62 @@ +package org.apache.hadoop.hbase.hbtop; + +import static org.apache.hadoop.hbase.hbtop.Record.entry; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class RecordTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(RecordTest.class); + + @Test + public void testCombine() { + Record record1 = Record.ofEntries( + entry(Field.TABLE, "tableName"), + entry(Field.REGION_COUNT, 3), + entry(Field.REQUEST_COUNT_PER_SECOND, 100L) + ); + + Record record2 = Record.ofEntries( + entry(Field.TABLE, "tableName"), + entry(Field.REGION_COUNT, 5), + entry(Field.REQUEST_COUNT_PER_SECOND, 500L) + ); + + Record actual = record1.combine(record2); + + assertThat(actual.get(Field.TABLE).asString(), is("tableName")); + assertThat(actual.get(Field.REGION_COUNT).asInt(), is(8)); + assertThat(actual.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), is(600L)); + } + + @Test + public void testToImmutable() { + Record record = Record.ofEntries( + entry(Field.TABLE, "tableName"), + entry(Field.REGION_COUNT, 3), + entry(Field.REQUEST_COUNT_PER_SECOND, 100L) + ); + + record = record.toImmutable(); + + assertThat(record.get(Field.TABLE).asString(), is("tableName")); + + try { + record.put(Field.TABLE, "tableName2"); + fail(); + } catch(UnsupportedOperationException ignored) { + } + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/TestUtils.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/TestUtils.java new file mode 100644 index 000000000000..43a84474027b --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/TestUtils.java @@ -0,0 +1,402 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.commons.lang3.time.FastDateFormat; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.ClusterMetricsBuilder; +import org.apache.hadoop.hbase.RegionMetrics; +import org.apache.hadoop.hbase.RegionMetricsBuilder; +import org.apache.hadoop.hbase.ServerMetrics; +import org.apache.hadoop.hbase.ServerMetricsBuilder; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.Size; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.RegionInfoBuilder; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.screen.top.Summary; +import org.apache.hadoop.hbase.master.RegionState; +import org.apache.hadoop.hbase.util.Bytes; + + +public final class TestUtils { + + private TestUtils() { + } + + public static ClusterMetrics createDummyClusterMetrics() { + Map serverMetricsMap = new HashMap<>(); + + // host1 + List regionMetricsList = new ArrayList<>(); + regionMetricsList.add(createRegionMetrics( + "table1,,1.00000000000000000000000000000000.", + 100, 50, 100, + new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1, + new Size(100, Size.Unit.MEGABYTE), 0.1f, 100, 100, "2019-07-22 00:00:00")); + regionMetricsList.add(createRegionMetrics( + "table2,1,2.00000000000000000000000000000001.", + 200, 100, 200, + new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2, + new Size(200, Size.Unit.MEGABYTE), 0.2f, 50, 200, "2019-07-22 00:00:01")); + regionMetricsList.add(createRegionMetrics( + "namespace:table3,,3_0001.00000000000000000000000000000002.", + 300, 150, 300, + new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3, + new Size(300, Size.Unit.MEGABYTE), 0.3f, 100, 300, "2019-07-22 00:00:02")); + + ServerName host1 = ServerName.valueOf("host1.apache.com", 1000, 1); + serverMetricsMap.put(host1, createServerMetrics(host1, 100, + new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 100, + regionMetricsList)); + + // host2 + regionMetricsList.clear(); + regionMetricsList.add(createRegionMetrics( + "table1,1,4.00000000000000000000000000000003.", + 100, 50, 100, + new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1, + new Size(100, Size.Unit.MEGABYTE), 0.4f, 50, 100, "2019-07-22 00:00:03")); + regionMetricsList.add(createRegionMetrics( + "table2,,5.00000000000000000000000000000004.", + 200, 100, 200, + new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2, + new Size(200, Size.Unit.MEGABYTE), 0.5f, 150, 200, "2019-07-22 00:00:04")); + regionMetricsList.add(createRegionMetrics( + "namespace:table3,,6.00000000000000000000000000000005.", + 300, 150, 300, + new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3, + new Size(300, Size.Unit.MEGABYTE), 0.6f, 200, 300, "2019-07-22 00:00:05")); + + ServerName host2 = ServerName.valueOf("host2.apache.com", 1001, 2); + serverMetricsMap.put(host2, createServerMetrics(host2, 200, + new Size(16, Size.Unit.GIGABYTE), new Size(32, Size.Unit.GIGABYTE), 200, + regionMetricsList)); + + ServerName host3 = ServerName.valueOf("host3.apache.com", 1002, 3); + return ClusterMetricsBuilder.newBuilder() + 
.setHBaseVersion("3.0.0-SNAPSHOT") + .setClusterId("01234567-89ab-cdef-0123-456789abcdef") + .setLiveServerMetrics(serverMetricsMap) + .setDeadServerNames(Collections.singletonList(host3)) + .setRegionsInTransition(Collections.singletonList( + new RegionState(RegionInfoBuilder.newBuilder(TableName.valueOf("table4")) + .setStartKey(new byte [0]) + .setEndKey(new byte [0]) + .setOffline(true) + .setReplicaId(0) + .setRegionId(0) + .setSplit(false) + .build(), + RegionState.State.OFFLINE, host3))) + .build(); + } + + private static RegionMetrics createRegionMetrics(String regionName, long readRequestCount, + long filteredReadRequestCount, long writeRequestCount, Size storeFileSize, + Size uncompressedStoreFileSize, int storeFileCount, Size memStoreSize, float locality, + long compactedCellCount, long compactingCellCount, String lastMajorCompactionTime) { + + FastDateFormat df = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss"); + try { + return RegionMetricsBuilder.newBuilder(Bytes.toBytes(regionName)) + .setReadRequestCount(readRequestCount) + .setFilteredReadRequestCount(filteredReadRequestCount) + .setWriteRequestCount(writeRequestCount).setStoreFileSize(storeFileSize) + .setUncompressedStoreFileSize(uncompressedStoreFileSize).setStoreFileCount(storeFileCount) + .setMemStoreSize(memStoreSize).setDataLocality(locality) + .setCompactedCellCount(compactedCellCount).setCompactingCellCount(compactingCellCount) + .setLastMajorCompactionTimestamp(df.parse(lastMajorCompactionTime).getTime()).build(); + } catch (ParseException e) { + throw new IllegalArgumentException(e); + } + } + + private static ServerMetrics createServerMetrics(ServerName serverName, long reportTimestamp, + Size usedHeapSize, Size maxHeapSize, long requestCountPerSecond, + List regionMetricsList) { + + return ServerMetricsBuilder.newBuilder(serverName) + .setReportTimestamp(reportTimestamp) + .setUsedHeapSize(usedHeapSize) + .setMaxHeapSize(maxHeapSize) + .setRequestCountPerSecond(requestCountPerSecond) + .setRegionMetrics(regionMetricsList).build(); + } + + public static void assertRecordsInRegionMode(List records) { + assertThat(records.size(), is(6)); + + for (Record record : records) { + switch (record.get(Field.REGION_NAME).asString()) { + case "table1,,1.00000000000000000000000000000000.": + assertRecordInRegionMode(record, "default", "1", "", "table1", + "00000000000000000000000000000000", "host1:1000", "host1.apache.com,1000,1",0L, + 0L, 0L, 0L, new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1, + new Size(100, Size.Unit.MEGABYTE), 0.1f, "", 100L, 100L, 100f, + "2019-07-22 00:00:00"); + break; + + case "table1,1,4.00000000000000000000000000000003.": + assertRecordInRegionMode(record, "default", "4", "", "table1", + "00000000000000000000000000000003", "host2:1001", "host2.apache.com,1001,2",0L, + 0L, 0L, 0L, new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1, + new Size(100, Size.Unit.MEGABYTE), 0.4f, "1", 100L, 50L, 50f, + "2019-07-22 00:00:03"); + break; + + case "table2,,5.00000000000000000000000000000004.": + assertRecordInRegionMode(record, "default", "5", "", "table2", + "00000000000000000000000000000004", "host2:1001", "host2.apache.com,1001,2",0L, + 0L, 0L, 0L, new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2, + new Size(200, Size.Unit.MEGABYTE), 0.5f, "", 200L, 150L, 75f, + "2019-07-22 00:00:04"); + break; + + case "table2,1,2.00000000000000000000000000000001.": + assertRecordInRegionMode(record, "default", "2", "", "table2", + 
"00000000000000000000000000000001", "host1:1000", "host1.apache.com,1000,1",0L, + 0L, 0L, 0L, new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2, + new Size(200, Size.Unit.MEGABYTE), 0.2f, "1", 200L, 50L, 25f, + "2019-07-22 00:00:01"); + break; + + case "namespace:table3,,6.00000000000000000000000000000005.": + assertRecordInRegionMode(record, "namespace", "6", "", "table3", + "00000000000000000000000000000005", "host2:1001", "host2.apache.com,1001,2",0L, + 0L, 0L, 0L, new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3, + new Size(300, Size.Unit.MEGABYTE), 0.6f, "", 300L, 200L, 66.66667f, + "2019-07-22 00:00:05"); + break; + + case "namespace:table3,,3_0001.00000000000000000000000000000002.": + assertRecordInRegionMode(record, "namespace", "3", "1", "table3", + "00000000000000000000000000000002", "host1:1000", "host1.apache.com,1000,1",0L, + 0L, 0L, 0L, new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3, + new Size(300, Size.Unit.MEGABYTE), 0.3f, "", 300L, 100L, 33.333336f, + "2019-07-22 00:00:02"); + break; + + default: + fail(); + } + } + } + + private static void assertRecordInRegionMode(Record record, String namespace, String startCode, + String replicaId, String table, String region, String regionServer, String longRegionServer, + long requestCountPerSecond, long readRequestCountPerSecond, + long filteredReadRequestCountPerSecond, long writeCountRequestPerSecond, + Size storeFileSize, Size uncompressedStoreFileSize, int numStoreFiles, + Size memStoreSize, float Locality, String startKey, long compactingCellCount, + long compactedCellCount, float compactionProgress, String lastMajorCompactionTime) { + assertThat(record.size(), is(22)); + assertThat(record.get(Field.NAMESPACE).asString(), is(namespace)); + assertThat(record.get(Field.START_CODE).asString(), is(startCode)); + assertThat(record.get(Field.REPLICA_ID).asString(), is(replicaId)); + assertThat(record.get(Field.TABLE).asString(), is(table)); + assertThat(record.get(Field.REGION).asString(), is(region)); + assertThat(record.get(Field.REGION_SERVER).asString(), is(regionServer)); + assertThat(record.get(Field.LONG_REGION_SERVER).asString(), is(longRegionServer)); + assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), + is(requestCountPerSecond)); + assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(readRequestCountPerSecond)); + assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(filteredReadRequestCountPerSecond)); + assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(), + is(writeCountRequestPerSecond)); + assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize)); + assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(), + is(uncompressedStoreFileSize)); + assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles)); + assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize)); + assertThat(record.get(Field.LOCALITY).asFloat(), is(Locality)); + assertThat(record.get(Field.START_KEY).asString(), is(startKey)); + assertThat(record.get(Field.COMPACTING_CELL_COUNT).asLong(), is(compactingCellCount)); + assertThat(record.get(Field.COMPACTED_CELL_COUNT).asLong(), is(compactedCellCount)); + assertThat(record.get(Field.COMPACTION_PROGRESS).asFloat(), is(compactionProgress)); + assertThat(record.get(Field.LAST_MAJOR_COMPACTION_TIME).asString(), + is(lastMajorCompactionTime)); + } + + public static void assertRecordsInNamespaceMode(List 
records) { + assertThat(records.size(), is(2)); + + for (Record record : records) { + switch (record.get(Field.NAMESPACE).asString()) { + case "default": + assertRecordInNamespaceMode(record, 0L, 0L, 0L, 0L, new Size(600, Size.Unit.MEGABYTE), + new Size(1200, Size.Unit.MEGABYTE), 6, new Size(600, Size.Unit.MEGABYTE), 4); + break; + + case "namespace": + assertRecordInNamespaceMode(record, 0L, 0L, 0L, 0L, new Size(600, Size.Unit.MEGABYTE), + new Size(1200, Size.Unit.MEGABYTE), 6, new Size(600, Size.Unit.MEGABYTE), 2); + break; + + default: + fail(); + } + } + } + + private static void assertRecordInNamespaceMode(Record record, long requestCountPerSecond, + long readRequestCountPerSecond, long filteredReadRequestCountPerSecond, + long writeCountRequestPerSecond, Size storeFileSize, Size uncompressedStoreFileSize, + int numStoreFiles, Size memStoreSize, int regionCount) { + assertThat(record.size(), is(10)); + assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), + is(requestCountPerSecond)); + assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(readRequestCountPerSecond)); + assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(filteredReadRequestCountPerSecond)); + assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(), + is(writeCountRequestPerSecond)); + assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize)); + assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(), + is(uncompressedStoreFileSize)); + assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles)); + assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize)); + assertThat(record.get(Field.REGION_COUNT).asInt(), is(regionCount)); + } + + public static void assertRecordsInTableMode(List records) { + assertThat(records.size(), is(3)); + + for (Record record : records) { + String tableName = String.format("%s:%s", record.get(Field.NAMESPACE).asString(), + record.get(Field.TABLE).asString()); + + switch (tableName) { + case "default:table1": + assertRecordInTableMode(record, 0L, 0L, 0L, 0L, new Size(200, Size.Unit.MEGABYTE), + new Size(400, Size.Unit.MEGABYTE), 2, new Size(200, Size.Unit.MEGABYTE), 2); + break; + + case "default:table2": + assertRecordInTableMode(record, 0L, 0L, 0L, 0L, new Size(400, Size.Unit.MEGABYTE), + new Size(800, Size.Unit.MEGABYTE), 4, new Size(400, Size.Unit.MEGABYTE), 2); + break; + + case "namespace:table3": + assertRecordInTableMode(record, 0L, 0L, 0L, 0L, new Size(600, Size.Unit.MEGABYTE), + new Size(1200, Size.Unit.MEGABYTE), 6, new Size(600, Size.Unit.MEGABYTE), 2); + break; + + default: + fail(); + } + } + } + + private static void assertRecordInTableMode(Record record, long requestCountPerSecond, + long readRequestCountPerSecond, long filteredReadRequestCountPerSecond, + long writeCountRequestPerSecond, Size storeFileSize, Size uncompressedStoreFileSize, + int numStoreFiles, Size memStoreSize, int regionCount) { + assertThat(record.size(), is(11)); + assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), + is(requestCountPerSecond)); + assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(readRequestCountPerSecond)); + assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(filteredReadRequestCountPerSecond)); + assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(), + is(writeCountRequestPerSecond)); + assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize)); + 
assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(), + is(uncompressedStoreFileSize)); + assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles)); + assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize)); + assertThat(record.get(Field.REGION_COUNT).asInt(), is(regionCount)); + } + + public static void assertRecordsInRegionServerMode(List records) { + assertThat(records.size(), is(2)); + + for (Record record : records) { + switch (record.get(Field.REGION_SERVER).asString()) { + case "host1:1000": + assertRecordInRegionServerMode(record, "host1.apache.com,1000,1", 0L, 0L, 0L, 0L, + new Size(600, Size.Unit.MEGABYTE), new Size(1200, Size.Unit.MEGABYTE), 6, + new Size(600, Size.Unit.MEGABYTE), 3, new Size(100, Size.Unit.MEGABYTE), + new Size(200, Size.Unit.MEGABYTE)); + break; + + case "host2:1001": + assertRecordInRegionServerMode(record, "host2.apache.com,1001,2", 0L, 0L, 0L, 0L, + new Size(600, Size.Unit.MEGABYTE), new Size(1200, Size.Unit.MEGABYTE), 6, + new Size(600, Size.Unit.MEGABYTE), 3, new Size(16, Size.Unit.GIGABYTE), + new Size(32, Size.Unit.GIGABYTE)); + break; + + default: + fail(); + } + } + } + + private static void assertRecordInRegionServerMode(Record record, String longRegionServer, + long requestCountPerSecond, long readRequestCountPerSecond, + long filteredReadRequestCountPerSecond, long writeCountRequestPerSecond, + Size storeFileSize, Size uncompressedStoreFileSize, int numStoreFiles, + Size memStoreSize, int regionCount, Size usedHeapSize, Size maxHeapSize) { + assertThat(record.size(), is(13)); + assertThat(record.get(Field.LONG_REGION_SERVER).asString(), + is(longRegionServer)); + assertThat(record.get(Field.REQUEST_COUNT_PER_SECOND).asLong(), + is(requestCountPerSecond)); + assertThat(record.get(Field.READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(readRequestCountPerSecond)); + assertThat(record.get(Field.FILTERED_READ_REQUEST_COUNT_PER_SECOND).asLong(), + is(filteredReadRequestCountPerSecond)); + assertThat(record.get(Field.WRITE_REQUEST_COUNT_PER_SECOND).asLong(), + is(writeCountRequestPerSecond)); + assertThat(record.get(Field.STORE_FILE_SIZE).asSize(), is(storeFileSize)); + assertThat(record.get(Field.UNCOMPRESSED_STORE_FILE_SIZE).asSize(), + is(uncompressedStoreFileSize)); + assertThat(record.get(Field.NUM_STORE_FILES).asInt(), is(numStoreFiles)); + assertThat(record.get(Field.MEM_STORE_SIZE).asSize(), is(memStoreSize)); + assertThat(record.get(Field.REGION_COUNT).asInt(), is(regionCount)); + assertThat(record.get(Field.USED_HEAP_SIZE).asSize(), is(usedHeapSize)); + assertThat(record.get(Field.MAX_HEAP_SIZE).asSize(), is(maxHeapSize)); + } + + public static void assertSummary(Summary summary) { + assertThat(summary.getVersion(), is("3.0.0-SNAPSHOT")); + assertThat(summary.getClusterId(), is("01234567-89ab-cdef-0123-456789abcdef")); + assertThat(summary.getServers(), is(3)); + assertThat(summary.getLiveServers(), is(2)); + assertThat(summary.getDeadServers(), is(1)); + assertThat(summary.getRegionCount(), is(6)); + assertThat(summary.getRitCount(), is(1)); + assertThat(summary.getAverageLoad(), is(3.0)); + assertThat(summary.getAggregateRequestPerSecond(), is(300L)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/field/FieldValueTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/field/FieldValueTest.java new file mode 100644 index 000000000000..263edf1166a5 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/field/FieldValueTest.java @@ -0,0 +1,255 
@@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.field; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.Size; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class FieldValueTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(FieldValueTest.class); + + @Test + public void testParseAndAsSomethingMethod() { + // String + FieldValue stringFieldValue = new FieldValue("aaa", FieldValueType.STRING); + assertThat(stringFieldValue.asString(), is("aaa")); + + try { + new FieldValue(1, FieldValueType.STRING); + fail(); + } catch (IllegalArgumentException ignored) { + } + + // Integer + FieldValue integerFieldValue = new FieldValue(100, FieldValueType.INTEGER); + assertThat(integerFieldValue.asInt(), is(100)); + + integerFieldValue = new FieldValue("100", FieldValueType.INTEGER); + assertThat(integerFieldValue.asInt(), is(100)); + + try { + new FieldValue("aaa", FieldValueType.INTEGER); + fail(); + } catch (IllegalArgumentException ignored) { + } + + // Long + FieldValue longFieldValue = new FieldValue(100L, FieldValueType.LONG); + assertThat(longFieldValue.asLong(), is(100L)); + + longFieldValue = new FieldValue("100", FieldValueType.LONG); + assertThat(longFieldValue.asLong(), is(100L)); + + try { + new FieldValue("aaa", FieldValueType.LONG); + fail(); + } catch (IllegalArgumentException ignored) { + } + + try { + new FieldValue(100, FieldValueType.LONG); + fail(); + } catch (IllegalArgumentException ignored) { + } + + // Float + FieldValue floatFieldValue = new FieldValue(1.0f, FieldValueType.FLOAT); + assertThat(floatFieldValue.asFloat(), is(1.0f)); + + floatFieldValue = new FieldValue("1", FieldValueType.FLOAT); + assertThat(floatFieldValue.asFloat(), is(1.0f)); + + try { + new FieldValue("aaa", FieldValueType.FLOAT); + fail(); + } catch (IllegalArgumentException ignored) { + } + + try { + new FieldValue(1, FieldValueType.FLOAT); + fail(); + } catch (IllegalArgumentException ignored) { + } + + // Size + FieldValue sizeFieldValue = + new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE); + assertThat(sizeFieldValue.asString(), is("100.0MB")); + assertThat(sizeFieldValue.asSize(), is(new Size(100, Size.Unit.MEGABYTE))); + + sizeFieldValue = new FieldValue("100MB", FieldValueType.SIZE); + assertThat(sizeFieldValue.asString(), is("100.0MB")); + assertThat(sizeFieldValue.asSize(), is(new Size(100, 
Size.Unit.MEGABYTE))); + + try { + new FieldValue("100", FieldValueType.SIZE); + fail(); + } catch (IllegalArgumentException ignored) { + } + + try { + new FieldValue(100, FieldValueType.SIZE); + fail(); + } catch (IllegalArgumentException ignored) { + } + + // Percent + FieldValue percentFieldValue = + new FieldValue(100f, FieldValueType.PERCENT); + assertThat(percentFieldValue.asString(), is("100.00%")); + assertThat(percentFieldValue.asFloat(), is(100f)); + + percentFieldValue = new FieldValue("100%", FieldValueType.PERCENT); + assertThat(percentFieldValue.asString(), is("100.00%")); + assertThat(percentFieldValue.asFloat(), is(100f)); + + percentFieldValue = new FieldValue("100", FieldValueType.PERCENT); + assertThat(percentFieldValue.asString(), is("100.00%")); + assertThat(percentFieldValue.asFloat(), is(100f)); + + try { + new FieldValue(100, FieldValueType.PERCENT); + fail(); + } catch (IllegalArgumentException ignored) { + } + } + + @Test + public void testCompareTo() { + // String + FieldValue stringAFieldValue = new FieldValue("a", FieldValueType.STRING); + FieldValue stringAFieldValue2 = new FieldValue("a", FieldValueType.STRING); + FieldValue stringBFieldValue = new FieldValue("b", FieldValueType.STRING); + FieldValue stringCapitalAFieldValue = new FieldValue("A", FieldValueType.STRING); + + assertThat(stringAFieldValue.compareTo(stringAFieldValue2), is(0)); + assertThat(stringBFieldValue.compareTo(stringAFieldValue), is(1)); + assertThat(stringAFieldValue.compareTo(stringBFieldValue), is(-1)); + assertThat(stringAFieldValue.compareTo(stringCapitalAFieldValue), is(32)); + + // Integer + FieldValue integer1FieldValue = new FieldValue(1, FieldValueType.INTEGER); + FieldValue integer1FieldValue2 = new FieldValue(1, FieldValueType.INTEGER); + FieldValue integer2FieldValue = new FieldValue(2, FieldValueType.INTEGER); + + assertThat(integer1FieldValue.compareTo(integer1FieldValue2), is(0)); + assertThat(integer2FieldValue.compareTo(integer1FieldValue), is(1)); + assertThat(integer1FieldValue.compareTo(integer2FieldValue), is(-1)); + + // Long + FieldValue long1FieldValue = new FieldValue(1L, FieldValueType.LONG); + FieldValue long1FieldValue2 = new FieldValue(1L, FieldValueType.LONG); + FieldValue long2FieldValue = new FieldValue(2L, FieldValueType.LONG); + + assertThat(long1FieldValue.compareTo(long1FieldValue2), is(0)); + assertThat(long2FieldValue.compareTo(long1FieldValue), is(1)); + assertThat(long1FieldValue.compareTo(long2FieldValue), is(-1)); + + // Float + FieldValue float1FieldValue = new FieldValue(1.0f, FieldValueType.FLOAT); + FieldValue float1FieldValue2 = new FieldValue(1.0f, FieldValueType.FLOAT); + FieldValue float2FieldValue = new FieldValue(2.0f, FieldValueType.FLOAT); + + assertThat(float1FieldValue.compareTo(float1FieldValue2), is(0)); + assertThat(float2FieldValue.compareTo(float1FieldValue), is(1)); + assertThat(float1FieldValue.compareTo(float2FieldValue), is(-1)); + + // Size + FieldValue size100MBFieldValue = + new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE); + FieldValue size100MBFieldValue2 = + new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE); + FieldValue size200MBFieldValue = + new FieldValue(new Size(200, Size.Unit.MEGABYTE), FieldValueType.SIZE); + + assertThat(size100MBFieldValue.compareTo(size100MBFieldValue2), is(0)); + assertThat(size200MBFieldValue.compareTo(size100MBFieldValue), is(1)); + assertThat(size100MBFieldValue.compareTo(size200MBFieldValue), is(-1)); + + // Percent + FieldValue 
percent50FieldValue = new FieldValue(50.0f, FieldValueType.PERCENT); + FieldValue percent50FieldValue2 = new FieldValue(50.0f, FieldValueType.PERCENT); + FieldValue percent100FieldValue = new FieldValue(100.0f, FieldValueType.PERCENT); + + assertThat(percent50FieldValue.compareTo(percent50FieldValue2), is(0)); + assertThat(percent100FieldValue.compareTo(percent50FieldValue), is(1)); + assertThat(percent50FieldValue.compareTo(percent100FieldValue), is(-1)); + } + + @Test + public void testPlus() { + // String + FieldValue stringFieldValue = new FieldValue("a", FieldValueType.STRING); + FieldValue stringFieldValue2 = new FieldValue("b", FieldValueType.STRING); + assertThat(stringFieldValue.plus(stringFieldValue2).asString(), is("ab")); + + // Integer + FieldValue integerFieldValue = new FieldValue(1, FieldValueType.INTEGER); + FieldValue integerFieldValue2 = new FieldValue(2, FieldValueType.INTEGER); + assertThat(integerFieldValue.plus(integerFieldValue2).asInt(), is(3)); + + // Long + FieldValue longFieldValue = new FieldValue(1L, FieldValueType.LONG); + FieldValue longFieldValue2 = new FieldValue(2L, FieldValueType.LONG); + assertThat(longFieldValue.plus(longFieldValue2).asLong(), is(3L)); + + // Float + FieldValue floatFieldValue = new FieldValue(1.2f, FieldValueType.FLOAT); + FieldValue floatFieldValue2 = new FieldValue(2.2f, FieldValueType.FLOAT); + assertThat(floatFieldValue.plus(floatFieldValue2).asFloat(), is(3.4f)); + + // Size + FieldValue sizeFieldValue = + new FieldValue(new Size(100, Size.Unit.MEGABYTE), FieldValueType.SIZE); + FieldValue sizeFieldValue2 = + new FieldValue(new Size(200, Size.Unit.MEGABYTE), FieldValueType.SIZE); + assertThat(sizeFieldValue.plus(sizeFieldValue2).asString(), is("300.0MB")); + assertThat(sizeFieldValue.plus(sizeFieldValue2).asSize(), + is(new Size(300, Size.Unit.MEGABYTE))); + + // Percent + FieldValue percentFieldValue = new FieldValue(30f, FieldValueType.PERCENT); + FieldValue percentFieldValue2 = new FieldValue(60f, FieldValueType.PERCENT); + assertThat(percentFieldValue.plus(percentFieldValue2).asString(), is("90.00%")); + assertThat(percentFieldValue.plus(percentFieldValue2).asFloat(), is(90f)); + } + + @Test + public void testCompareToIgnoreCase() { + FieldValue stringAFieldValue = new FieldValue("a", FieldValueType.STRING); + FieldValue stringCapitalAFieldValue = new FieldValue("A", FieldValueType.STRING); + FieldValue stringCapitalBFieldValue = new FieldValue("B", FieldValueType.STRING); + + assertThat(stringAFieldValue.compareToIgnoreCase(stringCapitalAFieldValue), is(0)); + assertThat(stringCapitalBFieldValue.compareToIgnoreCase(stringAFieldValue), is(1)); + assertThat(stringAFieldValue.compareToIgnoreCase(stringCapitalBFieldValue), is(-1)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/ModeTestBase.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/ModeTestBase.java new file mode 100644 index 000000000000..7ad1a3a870a5 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/ModeTestBase.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.mode; + +import java.util.List; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.TestUtils; +import org.junit.Test; + + +public abstract class ModeTestBase { + + @Test + public void testGetRecords() { + List records = getMode().getRecords(TestUtils.createDummyClusterMetrics()); + assertRecords(records); + } + + protected abstract Mode getMode(); + protected abstract void assertRecords(List records); + + @Test + public void testDrillDown() { + List records = getMode().getRecords(TestUtils.createDummyClusterMetrics()); + for (Record record : records) { + assertDrillDown(record, getMode().drillDown(record)); + } + } + + protected abstract void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo); +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeTest.java new file mode 100644 index 000000000000..ace29b3f2340 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/NamespaceModeTest.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import java.util.List; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.TestUtils; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class NamespaceModeTest extends ModeTestBase { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(NamespaceModeTest.class); + + @Override + protected Mode getMode() { + return Mode.NAMESPACE; + } + + @Override + protected void assertRecords(List records) { + TestUtils.assertRecordsInNamespaceMode(records); + } + + @Override + protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) { + assertThat(drillDownInfo.getNextMode(), is(Mode.TABLE)); + assertThat(drillDownInfo.getInitialFilters().size(), is(1)); + + switch (currentRecord.get(Field.NAMESPACE).asString()) { + case "default": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("NAMESPACE==default")); + break; + + case "namespace": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), + is("NAMESPACE==namespace")); + break; + + default: + fail(); + } + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeTest.java new file mode 100644 index 000000000000..36ad3473eb9a --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionModeTest.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.junit.Assert.assertThat; + +import java.util.List; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.TestUtils; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class RegionModeTest extends ModeTestBase { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(RegionModeTest.class); + + @Override + protected Mode getMode() { + return Mode.REGION; + } + + @Override + protected void assertRecords(List records) { + TestUtils.assertRecordsInRegionMode(records); + } + + @Override + protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) { + assertThat(drillDownInfo, is(nullValue())); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeTest.java new file mode 100644 index 000000000000..93fa5c463b0c --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RegionServerModeTest.java @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import java.util.List; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.TestUtils; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class RegionServerModeTest extends ModeTestBase { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(RegionServerModeTest.class); + + @Override + protected Mode getMode() { + return Mode.REGION_SERVER; + } + + @Override + protected void assertRecords(List records) { + TestUtils.assertRecordsInRegionServerMode(records); + } + + @Override + protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) { + assertThat(drillDownInfo.getNextMode(), is(Mode.REGION)); + assertThat(drillDownInfo.getInitialFilters().size(), is(1)); + + switch (currentRecord.get(Field.REGION_SERVER).asString()) { + case "host1:1000": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("RS==host1:1000")); + break; + + case "host2:1001": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("RS==host2:1001")); + break; + + default: + fail(); + } + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecondTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecondTest.java new file mode 100644 index 000000000000..716ce260e9fc --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/RequestCountPerSecondTest.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class RequestCountPerSecondTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(RequestCountPerSecondTest.class); + + @Test + public void test() { + RequestCountPerSecond requestCountPerSecond = new RequestCountPerSecond(); + + requestCountPerSecond.refresh(1000, 300, 100, 200); + assertThat(requestCountPerSecond.getRequestCountPerSecond(), is(0L)); + assertThat(requestCountPerSecond.getReadRequestCountPerSecond(), is(0L)); + assertThat(requestCountPerSecond.getWriteRequestCountPerSecond(), is(0L)); + assertThat(requestCountPerSecond.getFilteredReadRequestCountPerSecond(), is(0L)); + + requestCountPerSecond.refresh(2000, 1300, 1100, 1200); + assertThat(requestCountPerSecond.getRequestCountPerSecond(), is(2000L)); + assertThat(requestCountPerSecond.getReadRequestCountPerSecond(), is(1000L)); + assertThat(requestCountPerSecond.getFilteredReadRequestCountPerSecond(), is(1000L)); + assertThat(requestCountPerSecond.getWriteRequestCountPerSecond(), is(1000L)); + + requestCountPerSecond.refresh(12000, 5300, 3100, 2200); + assertThat(requestCountPerSecond.getRequestCountPerSecond(), is(500L)); + assertThat(requestCountPerSecond.getReadRequestCountPerSecond(), is(400L)); + assertThat(requestCountPerSecond.getFilteredReadRequestCountPerSecond(), is(200L)); + assertThat(requestCountPerSecond.getWriteRequestCountPerSecond(), is(100L)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/TableModeTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/TableModeTest.java new file mode 100644 index 000000000000..11265715c015 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/mode/TableModeTest.java @@ -0,0 +1,80 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.mode; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + +import java.util.List; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.TestUtils; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class TableModeTest extends ModeTestBase { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TableModeTest.class); + + @Override + protected Mode getMode() { + return Mode.TABLE; + } + + @Override + protected void assertRecords(List records) { + TestUtils.assertRecordsInTableMode(records); + } + + @Override + protected void assertDrillDown(Record currentRecord, DrillDownInfo drillDownInfo) { + assertThat(drillDownInfo.getNextMode(), is(Mode.REGION)); + assertThat(drillDownInfo.getInitialFilters().size(), is(2)); + + String tableName = String.format("%s:%s", currentRecord.get(Field.NAMESPACE).asString(), + currentRecord.get(Field.TABLE).asString()); + + switch (tableName) { + case "default:table1": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("NAMESPACE==default")); + assertThat(drillDownInfo.getInitialFilters().get(1).toString(), is("TABLE==table1")); + break; + + case "default:table2": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), is("NAMESPACE==default")); + assertThat(drillDownInfo.getInitialFilters().get(1).toString(), is("TABLE==table2")); + break; + + case "namespace:table3": + assertThat(drillDownInfo.getInitialFilters().get(0).toString(), + is("NAMESPACE==namespace")); + assertThat(drillDownInfo.getInitialFilters().get(1).toString(), is("TABLE==table3")); + break; + + default: + fail(); + } + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenterTest.java new file mode 100644 index 000000000000..3075877e5191 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/field/FieldScreenPresenterTest.java @@ -0,0 +1,153 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.field; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.verify; + +import java.util.ArrayList; +import java.util.EnumMap; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.InOrder; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class FieldScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(FieldScreenPresenterTest.class); + + @Mock + private FieldScreenView fieldScreenView; + + private int sortFieldPosition = -1; + private List fields; + private EnumMap fieldDisplayMap; + + @Mock + private FieldScreenPresenter.ResultListener resultListener; + + @Mock + private TopScreenView topScreenView; + + private FieldScreenPresenter fieldScreenPresenter; + + @Before + public void setup() { + Field sortField = Mode.REGION.getDefaultSortField(); + fields = new ArrayList<>(Mode.REGION.getFieldInfos().stream() + .map(FieldInfo::getField).collect(Collectors.toList())); + + fieldDisplayMap = Mode.REGION.getFieldInfos().stream() + .collect(() -> new EnumMap<>(Field.class), + (r, fi) -> r.put(fi.getField(), fi.isDisplayByDefault()), (r1, r2) -> {}); + + fieldScreenPresenter = + new FieldScreenPresenter(fieldScreenView, sortField, fields, fieldDisplayMap, resultListener, + topScreenView); + + for (int i = 0; i < fields.size(); i++) { + Field field = fields.get(i); + if (field == sortField) { + sortFieldPosition = i; + break; + } + } + } + + @Test + public void testInit() { + fieldScreenPresenter.init(); + + int modeHeaderMaxLength = "#COMPingCell".length(); + int modeDescriptionMaxLength = "Filtered Read Request Count per second".length(); + + verify(fieldScreenView).showFieldScreen(eq("#REQ/S"), eq(fields), eq(fieldDisplayMap), + eq(sortFieldPosition), eq(modeHeaderMaxLength), eq(modeDescriptionMaxLength), eq(false)); + } + + @Test + public void testChangeSortField() { + fieldScreenPresenter.arrowUp(); + fieldScreenPresenter.setSortField(); + + fieldScreenPresenter.arrowDown(); + fieldScreenPresenter.arrowDown(); + fieldScreenPresenter.setSortField(); + + fieldScreenPresenter.pageUp(); + fieldScreenPresenter.setSortField(); + + fieldScreenPresenter.pageDown(); + fieldScreenPresenter.setSortField(); + + InOrder inOrder = inOrder(fieldScreenView); + inOrder.verify(fieldScreenView).showScreenDescription(eq("LRS")); + inOrder.verify(fieldScreenView).showScreenDescription(eq("#READ/S")); + inOrder.verify(fieldScreenView).showScreenDescription(eq(fields.get(0).getHeader())); + inOrder.verify(fieldScreenView).showScreenDescription( + 
eq(fields.get(fields.size() - 1).getHeader())); + } + + @Test + public void testSwitchFieldDisplay() { + fieldScreenPresenter.switchFieldDisplay(); + fieldScreenPresenter.switchFieldDisplay(); + + InOrder inOrder = inOrder(fieldScreenView); + inOrder.verify(fieldScreenView).showField(anyInt(), any(), eq(false), anyBoolean(), anyInt(), + anyInt(), anyBoolean()); + inOrder.verify(fieldScreenView).showField(anyInt(), any(), eq(true), anyBoolean(), anyInt(), + anyInt(), anyBoolean()); + } + + @Test + public void testChangeFieldsOrder() { + fieldScreenPresenter.turnOnMoveMode(); + fieldScreenPresenter.arrowUp(); + fieldScreenPresenter.turnOffMoveMode(); + + Field removed = fields.remove(sortFieldPosition); + fields.add(sortFieldPosition - 1, removed); + + assertThat(fieldScreenPresenter.transitionToNextScreen(), is(topScreenView)); + verify(resultListener).accept(any(), eq(fields), any()); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenterTest.java new file mode 100644 index 000000000000..7c920edf0861 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/help/HelpScreenPresenterTest.java @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.help; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class HelpScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(HelpScreenPresenterTest.class); + + private static final long TEST_REFRESH_DELAY = 5; + + @Mock + private HelpScreenView helpScreenView; + + @Mock + private TopScreenView topScreenView; + + private HelpScreenPresenter helpScreenPresenter; + + @Before + public void setup() { + helpScreenPresenter = new HelpScreenPresenter(helpScreenView, TEST_REFRESH_DELAY, + topScreenView); + } + + @Test + public void testInit() { + helpScreenPresenter.init(); + verify(helpScreenView).showHelpScreen(eq(TEST_REFRESH_DELAY), argThat(cds -> cds.length == 14)); + } + + @Test + public void testTransitionToTopScreen() { + assertThat(helpScreenPresenter.transitionToNextScreen(), is(topScreenView)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenterTest.java new file mode 100644 index 000000000000..f1343a02d58f --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/mode/ModeScreenPresenterTest.java @@ -0,0 +1,147 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.mode; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +import java.util.Arrays; +import java.util.function.Consumer; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.hbtop.screen.top.TopScreenView; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class ModeScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(ModeScreenPresenterTest.class); + + @Mock + private ModeScreenView modeScreenView; + + @Mock + private TopScreenView topScreenView; + + @Mock + private Consumer resultListener; + + private ModeScreenPresenter createModeScreenPresenter(Mode currentMode) { + return new ModeScreenPresenter(modeScreenView, currentMode, resultListener, topScreenView); + } + + @Test + public void testInit() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.init(); + + int modeHeaderMaxLength = Mode.REGION_SERVER.getHeader().length(); + int modeDescriptionMaxLength = Mode.REGION_SERVER.getDescription().length(); + + verify(modeScreenView).showModeScreen(eq(Mode.REGION), eq(Arrays.asList(Mode.values())), + eq(Mode.REGION.ordinal()) , eq(modeHeaderMaxLength), eq(modeDescriptionMaxLength)); + } + + @Test + public void testSelectNamespaceMode() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.arrowUp(); + modeScreenPresenter.arrowUp(); + + assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView)); + verify(resultListener).accept(eq(Mode.NAMESPACE)); + } + + @Test + public void testSelectTableMode() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.arrowUp(); + assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView)); + verify(resultListener).accept(eq(Mode.TABLE)); + } + + @Test + public void testSelectRegionMode() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.NAMESPACE); + + modeScreenPresenter.arrowDown(); + modeScreenPresenter.arrowDown(); + + assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView)); + verify(resultListener).accept(eq(Mode.REGION)); + } + + @Test + public void testSelectRegionServerMode() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.arrowDown(); + + assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView)); + verify(resultListener).accept(eq(Mode.REGION_SERVER)); + } + + @Test + public void testCancelSelectingMode() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.arrowDown(); + modeScreenPresenter.arrowDown(); + + assertThat(modeScreenPresenter.transitionToNextScreen(false), is(topScreenView)); + verify(resultListener, never()).accept(any()); + } + + 
@Test + public void testPageUp() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.pageUp(); + + assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView)); + verify(resultListener).accept(eq(Mode.values()[0])); + } + + @Test + public void testPageDown() { + ModeScreenPresenter modeScreenPresenter = createModeScreenPresenter(Mode.REGION); + + modeScreenPresenter.pageDown(); + + assertThat(modeScreenPresenter.transitionToNextScreen(true), is(topScreenView)); + Mode[] modes = Mode.values(); + verify(resultListener).accept(eq(modes[modes.length - 1])); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenterTest.java new file mode 100644 index 000000000000..cfc7c567d7cf --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/FilterDisplayModeScreenPresenterTest.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.verify; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class FilterDisplayModeScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(FilterDisplayModeScreenPresenterTest.class); + + @Mock + private FilterDisplayModeScreenView filterDisplayModeScreenView; + + @Mock + private TopScreenView topScreenView; + + private FilterDisplayModeScreenPresenter filterDisplayModeScreenPresenter; + + @Before + public void setup() { + List fields = new ArrayList<>( + Mode.REGION.getFieldInfos().stream().map(FieldInfo::getField).collect(Collectors.toList())); + + List filters = new ArrayList<>(); + filters.add(Filter.parse("NAMESPACE==namespace", fields, true)); + filters.add(Filter.parse("TABLE==table", fields, true)); + + filterDisplayModeScreenPresenter = new FilterDisplayModeScreenPresenter( + filterDisplayModeScreenView, filters, topScreenView); + } + + @Test + public void testInit() { + filterDisplayModeScreenPresenter.init(); + verify(filterDisplayModeScreenView).showFilters(argThat(filters -> filters.size() == 2 + && filters.get(0).toString().equals("NAMESPACE==namespace") + && filters.get(1).toString().equals("TABLE==table"))); + } + + @Test + public void testReturnToTopScreen() { + assertThat(filterDisplayModeScreenPresenter.returnToNextScreen(), is(topScreenView)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenterTest.java new file mode 100644 index 000000000000..cfe08e0d75d4 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/InputModeScreenPresenterTest.java @@ -0,0 +1,206 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.screen.ScreenView; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.InOrder; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class InputModeScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(InputModeScreenPresenterTest.class); + + private static final String TEST_INPUT_MESSAGE = "test input message"; + + @Mock + private InputModeScreenView inputModeScreenView; + + @Mock + private TopScreenView topScreenView; + + @Mock + private Function resultListener; + + private InputModeScreenPresenter inputModeScreenPresenter; + + @Before + public void setup() { + List histories = new ArrayList<>(); + histories.add("history1"); + histories.add("history2"); + + inputModeScreenPresenter = new InputModeScreenPresenter(inputModeScreenView, + TEST_INPUT_MESSAGE, histories, resultListener); + } + + @Test + public void testInit() { + inputModeScreenPresenter.init(); + + verify(inputModeScreenView).showInput(eq(TEST_INPUT_MESSAGE), eq(""), eq(0)); + } + + @Test + public void testCharacter() { + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + + InOrder inOrder = inOrder(inputModeScreenView); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + } + + @Test + public void testArrowLeftAndRight() { + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + inputModeScreenPresenter.arrowLeft(); + inputModeScreenPresenter.arrowLeft(); + inputModeScreenPresenter.arrowLeft(); + inputModeScreenPresenter.arrowLeft(); + inputModeScreenPresenter.arrowRight(); + inputModeScreenPresenter.arrowRight(); + inputModeScreenPresenter.arrowRight(); + inputModeScreenPresenter.arrowRight(); + + InOrder inOrder = inOrder(inputModeScreenView); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(0)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + } + + @Test + 
public void testHomeAndEnd() { + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + inputModeScreenPresenter.home(); + inputModeScreenPresenter.home(); + inputModeScreenPresenter.end(); + inputModeScreenPresenter.end(); + + InOrder inOrder = inOrder(inputModeScreenView); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(0)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + } + + @Test + public void testBackspace() { + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + inputModeScreenPresenter.backspace(); + inputModeScreenPresenter.backspace(); + inputModeScreenPresenter.backspace(); + inputModeScreenPresenter.backspace(); + + InOrder inOrder = inOrder(inputModeScreenView); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq(""), eq(0)); + } + + @Test + public void testDelete() { + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + inputModeScreenPresenter.delete(); + inputModeScreenPresenter.arrowLeft(); + inputModeScreenPresenter.delete(); + + InOrder inOrder = inOrder(inputModeScreenView); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + } + + @Test + public void testHistories() { + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + inputModeScreenPresenter.arrowUp(); + inputModeScreenPresenter.arrowUp(); + inputModeScreenPresenter.arrowUp(); + inputModeScreenPresenter.arrowDown(); + inputModeScreenPresenter.arrowDown(); + inputModeScreenPresenter.arrowDown(); + + InOrder inOrder = inOrder(inputModeScreenView); + inOrder.verify(inputModeScreenView).showInput(any(), eq("a"), eq(1)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("ab"), eq(2)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("abc"), eq(3)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("history2"), eq(8)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("history1"), eq(8)); + inOrder.verify(inputModeScreenView).showInput(any(), eq("history2"), eq(8)); + } + + @Test + public void testReturnToTopScreen() { + when(resultListener.apply(any())).thenReturn(topScreenView); + + inputModeScreenPresenter.character('a'); + inputModeScreenPresenter.character('b'); + inputModeScreenPresenter.character('c'); + + assertThat(inputModeScreenPresenter.returnToNextScreen(), is(topScreenView)); + verify(resultListener).apply(eq("abc")); + } +} diff --git 
a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenterTest.java new file mode 100644 index 000000000000..836caf905db2 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/MessageModeScreenPresenterTest.java @@ -0,0 +1,71 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class MessageModeScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(MessageModeScreenPresenterTest.class); + + private static final String TEST_MESSAGE = "test message"; + + @Mock + private MessageModeScreenView messageModeScreenView; + + @Mock + private TopScreenView topScreenView; + + private MessageModeScreenPresenter messageModeScreenPresenter; + + @Before + public void setup() { + messageModeScreenPresenter = new MessageModeScreenPresenter(messageModeScreenView, + TEST_MESSAGE, topScreenView); + } + + @Test + public void testInit() { + messageModeScreenPresenter.init(); + + verify(messageModeScreenView).showMessage(eq(TEST_MESSAGE)); + } + + @Test + public void testReturnToTopScreen() { + assertThat(messageModeScreenPresenter.returnToNextScreen(), is(topScreenView)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/PagingTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/PagingTest.java new file mode 100644 index 000000000000..cf9606b08516 --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/PagingTest.java @@ -0,0 +1,300 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + + +@Category(SmallTests.class) +public class PagingTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(PagingTest.class); + + @Test + public void testArrowUpAndArrowDown() { + Paging paging = new Paging(); + paging.updatePageSize(3); + paging.updateRecordsSize(5); + + assertPaging(paging, 0, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 1, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 2, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 3, 1, 4); + + paging.arrowDown(); + assertPaging(paging, 4, 2, 5); + + paging.arrowDown(); + assertPaging(paging, 4, 2, 5); + + paging.arrowUp(); + assertPaging(paging, 3, 2, 5); + + paging.arrowUp(); + assertPaging(paging, 2, 2, 5); + + paging.arrowUp(); + assertPaging(paging, 1, 1, 4); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 3); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 3); + } + + @Test + public void testPageUpAndPageDown() { + Paging paging = new Paging(); + paging.updatePageSize(3); + paging.updateRecordsSize(8); + + assertPaging(paging, 0, 0, 3); + + paging.pageDown(); + assertPaging(paging, 3, 3, 6); + + paging.pageDown(); + assertPaging(paging, 6, 5, 8); + + paging.pageDown(); + assertPaging(paging, 7, 5, 8); + + paging.pageDown(); + assertPaging(paging, 7, 5, 8); + + paging.pageUp(); + assertPaging(paging, 4, 4, 7); + + paging.pageUp(); + assertPaging(paging, 1, 1, 4); + + paging.pageUp(); + assertPaging(paging, 0, 0, 3); + + paging.pageUp(); + assertPaging(paging, 0, 0, 3); + } + + @Test + public void testInit() { + Paging paging = new Paging(); + paging.updatePageSize(3); + paging.updateRecordsSize(5); + + assertPaging(paging, 0, 0, 3); + + paging.pageDown(); + paging.pageDown(); + paging.pageDown(); + paging.pageDown(); + paging.init(); + + assertPaging(paging, 0, 0, 3); + } + + @Test + public void testWhenPageSizeGraterThanRecordsSize() { + Paging paging = new Paging(); + paging.updatePageSize(5); + paging.updateRecordsSize(3); + + assertPaging(paging, 0, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 1, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 2, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 2, 0, 3); + + paging.arrowUp(); + assertPaging(paging, 1, 0, 3); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 3); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 3); + + paging.pageDown(); + assertPaging(paging, 2, 0, 3); + + paging.pageDown(); + assertPaging(paging, 2, 0, 3); + + paging.pageUp(); + assertPaging(paging, 0, 0, 3); + + paging.pageUp(); + assertPaging(paging, 0, 0, 3); + } + + @Test + public void testWhenPageSizeIsZero() { + Paging paging = new Paging(); + paging.updatePageSize(0); + paging.updateRecordsSize(5); + + assertPaging(paging, 
0, 0, 0); + + paging.arrowDown(); + assertPaging(paging, 1, 0, 0); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 0); + + paging.pageDown(); + assertPaging(paging, 0, 0, 0); + + paging.pageUp(); + assertPaging(paging, 0, 0, 0); + } + + @Test + public void testWhenRecordsSizeIsZero() { + Paging paging = new Paging(); + paging.updatePageSize(3); + paging.updateRecordsSize(0); + + assertPaging(paging, 0, 0, 0); + + paging.arrowDown(); + assertPaging(paging, 0, 0, 0); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 0); + + paging.pageDown(); + assertPaging(paging, 0, 0, 0); + + paging.pageUp(); + assertPaging(paging, 0, 0, 0); + } + + @Test + public void testWhenChangingPageSizeDynamically() { + Paging paging = new Paging(); + paging.updatePageSize(3); + paging.updateRecordsSize(5); + + assertPaging(paging, 0, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 1, 0, 3); + + paging.updatePageSize(2); + assertPaging(paging, 1, 0, 2); + + paging.arrowDown(); + assertPaging(paging, 2, 1, 3); + + paging.arrowDown(); + assertPaging(paging, 3, 2, 4); + + paging.updatePageSize(4); + assertPaging(paging, 3, 1, 5); + + paging.updatePageSize(5); + assertPaging(paging, 3, 0, 5); + + paging.updatePageSize(0); + assertPaging(paging, 3, 0, 0); + + paging.arrowDown(); + assertPaging(paging, 4, 0, 0); + + paging.arrowUp(); + assertPaging(paging, 3, 0, 0); + + paging.pageDown(); + assertPaging(paging, 3, 0, 0); + + paging.pageUp(); + assertPaging(paging, 3, 0, 0); + + paging.updatePageSize(1); + assertPaging(paging, 3, 3, 4); + } + + @Test + public void testWhenChangingRecordsSizeDynamically() { + Paging paging = new Paging(); + paging.updatePageSize(3); + paging.updateRecordsSize(5); + + assertPaging(paging, 0, 0, 3); + + paging.updateRecordsSize(2); + assertPaging(paging, 0, 0, 2); + assertThat(paging.getCurrentPosition(), is(0)); + assertThat(paging.getPageStartPosition(), is(0)); + assertThat(paging.getPageEndPosition(), is(2)); + + paging.arrowDown(); + assertPaging(paging, 1, 0, 2); + + paging.updateRecordsSize(3); + assertPaging(paging, 1, 0, 3); + + paging.arrowDown(); + assertPaging(paging, 2, 0, 3); + + paging.updateRecordsSize(1); + assertPaging(paging, 0, 0, 1); + + paging.updateRecordsSize(0); + assertPaging(paging, 0, 0, 0); + + paging.arrowDown(); + assertPaging(paging, 0, 0, 0); + + paging.arrowUp(); + assertPaging(paging, 0, 0, 0); + + paging.pageDown(); + assertPaging(paging, 0, 0, 0); + + paging.pageUp(); + assertPaging(paging, 0, 0, 0); + } + + private void assertPaging(Paging paging, int currentPosition, int pageStartPosition, + int pageEndPosition) { + assertThat(paging.getCurrentPosition(), is(currentPosition)); + assertThat(paging.getPageStartPosition(), is(pageStartPosition)); + assertThat(paging.getPageEndPosition(), is(pageEndPosition)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModelTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModelTest.java new file mode 100644 index 000000000000..a9d1f1d485fe --- /dev/null +++ b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenModelTest.java @@ -0,0 +1,206 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.hbtop.Filter; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.TestUtils; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.hbtop.field.FieldValue; +import org.apache.hadoop.hbase.hbtop.mode.Mode; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class TopScreenModelTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TopScreenModelTest.class); + + @Mock + private Admin admin; + + private TopScreenModel topScreenModel; + + private List fields; + + @Before + public void setup() throws IOException { + when(admin.getClusterMetrics()).thenReturn(TestUtils.createDummyClusterMetrics()); + topScreenModel = new TopScreenModel(admin, Mode.REGION); + + fields = new ArrayList<>(Mode.REGION.getFieldInfos().stream() + .map(FieldInfo::getField).collect(Collectors.toList())); + } + + @Test + public void testSummary() { + topScreenModel.refreshMetricsData(); + Summary summary = topScreenModel.getSummary(); + TestUtils.assertSummary(summary); + } + + @Test + public void testRecords() { + // Region Mode + topScreenModel.refreshMetricsData(); + TestUtils.assertRecordsInRegionMode(topScreenModel.getRecords()); + + // Namespace Mode + topScreenModel.switchMode(Mode.NAMESPACE, null, false); + topScreenModel.refreshMetricsData(); + TestUtils.assertRecordsInNamespaceMode(topScreenModel.getRecords()); + + // Table Mode + topScreenModel.switchMode(Mode.TABLE, null, false); + topScreenModel.refreshMetricsData(); + TestUtils.assertRecordsInTableMode(topScreenModel.getRecords()); + + // Namespace Mode + topScreenModel.switchMode(Mode.REGION_SERVER, null, false); + topScreenModel.refreshMetricsData(); + TestUtils.assertRecordsInRegionServerMode(topScreenModel.getRecords()); + } + + @Test + public void testSort() { + // The sort key is LOCALITY + topScreenModel.setSortFieldAndFields(Field.LOCALITY, fields); + + FieldValue previous = null; + + // Test for ascending sort + topScreenModel.refreshMetricsData(); + + for (Record record : 
topScreenModel.getRecords()) { + FieldValue current = record.get(Field.LOCALITY); + if (previous != null) { + assertTrue(current.compareTo(previous) < 0); + } + previous = current; + } + + // Test for descending sort + topScreenModel.switchSortOrder(); + topScreenModel.refreshMetricsData(); + + previous = null; + for (Record record : topScreenModel.getRecords()) { + FieldValue current = record.get(Field.LOCALITY); + if (previous != null) { + assertTrue(current.compareTo(previous) > 0); + } + previous = current; + } + } + + @Test + public void testFilters() { + topScreenModel.addFilter("TABLE==table1", false); + topScreenModel.refreshMetricsData(); + for (Record record : topScreenModel.getRecords()) { + FieldValue value = record.get(Field.TABLE); + assertThat(value.asString(), is("table1")); + } + + topScreenModel.clearFilters(); + topScreenModel.addFilter("TABLE==TABLE1", false); + topScreenModel.refreshMetricsData(); + assertThat(topScreenModel.getRecords().size(), is(0)); + + // Test for ignore case + topScreenModel.clearFilters(); + topScreenModel.addFilter("TABLE==TABLE1", true); + topScreenModel.refreshMetricsData(); + for (Record record : topScreenModel.getRecords()) { + FieldValue value = record.get(Field.TABLE); + assertThat(value.asString(), is("table1")); + } + } + + @Test + public void testFilterHistories() { + topScreenModel.addFilter("TABLE==table1", false); + topScreenModel.addFilter("TABLE==table2", false); + topScreenModel.addFilter("TABLE==table3", false); + + assertThat(topScreenModel.getFilterHistories().get(0), is("TABLE==table1")); + assertThat(topScreenModel.getFilterHistories().get(1), is("TABLE==table2")); + assertThat(topScreenModel.getFilterHistories().get(2), is("TABLE==table3")); + } + + @Test + public void testSwitchMode() { + topScreenModel.switchMode(Mode.TABLE, null, false); + assertThat(topScreenModel.getCurrentMode(), is(Mode.TABLE)); + + // Test for initialFilters + List initialFilters = Arrays.asList( + Filter.parse("TABLE==table1", fields, true), + Filter.parse("TABLE==table2", fields, true)); + + topScreenModel.switchMode(Mode.TABLE, initialFilters, false); + + assertThat(topScreenModel.getFilters().size(), is(initialFilters.size())); + for (int i = 0; i < topScreenModel.getFilters().size(); i++) { + assertThat(topScreenModel.getFilters().get(i).toString(), + is(initialFilters.get(i).toString())); + } + + // Test when keepSortFieldAndSortOrderIfPossible is true + topScreenModel.setSortFieldAndFields(Field.NAMESPACE, fields); + topScreenModel.switchMode(Mode.NAMESPACE, null, true); + assertThat(topScreenModel.getCurrentSortField(), is(Field.NAMESPACE)); + } + + @Test + public void testDrillDown() { + topScreenModel.switchMode(Mode.TABLE, null, false); + topScreenModel.setSortFieldAndFields(Field.NAMESPACE, fields); + topScreenModel.refreshMetricsData(); + + boolean success = topScreenModel.drillDown(topScreenModel.getRecords().get(0)); + assertThat(success, is(true)); + + assertThat(topScreenModel.getFilters().get(0).toString(), is("NAMESPACE==namespace")); + assertThat(topScreenModel.getFilters().get(1).toString(), is("TABLE==table3")); + assertThat(topScreenModel.getCurrentSortField(), is(Field.NAMESPACE)); + } +} diff --git a/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenterTest.java b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenterTest.java new file mode 100644 index 000000000000..5f42767e6dd6 --- /dev/null +++ 
b/hbase-hbtop/src/test/java/org/apache/hadoop/hbase/hbtop/screen/top/TopScreenPresenterTest.java @@ -0,0 +1,256 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.hbtop.screen.top; + +import static org.apache.hadoop.hbase.hbtop.Record.entry; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.hbtop.Record; +import org.apache.hadoop.hbase.hbtop.field.Field; +import org.apache.hadoop.hbase.hbtop.field.FieldInfo; +import org.apache.hadoop.hbase.hbtop.terminal.TerminalSize; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.mockito.InOrder; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + + +@Category(SmallTests.class) +@RunWith(MockitoJUnitRunner.class) +public class TopScreenPresenterTest { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TopScreenPresenterTest.class); + + private static final List TEST_FIELD_INFOS = Arrays.asList( + new FieldInfo(Field.REGION, 10, true), + new FieldInfo(Field.REQUEST_COUNT_PER_SECOND, 10, true), + new FieldInfo(Field.LOCALITY, 10, true) + ); + + private static final List TEST_RECORDS = Arrays.asList( + Record.ofEntries( + entry(Field.REGION, "region1"), + entry(Field.REQUEST_COUNT_PER_SECOND, 1L), + entry(Field.LOCALITY, 0.3f)), + Record.ofEntries( + entry(Field.REGION, "region2"), + entry(Field.REQUEST_COUNT_PER_SECOND, 2L), + entry(Field.LOCALITY, 0.2f)), + Record.ofEntries( + entry(Field.REGION, "region3"), + entry(Field.REQUEST_COUNT_PER_SECOND, 3L), + entry(Field.LOCALITY, 0.1f)) + ); + + private static final Summary TEST_SUMMARY = new Summary( + "00:00:01", "3.0.0-SNAPSHOT", "01234567-89ab-cdef-0123-456789abcdef", + 3, 2, 1, 6, 1, 3.0, 300); + + @Mock + private TopScreenView topScreenView; + + @Mock + private TopScreenModel topScreenModel; + + private TopScreenPresenter topScreenPresenter; + + @Before + public void setup() { + when(topScreenView.getTerminalSize()).thenReturn(new TerminalSize(100, 100)); + when(topScreenView.getPageSize()).thenReturn(100); + + when(topScreenModel.getFieldInfos()).thenReturn(TEST_FIELD_INFOS); + when(topScreenModel.getFields()).thenReturn(TEST_FIELD_INFOS.stream() + .map(FieldInfo::getField).collect(Collectors.toList())); + 
when(topScreenModel.getRecords()).thenReturn(TEST_RECORDS); + when(topScreenModel.getSummary()).thenReturn(TEST_SUMMARY); + + topScreenPresenter = new TopScreenPresenter(topScreenView, 3000, topScreenModel); + } + + @Test + public void testRefresh() { + topScreenPresenter.init(); + topScreenPresenter.refresh(true); + + verify(topScreenView).showTopScreen(argThat(this::assertSummary), + argThat(this::assertHeaders), argThat(this::assertRecords), + argThat(selectedRecord -> assertSelectedRecord(selectedRecord, 0))); + } + + @Test + public void testVerticalScrolling() { + topScreenPresenter.init(); + topScreenPresenter.refresh(true); + + topScreenPresenter.arrowDown(); + topScreenPresenter.arrowDown(); + topScreenPresenter.arrowDown(); + + topScreenPresenter.arrowDown(); + topScreenPresenter.arrowDown(); + topScreenPresenter.arrowDown(); + + topScreenPresenter.arrowUp(); + topScreenPresenter.arrowUp(); + topScreenPresenter.arrowUp(); + + topScreenPresenter.pageDown(); + topScreenPresenter.pageDown(); + + topScreenPresenter.pageUp(); + topScreenPresenter.pageUp(); + + InOrder inOrder = inOrder(topScreenView); + verifyVerticalScrolling(inOrder, 0); + + verifyVerticalScrolling(inOrder, 1); + verifyVerticalScrolling(inOrder, 2); + verifyVerticalScrolling(inOrder, 2); + + verifyVerticalScrolling(inOrder, 1); + verifyVerticalScrolling(inOrder, 0); + verifyVerticalScrolling(inOrder, 0); + + verifyVerticalScrolling(inOrder, 2); + verifyVerticalScrolling(inOrder, 2); + + verifyVerticalScrolling(inOrder, 0); + verifyVerticalScrolling(inOrder, 0); + } + + private void verifyVerticalScrolling(InOrder inOrder, int expectedSelectedRecodeIndex) { + inOrder.verify(topScreenView).showTopScreen(any(), any(), any(), + argThat(selectedRecord -> assertSelectedRecord(selectedRecord, expectedSelectedRecodeIndex))); + } + + @Test + public void testHorizontalScrolling() { + topScreenPresenter.init(); + topScreenPresenter.refresh(true); + + topScreenPresenter.arrowRight(); + topScreenPresenter.arrowRight(); + topScreenPresenter.arrowRight(); + + topScreenPresenter.arrowLeft(); + topScreenPresenter.arrowLeft(); + topScreenPresenter.arrowLeft(); + + topScreenPresenter.end(); + topScreenPresenter.end(); + + topScreenPresenter.home(); + topScreenPresenter.home(); + + InOrder inOrder = inOrder(topScreenView); + verifyHorizontalScrolling(inOrder, 3); + + verifyHorizontalScrolling(inOrder, 2); + verifyHorizontalScrolling(inOrder, 1); + verifyHorizontalScrolling(inOrder, 1); + + verifyHorizontalScrolling(inOrder, 2); + verifyHorizontalScrolling(inOrder, 3); + verifyHorizontalScrolling(inOrder, 3); + + verifyHorizontalScrolling(inOrder, 1); + verifyHorizontalScrolling(inOrder, 1); + + verifyHorizontalScrolling(inOrder, 3); + verifyHorizontalScrolling(inOrder, 3); + } + + private void verifyHorizontalScrolling(InOrder inOrder, int expectedHeaderCount) { + inOrder.verify(topScreenView).showTopScreen(any(), + argThat(headers -> headers.size() == expectedHeaderCount), any(), any()); + } + + private boolean assertSummary(Summary actual) { + return actual.getCurrentTime().equals(TEST_SUMMARY.getCurrentTime()) + && actual.getVersion().equals(TEST_SUMMARY.getVersion()) + && actual.getClusterId().equals(TEST_SUMMARY.getClusterId()) + && actual.getServers() == TEST_SUMMARY.getServers() + && actual.getLiveServers() == TEST_SUMMARY.getLiveServers() + && actual.getDeadServers() == TEST_SUMMARY.getDeadServers() + && actual.getRegionCount() == TEST_SUMMARY.getRegionCount() + && actual.getRitCount() == TEST_SUMMARY.getRitCount() + && 
actual.getAverageLoad() == TEST_SUMMARY.getAverageLoad() + && actual.getAggregateRequestPerSecond() == TEST_SUMMARY.getAggregateRequestPerSecond(); + } + + private boolean assertHeaders(List
<Header> actual) { + List<Header>
expected = + TEST_FIELD_INFOS.stream().map(fi -> new Header(fi.getField(), fi.getDefaultLength())) + .collect(Collectors.toList()); + + if (actual.size() != expected.size()) { + return false; + } + + for (int i = 0; i < actual.size(); i++) { + if (actual.get(i).getField() != expected.get(i).getField()) { + return false; + } + if (actual.get(i).getLength() != expected.get(i).getLength()) { + return false; + } + } + + return true; + } + + private boolean assertRecords(List actual) { + if (actual.size() != TEST_RECORDS.size()) { + return false; + } + + for (int i = 0; i < actual.size(); i++) { + if (!assertRecord(actual.get(i), TEST_RECORDS.get(i))) { + return false; + } + } + + return true; + } + + private boolean assertSelectedRecord(Record actual, int expectedSelectedRecodeIndex) { + return assertRecord(actual, TEST_RECORDS.get(expectedSelectedRecodeIndex)); + } + + private boolean assertRecord(Record actual, Record expected) { + return actual.get(Field.REGION).equals(expected.get(Field.REGION)) && actual + .get(Field.REQUEST_COUNT_PER_SECOND).equals(expected.get(Field.REQUEST_COUNT_PER_SECOND)) + && actual.get(Field.LOCALITY).equals(expected.get(Field.LOCALITY)); + } +} diff --git a/pom.xml b/pom.xml index f2104a57b671..32a60ad54927 100755 --- a/pom.xml +++ b/pom.xml @@ -90,6 +90,7 @@ hbase-metrics hbase-backup hbase-zookeeper + hbase-hbtop scm:git:git://gitbox.apache.org/repos/asf/hbase.git @@ -1805,6 +1806,11 @@ test-jar test + + hbase-hbtop + org.apache.hbase + ${project.version} + org.apache.hbase hbase-shaded-client
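A note on the pom.xml change above: adding hbase-hbtop to the parent pom's dependencyManagement (with version ${project.version}) lets other modules in the HBase reactor declare the dependency without pinning a version. A minimal sketch of such a declaration follows; the consuming module is hypothetical and not part of this patch:

    <!-- in a consuming module's pom.xml; version is inherited from the managed entry added above -->
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-hbtop</artifactId>
    </dependency>

The version resolves through the managed entry, so it stays in sync with the rest of the build.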