From 33390956301d105fabcf4256a80f86e5a9d0f197 Mon Sep 17 00:00:00 2001
From: "Doroszlai, Attila"
Date: Wed, 17 Jul 2019 23:29:56 +0200
Subject: [PATCH 1/3] HDDS-1811. Prometheus metrics are broken for datanodes
 due to an invalid metric

---
 .../container/common/transport/server/ratis/CSMMetrics.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/CSMMetrics.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/CSMMetrics.java
index def0d7ff40b64..ebbec4de66a6f 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/CSMMetrics.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/CSMMetrics.java
@@ -62,7 +62,7 @@ public class CSMMetrics {
   public CSMMetrics() {
     int numCmdTypes = ContainerProtos.Type.values().length;
     this.opsLatency = new MutableRate[numCmdTypes];
-    this.registry = new MetricsRegistry(CSMMetrics.class.getName());
+    this.registry = new MetricsRegistry(CSMMetrics.class.getSimpleName());
     for (int i = 0; i < numCmdTypes; i++) {
       opsLatency[i] = registry.newRate(
           ContainerProtos.Type.forNumber(i + 1).toString(),

From 1ca11fb1a46b0601b177a72fb33aeb9e6484793b Mon Sep 17 00:00:00 2001
From: "Doroszlai, Attila"
Date: Thu, 18 Jul 2019 07:36:07 +0200
Subject: [PATCH 2/3] HDDS-1811. Prometheus metrics are broken for SCM

---
 .../hdds/server/PrometheusMetricsSink.java | 34 +++++--------------
 .../server/TestPrometheusMetricsSink.java  | 26 +++++++++++---
 2 files changed, 30 insertions(+), 30 deletions(-)

diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java
index 52532f1723928..df25cfc9a39f5 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java
@@ -23,7 +23,6 @@
 import java.io.Writer;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.regex.Matcher;
 import java.util.regex.Pattern;

 import org.apache.commons.lang3.StringUtils;
@@ -47,8 +46,8 @@ public class PrometheusMetricsSink implements MetricsSink {
    */
   private Map<String, String> metricLines = new HashMap<>();

-  private static final Pattern UPPER_CASE_SEQ =
-      Pattern.compile("([A-Z]*)([A-Z])");
+  private static final Pattern SPLIT_PATTERN =
+      Pattern.compile("(?<!(^|[A-Z]|_))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])");

   public PrometheusMetricsSink() {
   }
@@ -81,28 +80,11 @@ public void putMetrics(MetricsRecord metricsRecord) {
    */
   public String prometheusName(String recordName,
       String metricName) {
-    String baseName = upperFirst(recordName) + upperFirst(metricName);
-    Matcher m = UPPER_CASE_SEQ.matcher(baseName);
-    StringBuffer sb = new StringBuffer();
-    while (m.find()) {
-      String replacement = "_" + m.group(2).toLowerCase();
-      if (m.group(1).length() > 0) {
-        replacement = "_" + m.group(1).toLowerCase() + replacement;
-      }
-      m.appendReplacement(sb, replacement);
-    }
-    m.appendTail(sb);
-
-    //always prefixed with "_"
-    return sb.toString().substring(1);
-  }
-
-  private String upperFirst(String name) {
-    if (Character.isLowerCase(name.charAt(0))) {
-      return Character.toUpperCase(name.charAt(0)) + name.substring(1);
-    } else {
-      return name;
-    }
+    String baseName = StringUtils.capitalize(recordName)
+        + StringUtils.capitalize(metricName);
+    baseName = baseName.replace('-', '_');
+    String[] parts = SPLIT_PATTERN.split(baseName);
+    return String.join("_", parts).toLowerCase();
   }

   @Override
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
index 0a8eb676d8351..80ce4636e4d92 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
@@ -20,6 +20,7 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;

 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
@@ -52,7 +53,7 @@ public void testPublish() throws IOException {
     testMetrics.numBucketCreateFails.incr();
     metrics.publishMetricsNow();
     ByteArrayOutputStream stream = new ByteArrayOutputStream();
-    OutputStreamWriter writer = new OutputStreamWriter(stream, UTF_8);
+    OutputStreamWriter writer = new OutputStreamWriter(stream, StandardCharsets.UTF_8);

     //WHEN
     sink.writeMetrics(writer);
@@ -71,7 +72,7 @@ public void testPublish() throws IOException {
   }

   @Test
-  public void testNaming() throws IOException {
+  public void testNamingCamelCase() {
     PrometheusMetricsSink sink = new PrometheusMetricsSink();
     Assert.assertEquals("rpc_time_some_metrics",
         sink.prometheusName("RpcTime", "SomeMetrics"));
@@ -82,18 +83,35 @@ public void testNaming() throws IOException {

     Assert.assertEquals("rpc_time_small",
         sink.prometheusName("RpcTime", "small"));
+  }

+  @Test
+  public void testNamingRocksDB() {
     //RocksDB metrics are handled differently.
-
+    PrometheusMetricsSink sink = new PrometheusMetricsSink();
     Assert.assertEquals("rocksdb_om.db_num_open_connections",
         sink.prometheusName("Rocksdb_om.db", "num_open_connections"));
   }

+  @Test
+  public void testNamingPipeline() {
+    PrometheusMetricsSink sink = new PrometheusMetricsSink();
+
+    String recordName = "SCMPipelineMetrics";
+    String metricName = "NumBlocksAllocated-"
+        + "RATIS-THREE-47659e3d-40c9-43b3-9792-4982fc279aba";
+    Assert.assertEquals(
+        "scm_pipeline_metrics_"
+            + "num_blocks_allocated_"
+            + "ratis_three_47659e3d_40c9_43b3_9792_4982fc279aba",
+        sink.prometheusName(recordName, metricName));
+  }
+
   /**
    * Example metric pojo.
    */
   @Metrics(about = "Test Metrics", context = "dfs")
-  public static class TestMetrics {
+  private static class TestMetrics {
     @Metric
     private MutableCounterLong numBucketCreateFails;


From 09583ff27fd0f4d570df6f087df66db67b5f3f0b Mon Sep 17 00:00:00 2001
From: "Doroszlai, Attila"
Date: Thu, 18 Jul 2019 11:32:09 +0200
Subject: [PATCH 3/3] HDDS-1811. Fix checkstyle

---
 .../hadoop/hdds/server/TestPrometheusMetricsSink.java | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
index 80ce4636e4d92..a1a9a55500cf8 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java
@@ -20,7 +20,6 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
-import java.nio.charset.StandardCharsets;

 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
@@ -31,7 +30,7 @@
 import org.junit.Assert;
 import org.junit.Test;

-import static org.apache.commons.codec.CharEncoding.UTF_8;
+import static java.nio.charset.StandardCharsets.UTF_8;

 /**
  * Test prometheus Sink.
@@ -53,17 +52,18 @@ public void testPublish() throws IOException {
     testMetrics.numBucketCreateFails.incr();
     metrics.publishMetricsNow();
     ByteArrayOutputStream stream = new ByteArrayOutputStream();
-    OutputStreamWriter writer = new OutputStreamWriter(stream, StandardCharsets.UTF_8);
+    OutputStreamWriter writer = new OutputStreamWriter(stream, UTF_8);

     //WHEN
     sink.writeMetrics(writer);
     writer.flush();

     //THEN
-    System.out.println(stream.toString(UTF_8));
+    String writtenMetrics = stream.toString(UTF_8.name());
+    System.out.println(writtenMetrics);
     Assert.assertTrue(
         "The expected metric line is missing from prometheus metrics output",
-        stream.toString(UTF_8).contains(
+        writtenMetrics.contains(
             "test_metrics_num_bucket_create_fails{context=\"dfs\"")
     );
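Note, not part of the patches above: a minimal standalone sketch of the metric-name
conversion that PATCH 2/3 introduces, useful for checking an expected Prometheus name
without starting the metrics system. It assumes the SPLIT_PATTERN regex and the
prometheusName body shown in the second patch and commons-lang3 on the classpath;
the class name PrometheusNameSketch is hypothetical.

    import java.util.regex.Pattern;
    import org.apache.commons.lang3.StringUtils;

    public final class PrometheusNameSketch {
      // Split before camel-case word boundaries, but not at the start of the
      // name, inside an upper-case run, or right after an underscore.
      private static final Pattern SPLIT_PATTERN =
          Pattern.compile("(?<!(^|[A-Z]|_))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])");

      static String prometheusName(String recordName, String metricName) {
        String baseName = StringUtils.capitalize(recordName)
            + StringUtils.capitalize(metricName);
        // Pipeline metric names embed IDs such as RATIS-THREE-<uuid>, so map
        // '-' to '_' before splitting; '-' is invalid in Prometheus names.
        baseName = baseName.replace('-', '_');
        return String.join("_", SPLIT_PATTERN.split(baseName)).toLowerCase();
      }

      public static void main(String[] args) {
        // Prints the value asserted by testNamingPipeline:
        // scm_pipeline_metrics_num_blocks_allocated_ratis_three_47659e3d_...
        System.out.println(prometheusName("SCMPipelineMetrics",
            "NumBlocksAllocated-RATIS-THREE-47659e3d-40c9-43b3-9792-4982fc279aba"));
      }
    }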