From e7922693c28667dca566e93db0f96a6424f222cd Mon Sep 17 00:00:00 2001
From: Tanguy Leroux
Date: Tue, 28 Feb 2017 11:04:15 +0100
Subject: [PATCH] Tests: Add unit test for InternalDateHistogram

Relates to #22278
---
 .../histogram/InternalDateHistogram.java      | 51 +++++++++++
 .../histogram/InternalDateHistogramTests.java | 85 +++++++++++++++++++
 2 files changed, 136 insertions(+)
 create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java

diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
index a8976aaa1ac77..19e5dc1ec897d 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
@@ -41,6 +41,7 @@
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * Implementation of {@link Histogram}.
@@ -76,6 +77,24 @@ public Bucket(StreamInput in, boolean keyed, DocValueFormat format) throws IOExc aggregations = InternalAggregations.readAggregations(in); } + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != InternalDateHistogram.Bucket.class) { + return false; + } + InternalDateHistogram.Bucket that = (InternalDateHistogram.Bucket) obj; + // No need to take the keyed and format parameters into account, + // they are already stored and tested on the InternalDateHistogram object + return key == that.key + && docCount == that.docCount + && Objects.equals(aggregations, that.aggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), key, docCount, aggregations); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeLong(key); @@ -169,6 +188,21 @@ void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(bounds); } + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + EmptyBucketInfo that = (EmptyBucketInfo) obj; + return Objects.equals(rounding, that.rounding) + && Objects.equals(bounds, that.bounds) + && Objects.equals(subAggregations, that.subAggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), rounding, bounds, subAggregations); + } } private final List buckets; @@ -446,4 +480,21 @@ public InternalAggregation createAggregation(List { + + @Override + protected InternalDateHistogram createTestInstance(String name, List pipelineAggregators, + Map metaData) { + + boolean keyed = randomBoolean(); + DocValueFormat format = DocValueFormat.RAW; + int nbBuckets = randomInt(10); + List buckets = new ArrayList<>(nbBuckets); + long startingDate = System.currentTimeMillis(); + + long interval = randomIntBetween(1, 3); + long intervalMillis = randomFrom(timeValueSeconds(interval), timeValueMinutes(interval), timeValueHours(interval)).getMillis(); 
+
+        for (int i = 0; i < nbBuckets; i++) {
+            long key = startingDate + (intervalMillis * i);
+            buckets.add(i, new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, InternalAggregations.EMPTY));
+        }
+
+        InternalOrder order = (InternalOrder) randomFrom(InternalHistogram.Order.KEY_ASC, InternalHistogram.Order.KEY_DESC);
+        return new InternalDateHistogram(name, buckets, order, 1, 0L, null, format, keyed, pipelineAggregators, metaData);
+    }
+
+    @Override
+    protected void assertReduced(InternalDateHistogram reduced, List<InternalDateHistogram> inputs) {
+        Map<Long, Long> expectedCounts = new TreeMap<>();
+        for (Histogram histogram : inputs) {
+            for (Histogram.Bucket bucket : histogram.getBuckets()) {
+                expectedCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+                        (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
+            }
+        }
+        Map<Long, Long> actualCounts = new TreeMap<>();
+        for (Histogram.Bucket bucket : reduced.getBuckets()) {
+            actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+                    (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
+        }
+        assertEquals(expectedCounts, actualCounts);
+    }
+
+    @Override
+    protected Writeable.Reader<InternalDateHistogram> instanceReader() {
+        return InternalDateHistogram::new;
+    }
+}