@@ -846,4 +846,9 @@ public <T> ObjectArray<T> grow(ObjectArray<T> array, long minSize) {
final long newSize = overSize(minSize, PageCacheRecycler.OBJECT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
return resize(array, newSize);
}

protected boolean shouldCheckBreaker() {
return checkBreaker;
}

}
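The new protected shouldCheckBreaker() accessor exposes the private checkBreaker flag to subclasses without widening the field itself. Its intended consumer is the (for now commented-out) MockBigArrays override further down in this diff, which would look roughly like this once enabled:

@Override
public BigArrays withBreakerService(CircuitBreakerService breakerService) {
    // Carry the mock's current breaker-check flag into the rebuilt instance
    // instead of reaching into BigArrays' private state.
    return new MockBigArrays(this.recycler, breakerService, this.shouldCheckBreaker());
}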
@@ -8,12 +8,8 @@

package org.elasticsearch.search.aggregations;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.elasticsearch.script.AggregationScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
@@ -28,49 +24,48 @@

public class AggregationCollectorTests extends AggregatorTestCase {
public void testTerms() throws IOException {
assertFalse(needsScores(termsBuilder().field("f")));
assertNeedsScores(termsBuilder().field("f"), false);
}

public void testSubTerms() throws IOException {
assertFalse(needsScores(termsBuilder().field("f").subAggregation(new TermsAggregationBuilder("i").field("f"))));
assertNeedsScores(termsBuilder().field("f").subAggregation(new TermsAggregationBuilder("i").field("f")), false);
}

public void testScoreConsumingScript() throws IOException {
assertFalse(needsScores(termsBuilder().script(new Script("no_scores"))));
assertNeedsScores(termsBuilder().script(new Script("no_scores")), false);
}

public void testNonScoreConsumingScript() throws IOException {
assertTrue(needsScores(termsBuilder().script(new Script("with_scores"))));
assertNeedsScores(termsBuilder().script(new Script("with_scores")), true);
}

public void testSubScoreConsumingScript() throws IOException {
assertFalse(needsScores(termsBuilder().field("f").subAggregation(termsBuilder().script(new Script("no_scores")))));
assertNeedsScores(termsBuilder().field("f").subAggregation(termsBuilder().script(new Script("no_scores"))), false);
}

public void testSubNonScoreConsumingScript() throws IOException {
assertTrue(needsScores(termsBuilder().field("f").subAggregation(termsBuilder().script(new Script("with_scores")))));
assertNeedsScores(termsBuilder().field("f").subAggregation(termsBuilder().script(new Script("with_scores"))), true);
}

public void testTopHits() throws IOException {
assertTrue(needsScores(new TopHitsAggregationBuilder("h")));
assertNeedsScores(new TopHitsAggregationBuilder("h"), true);
}

public void testSubTopHits() throws IOException {
assertTrue(needsScores(termsBuilder().field("f").subAggregation(new TopHitsAggregationBuilder("h"))));
assertNeedsScores(termsBuilder().field("f").subAggregation(new TopHitsAggregationBuilder("h")), true);
}

private TermsAggregationBuilder termsBuilder() {
return new TermsAggregationBuilder("t");
}

private boolean needsScores(AggregationBuilder builder) throws IOException {
try (
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
DirectoryReader reader = indexWriter.getReader()
) {
return createAggregator(builder, new IndexSearcher(reader), new KeywordFieldType("f")).scoreMode().needsScores();
}
private void assertNeedsScores(AggregationBuilder builder, boolean expected) throws IOException {
withAggregator(
builder,
new MatchAllDocsQuery(),
iw -> {},
(indexSearcher, agg) -> assertEquals(expected, agg.scoreMode().needsScores())
);
}
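The removed needsScores helper opened its own Directory, RandomIndexWriter, and DirectoryReader, and it never released the AggregationContext it created for the aggregator. The new assertNeedsScores delegates that plumbing to withAggregator, which (per the AggregatorTestCase change at the end of this diff) also owns the context's lifetime. As a sketch, not the exact framework code, the helper now effectively runs:

// Rough shape of what withAggregator does on behalf of assertNeedsScores;
// builder and expected stand in for the helper's parameters.
try (
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    DirectoryReader reader = indexWriter.getReader()
) {
    IndexSearcher searcher = newIndexSearcher(reader);
    try (AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery())) {
        // The context is closed when this block exits, releasing breaker-tracked buffers.
        assertEquals(expected, createAggregator(builder, context).scoreMode().needsScores());
    }
}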

@Override
@@ -13,20 +13,43 @@
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.junit.After;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class BucketsAggregatorTests extends AggregatorTestCase {

private List<AggregationContext> toRelease = new ArrayList<>();

@Override
protected AggregationContext createAggregationContext(IndexSearcher indexSearcher, Query query, MappedFieldType... fieldTypes)
throws IOException {
AggregationContext context = super.createAggregationContext(indexSearcher, query, fieldTypes);
// Generally, we should avoid doing this, but this test doesn't do anything with reduction, so it should be safe here
toRelease.add(context);
return context;
}

@After
public void releaseContext() {
Releasables.close(toRelease);
toRelease.clear();
}
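Collecting every context in toRelease and closing them in an @After hook is the fallback for tests where the context has to outlive the method that created it: buildMergeAggregator below returns the aggregator for later use, so the context backing it cannot be closed before the test body runs. Where a context can stay local, the simpler shape this PR pushes elsewhere is a try-with-resources block; a sketch, with builder, fieldType, and indexSearcher standing in for test-specific setup:

try (AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType)) {
    BucketsAggregator aggregator = (BucketsAggregator) createAggregator(builder, context);
    // ... exercise the aggregator while the context (and its big arrays) is still open ...
} // closing the context releases any breaker-tracked memory the aggregator allocated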

public BucketsAggregator buildMergeAggregator() throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
@@ -63,19 +63,15 @@
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
import org.elasticsearch.search.aggregations.AggregationReduceContext;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests;
import org.elasticsearch.search.aggregations.metrics.Max;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.internal.ContextIndexSearcherTests.DocumentSubsetDirectoryReader;
import org.elasticsearch.test.ListMatcher;
@@ -668,43 +664,31 @@ public void onCache(ShardId shardId, Accountable accountable) {}
LongPoint.newRangeQuery("t", 5, Long.MAX_VALUE)
);
IndexSearcher searcher = newIndexSearcher(limitedReader);
AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery());
FilterByFilterAggregator aggregator = createAggregator(builder, context);
aggregator.preCollection();
searcher.search(context.query(), aggregator.asCollector());
aggregator.postCollection();

InternalAggregation result = aggregator.buildTopLevel();
result = result.reduce(
List.of(result),
new AggregationReduceContext.ForFinal(
context.bigArrays(),
getMockScriptService(),
() -> false,
builder,
b -> {},
PipelineTree.EMPTY
)
);
InternalFilters filters = (InternalFilters) result;
assertThat(filters.getBuckets(), hasSize(1));
assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(5L));

Map<String, Object> debug = new HashMap<>();
aggregator.collectDebugInfo(debug::put);
assertMap(
debug,
matchesMap().entry("segments_counted", greaterThanOrEqualTo(1))
.entry("segments_collected", 0)
.entry("segments_with_doc_count_field", 0)
.entry("segments_with_deleted_docs", 0)
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "*:*")
.entry("segments_counted_in_constant_time", searcher.getLeafContexts().size())
debugTestCase(
builder,
new MatchAllDocsQuery(),
searcher,
(InternalFilters filters, Class<? extends Aggregator> impl, Map<String, Map<String, Object>> debug) -> {
assertThat(filters.getBuckets(), hasSize(1));
assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(5L));
assertMap(
debug,
matchesMap().entry(
"test",
matchesMap().entry("segments_counted", greaterThanOrEqualTo(1))
.entry("segments_collected", 0)
.entry("segments_with_doc_count_field", 0)
.entry("segments_with_deleted_docs", 0)
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "*:*")
.entry("segments_counted_in_constant_time", searcher.getLeafContexts().size())
)
)
)
)
);
}
);
}
}
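debugTestCase replaces the hand-rolled preCollection/search/postCollection sequence, buildTopLevel, and the explicit AggregationReduceContext.ForFinal reduction with one helper that runs the full collect-and-reduce cycle and hands back the reduced result, the aggregator implementation class, and a per-aggregator debug map keyed by aggregator name (hence the extra "test" level in the assertions above). A hypothetical call with a different aggregation, only to show the shape of the overload used here (the names "m" and "v" and the field type are made up):

debugTestCase(
    new MaxAggregationBuilder("m").field("v"),
    new MatchAllDocsQuery(),
    searcher,
    (Max max, Class<? extends Aggregator> impl, Map<String, Map<String, Object>> debug) -> {
        // debug is keyed by aggregator name, so this aggregator's entries live under "m"
        assertTrue(debug.containsKey("m"));
    },
    new NumberFieldMapper.NumberFieldType("v", NumberFieldMapper.NumberType.LONG)
);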
@@ -745,36 +729,32 @@ public void onCache(ShardId shardId, Accountable accountable) {}
LongPoint.newRangeQuery("t", 5, Long.MAX_VALUE)
);
IndexSearcher searcher = newIndexSearcher(limitedReader);
AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery(), ft);
FilterByFilterAggregator aggregator = createAggregator(builder, context);
aggregator.preCollection();
searcher.search(context.query(), aggregator.asCollector());
aggregator.postCollection();

InternalAggregation result = aggregator.buildTopLevel();
result = result.reduce(
List.of(result),
new AggregationReduceContext.ForFinal(context.bigArrays(), getMockScriptService(), () -> false, null, b -> {})
);
InternalFilters filters = (InternalFilters) result;
assertThat(filters.getBuckets(), hasSize(1));
assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(5L));

Map<String, Object> debug = new HashMap<>();
aggregator.collectDebugInfo(debug::put);
assertMap(
debug,
matchesMap().entry("segments_counted", greaterThanOrEqualTo(1))
.entry("segments_collected", 0)
.entry("segments_with_doc_count_field", 0)
.entry("segments_with_deleted_docs", 0)
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "foo:bar")
.entry("segments_counted_in_constant_time", lessThan(searcher.getLeafContexts().size()))
debugTestCase(
builder,
new MatchAllDocsQuery(),
searcher,
(InternalFilters filters, Class<? extends Aggregator> impl, Map<String, Map<String, Object>> debug) -> {
assertThat(filters.getBuckets(), hasSize(1));
assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(5L));
assertMap(
debug,
matchesMap().entry(
"test",
matchesMap().entry("segments_counted", greaterThanOrEqualTo(1))
.entry("segments_collected", 0)
.entry("segments_with_doc_count_field", 0)
.entry("segments_with_deleted_docs", 0)
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "foo:bar")
.entry("segments_counted_in_constant_time", lessThan(searcher.getLeafContexts().size()))
)
)
)
)
);
},
ft
);
}
}
@@ -811,37 +791,35 @@ public void onCache(ShardId shardId, Accountable accountable) {}
LongPoint.newRangeQuery("t", Long.MIN_VALUE, Long.MAX_VALUE)
);
IndexSearcher searcher = newIndexSearcher(limitedReader);
AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery(), ft);
FilterByFilterAggregator aggregator = createAggregator(builder, context);
aggregator.preCollection();
searcher.search(context.query(), aggregator.asCollector());
aggregator.postCollection();

InternalAggregation result = aggregator.buildTopLevel();
result = result.reduce(
List.of(result),
new AggregationReduceContext.ForFinal(context.bigArrays(), getMockScriptService(), () -> false, null, b -> {})
);
InternalFilters filters = (InternalFilters) result;
assertThat(filters.getBuckets(), hasSize(1));
assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(10L));

Map<String, Object> debug = new HashMap<>();
aggregator.collectDebugInfo(debug::put);
assertMap(
debug,
matchesMap().entry("segments_counted", greaterThanOrEqualTo(1))
.entry("segments_collected", 0)
.entry("segments_with_doc_count_field", 0)
.entry("segments_with_deleted_docs", 0)
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "foo:bar")
.entry("segments_counted_in_constant_time", searcher.getLeafContexts().size())
debugTestCase(
builder,
new MatchAllDocsQuery(),
searcher,
(InternalFilters filters, Class<? extends Aggregator> impl, Map<String, Map<String, Object>> debug) -> {
assertThat(filters.getBuckets(), hasSize(1));
assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(10L));
assertMap(
debug,
matchesMap().entry(
"test",
matchesMap().entry("segments_counted", greaterThanOrEqualTo(1))
.entry("segments_collected", 0)
.entry("segments_with_doc_count_field", 0)
.entry("segments_with_deleted_docs", 0)
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "foo:bar")
.entry("segments_counted_in_constant_time", searcher.getLeafContexts().size())
)
)
)
)
);
},
ft
);

}
}
}
@@ -159,6 +159,14 @@ public BigArrays withCircuitBreaking() {
return new MockBigArrays(this.recycler, this.breakerService, true);
}

/* This breaks a whole mess of tests. I'm fixing them in several PRs, but including this change, commented out, saves
* time and cuts down on conflicts when I'm working in multiple branches.
@Override
public BigArrays withBreakerService(CircuitBreakerService breakerService) {
return new MockBigArrays(this.recycler, breakerService, this.shouldCheckBreaker());
}
*/
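Once uncommented, the override would make withBreakerService return another MockBigArrays, carrying the current breaker-check flag via the new shouldCheckBreaker() accessor, rather than dropping back to a plain BigArrays and losing the mock's array tracking. Purely illustrative while the method is still commented out (mockArrays and otherBreakerService are stand-ins):

// Hypothetical caller, valid only with the override above enabled.
BigArrays rebound = mockArrays.withBreakerService(otherBreakerService);
// rebound would still be a MockBigArrays with the same breaker-check flag,
// so the arrays it hands out remain tracked by the test framework.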

@Override
public ByteArray newByteArray(long size, boolean clearOnResize) {
final ByteArrayWrapper array = new ByteArrayWrapper(super.newByteArray(size, clearOnResize), clearOnResize);
@@ -851,8 +851,9 @@ protected void withAggregator(

try (DirectoryReader unwrapped = DirectoryReader.open(directory); IndexReader indexReader = wrapDirectoryReader(unwrapped)) {
IndexSearcher searcher = newIndexSearcher(indexReader);
AggregationContext context = createAggregationContext(searcher, query, fieldTypes);
verify.accept(searcher, createAggregator(aggregationBuilder, context));
try (AggregationContext context = createAggregationContext(searcher, query, fieldTypes)) {
verify.accept(searcher, createAggregator(aggregationBuilder, context));
}
}
}
}
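With the context now opened in try-with-resources, withAggregator releases it (and any big-array-backed state it owns) as soon as the verify callback returns. The caller-side contract is that the aggregator must not escape the lambda, as in the assertNeedsScores helper earlier in this diff; a sketch of a conforming caller:

withAggregator(
    new TermsAggregationBuilder("t").field("f"),
    new MatchAllDocsQuery(),
    iw -> {},                      // no documents needed for a scoreMode() check
    (searcher, agg) -> assertFalse(agg.scoreMode().needsScores())
);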