@@ -132,12 +132,67 @@ public Weight createWeight(Query query, ScoreMode scoreMode, float boost) throws IOException {
}
}

+    private void checkCancelled() {
+        if (checkCancelled != null) {
+            checkCancelled.run();
+        }
+    }
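For orientation: the `Runnable` consulted here is installed from the query phase (see `searcher.setCheckCancelled(checkCancelled)` in the QueryPhase hunks further down) and is expected to throw when the task should stop. A minimal sketch of that wiring, assuming a task with an `isCancelled()` flag; the exact body is an assumption, not this PR's code:

    // Hypothetical wiring: the hook throws TaskCancelledException so that
    // searchLeaf aborts between segments and between scoring windows.
    Runnable checkCancelled = () -> {
        if (searchContext.getTask().isCancelled()) {
            throw new TaskCancelledException("query was cancelled");
        }
    };
    searchContext.searcher().setCheckCancelled(checkCancelled);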

    @Override
    protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-        final Weight cancellableWeight;
-        if (checkCancelled != null) {
-            cancellableWeight = new Weight(weight.getQuery()) {
+        for (LeafReaderContext ctx : leaves) { // search each subreader
+            searchLeaf(ctx, weight, collector);
+        }
+    }

+    /**
+     * Lower-level search API.
+     *
+     * {@link LeafCollector#collect(int)} is called for every matching document in
+     * the provided <code>ctx</code>.
+     */
+    public void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException {
+        checkCancelled();
+        weight = wrapWeight(weight);
+        final LeafCollector leafCollector;
+        try {
+            leafCollector = collector.getLeafCollector(ctx);
+        } catch (CollectionTerminatedException e) {
+            // there is no doc of interest in this reader context
+            // continue with the following leaf
+            return;
+        }
+        Bits liveDocs = ctx.reader().getLiveDocs();
+        BitSet liveDocsBitSet = getSparseBitSetOrNull(ctx.reader().getLiveDocs());
+        if (liveDocsBitSet == null) {
+            BulkScorer bulkScorer = weight.bulkScorer(ctx);
+            if (bulkScorer != null) {
+                try {
+                    bulkScorer.score(leafCollector, liveDocs);
+                } catch (CollectionTerminatedException e) {
+                    // collection was terminated prematurely
+                    // continue with the following leaf
+                }
+            }
+        } else {
+            // if the role query result set is sparse then we should use the SparseFixedBitSet for advancing:
+            Scorer scorer = weight.scorer(ctx);
+            if (scorer != null) {
+                try {
+                    intersectScorerAndBitSet(scorer, liveDocsBitSet, leafCollector,
+                        checkCancelled == null ? () -> {} : checkCancelled);
+                } catch (CollectionTerminatedException e) {
+                    // collection was terminated prematurely
+                    // continue with the following leaf
+                }
+            }
+        }
+    }
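Both `catch (CollectionTerminatedException e)` blocks above rely on Lucene's contract that a leaf collector may throw this exception to stop collection of the current segment without failing the whole search. An illustrative collector (not part of this PR, names invented) that uses the mechanism:

    // Illustrative only: stop collecting a segment after maxHits matches.
    final class FirstNCollector extends SimpleCollector {
        private final int maxHits;
        private int count;

        FirstNCollector(int maxHits) {
            this.maxHits = maxHits;
        }

        @Override
        public void collect(int doc) {
            if (++count > maxHits) {
                // searchLeaf catches this and moves on to the next leaf
                throw new CollectionTerminatedException();
            }
        }

        @Override
        public ScoreMode scoreMode() {
            return ScoreMode.COMPLETE_NO_SCORES;
        }
    }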

+    private Weight wrapWeight(Weight weight) {
+        if (checkCancelled != null) {
+            return new Weight(weight.getQuery()) {
                @Override
                public void extractTerms(Set<Term> terms) {
                    throw new UnsupportedOperationException();
@@ -169,48 +224,10 @@ public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
                }
            };
        } else {
-            cancellableWeight = weight;
+            return weight;
        }
-        searchInternal(leaves, cancellableWeight, collector);
    }
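The collapsed region above keeps the anonymous `Weight`'s `bulkScorer` override, which wraps the query's `BulkScorer` so that the cancellation hook runs periodically while documents are scored. Since the body is folded out of this view, here is a sketch of the idea only; the chunk size and structure are assumptions, not this PR's exact code (the codebase's real wrapper is a cancellable bulk scorer with similar behavior):

    // Sketch: run the cancellation hook between chunks of documents.
    class CancellableBulkScorer extends BulkScorer {
        private static final int CHUNK = 4096; // assumed chunk size
        private final BulkScorer scorer;
        private final Runnable checkCancelled;

        CancellableBulkScorer(BulkScorer scorer, Runnable checkCancelled) {
            this.scorer = scorer;
            this.checkCancelled = checkCancelled;
        }

        @Override
        public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException {
            while (min < max) {
                checkCancelled.run();
                min = scorer.score(collector, acceptDocs, min, (int) Math.min((long) min + CHUNK, max));
            }
            checkCancelled.run();
            return min;
        }

        @Override
        public long cost() {
            return scorer.cost();
        }
    }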

-    private void searchInternal(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-        for (LeafReaderContext ctx : leaves) { // search each subreader
-            final LeafCollector leafCollector;
-            try {
-                leafCollector = collector.getLeafCollector(ctx);
-            } catch (CollectionTerminatedException e) {
-                // there is no doc of interest in this reader context
-                // continue with the following leaf
-                continue;
-            }
-            Bits liveDocs = ctx.reader().getLiveDocs();
-            BitSet liveDocsBitSet = getSparseBitSetOrNull(ctx.reader().getLiveDocs());
-            if (liveDocsBitSet == null) {
-                BulkScorer bulkScorer = weight.bulkScorer(ctx);
-                if (bulkScorer != null) {
-                    try {
-                        bulkScorer.score(leafCollector, liveDocs);
-                    } catch (CollectionTerminatedException e) {
-                        // collection was terminated prematurely
-                        // continue with the following leaf
-                    }
-                }
-            } else {
-                // if the role query result set is sparse then we should use the SparseFixedBitSet for advancing:
-                Scorer scorer = weight.scorer(ctx);
-                if (scorer != null) {
-                    try {
-                        intersectScorerAndBitSet(scorer, liveDocsBitSet, leafCollector,
-                            checkCancelled == null ? () -> {} : checkCancelled);
-                    } catch (CollectionTerminatedException e) {
-                        // collection was terminated prematurely
-                        // continue with the following leaf
-                    }
-                }
-            }
-        }
-    }

private static BitSet getSparseBitSetOrNull(Bits liveDocs) {
if (liveDocs instanceof SparseFixedBitSet) {
@@ -49,8 +49,6 @@ public class CollectorResult implements ToXContentObject, Writeable {
public static final String REASON_SEARCH_POST_FILTER = "search_post_filter";
public static final String REASON_SEARCH_MIN_SCORE = "search_min_score";
public static final String REASON_SEARCH_MULTI = "search_multi";
-    public static final String REASON_SEARCH_TIMEOUT = "search_timeout";
-    public static final String REASON_SEARCH_CANCELLED = "search_cancelled";
public static final String REASON_AGGREGATION = "aggregation";
public static final String REASON_AGGREGATION_GLOBAL = "aggregation_global";


This file was deleted.

@@ -28,16 +28,13 @@
import org.elasticsearch.common.lucene.MinimumScoreCollector;
import org.elasticsearch.common.lucene.search.FilteredCollector;
import org.elasticsearch.search.profile.query.InternalProfileCollector;
-import org.elasticsearch.tasks.TaskCancelledException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import java.util.function.BooleanSupplier;

-import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_CANCELLED;
import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_MIN_SCORE;
import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_MULTI;
import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_POST_FILTER;
@@ -150,18 +147,6 @@ protected InternalProfileCollector createWithProfiler(InternalProfileCollector in) {
};
}

-    /**
-     * Creates a collector that throws {@link TaskCancelledException} if the search is cancelled
-     */
-    static QueryCollectorContext createCancellableCollectorContext(BooleanSupplier cancelled) {
-        return new QueryCollectorContext(REASON_SEARCH_CANCELLED) {
-            @Override
-            Collector create(Collector in) throws IOException {
-                return new CancellableCollector(cancelled, in);
-            }
-        };
-    }

/**
* Creates collector limiting the collection to the first <code>numHits</code> documents
*/
81 changes: 59 additions & 22 deletions server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
@@ -34,7 +34,6 @@
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.FieldDoc;
-import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
@@ -43,8 +42,10 @@
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.Weight;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.common.Booleans;
+import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.common.util.concurrent.QueueResizingEsThreadPoolExecutor;
@@ -68,12 +69,14 @@
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
-import java.util.function.Consumer;

-import static org.elasticsearch.search.query.QueryCollectorContext.createCancellableCollectorContext;
import static org.elasticsearch.search.query.QueryCollectorContext.createEarlyTerminationCollectorContext;
import static org.elasticsearch.search.query.QueryCollectorContext.createFilteredCollectorContext;
import static org.elasticsearch.search.query.QueryCollectorContext.createMinScoreCollectorContext;
@@ -89,7 +92,7 @@
public class QueryPhase implements SearchPhase {
private static final Logger LOGGER = LogManager.getLogger(QueryPhase.class);
public static final boolean SYS_PROP_LONG_SORT_OPTIMIZED =
Booleans.parseBoolean(System.getProperty("es.search.long_sort_optimized", "false"));
Booleans.parseBoolean(System.getProperty("es.search.long_sort_optimized", "true"));

private final AggregationPhase aggregationPhase;
private final SuggestPhase suggestPhase;
@@ -124,8 +127,7 @@ public void execute(SearchContext searchContext) throws QueryPhaseExecutionException {
// request, preProcess is called on the DFS phase phase, this is why we pre-process them
// here to make sure it happens during the QUERY phase
aggregationPhase.preProcess(searchContext);
-        final ContextIndexSearcher searcher = searchContext.searcher();
-        boolean rescore = execute(searchContext, searchContext.searcher(), searcher::setCheckCancelled);
+        boolean rescore = executeInternal(searchContext);

if (rescore) { // only if we do a regular search
rescorePhase.execute(searchContext);
@@ -145,9 +147,8 @@ public void execute(SearchContext searchContext) throws QueryPhaseExecutionException {
* wire everything (mapperService, etc.)
* @return whether the rescoring phase should be executed
*/
-    static boolean execute(SearchContext searchContext,
-                           final IndexSearcher searcher,
-                           Consumer<Runnable> checkCancellationSetter) throws QueryPhaseExecutionException {
+    static boolean executeInternal(SearchContext searchContext) throws QueryPhaseExecutionException {
+        final ContextIndexSearcher searcher = searchContext.searcher();
SortAndFormats sortAndFormatsForRewrittenNumericSort = null;
final IndexReader reader = searcher.getIndexReader();
QuerySearchResult queryResult = searchContext.queryResult();
@@ -220,6 +221,7 @@ static boolean execute(SearchContext searchContext,
hasFilterCollector = true;
}

+        CheckedConsumer<List<LeafReaderContext>, IOException> leafSorter = l -> {};
// try to rewrite numeric or date sort to the optimized distanceFeatureQuery
if ((searchContext.sort() != null) && SYS_PROP_LONG_SORT_OPTIMIZED) {
Query rewrittenQuery = tryRewriteLongSort(searchContext, searcher.getIndexReader(), query, hasFilterCollector);
@@ -228,14 +230,20 @@
// modify sorts: add sort on _score as 1st sort, and move the sort on the original field as the 2nd sort
SortField[] oldSortFields = searchContext.sort().sort.getSort();
DocValueFormat[] oldFormats = searchContext.sort().formats;
-                SortField[] newSortFields = new SortField[oldSortFields.length + 1];
-                DocValueFormat[] newFormats = new DocValueFormat[oldSortFields.length + 1];
+                SortField[] newSortFields = new SortField[oldSortFields.length + 2];
+                DocValueFormat[] newFormats = new DocValueFormat[oldSortFields.length + 2];
                 newSortFields[0] = SortField.FIELD_SCORE;
                 newFormats[0] = DocValueFormat.RAW;
+                // Add a tiebreak on _doc in order to be able to search
+                // the leaves in any order. This is needed since we reorder
+                // the leaves based on the minimum value in each segment.
+                newSortFields[newSortFields.length - 1] = SortField.FIELD_DOC;
+                newFormats[newSortFields.length - 1] = DocValueFormat.RAW;
                 System.arraycopy(oldSortFields, 0, newSortFields, 1, oldSortFields.length);
                 System.arraycopy(oldFormats, 0, newFormats, 1, oldFormats.length);
                 sortAndFormatsForRewrittenNumericSort = searchContext.sort(); // stash SortAndFormats to restore it later
                 searchContext.sort(new SortAndFormats(new Sort(newSortFields), newFormats));
+                leafSorter = createLeafSorter(oldSortFields[0]);
}
}
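To make the rewrite concrete, a hypothetical before/after for a request sorting on one long field (field name assumed):

    // before: sort    = [timestamp]            formats = [format(timestamp)]
    // after:  sort    = [_score, timestamp, _doc]
    //         formats = [RAW, format(timestamp), RAW]
    // The _score produced by the rewritten distanceFeatureQuery ranks documents in
    // the same order as the original long sort, and the trailing _doc tiebreak is
    // what allows the leaves to be searched out of order.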

@@ -279,16 +287,11 @@ static boolean execute(SearchContext searchContext,
checkCancelled = null;
}

-        checkCancellationSetter.accept(checkCancelled);
-
-        // add cancellable
-        // this only performs segment-level cancellation, which is cheap and checked regardless of
-        // searchContext.lowLevelCancellation()
-        collectors.add(createCancellableCollectorContext(searchContext.getTask()::isCancelled));
+        searcher.setCheckCancelled(checkCancelled);

final boolean doProfile = searchContext.getProfilers() != null;
// create the top docs collector last when the other collectors are known
-        final TopDocsCollectorContext topDocsFactory = createTopDocsCollectorContext(searchContext, reader, hasFilterCollector);
+        final TopDocsCollectorContext topDocsFactory = createTopDocsCollectorContext(searchContext, hasFilterCollector);
// add the top docs collector, the first collector context in the chain
collectors.addFirst(topDocsFactory);

@@ -302,7 +305,15 @@
}

try {
-            searcher.search(query, queryCollector);
+            Weight weight = searcher.createWeight(searcher.rewrite(query), queryCollector.scoreMode(), 1f);
+            // We search the leaves in a different order when the numeric sort optimization is
+            // activated. Collectors expect leaves in order when searching but this is fine in this
+            // case since we only have a TopFieldCollector and we force the tiebreak on _doc.
+            List<LeafReaderContext> leaves = new ArrayList<>(searcher.getIndexReader().leaves());
+            leafSorter.accept(leaves);
+            for (LeafReaderContext ctx : leaves) {
+                searcher.searchLeaf(ctx, weight, queryCollector);
+            }
} catch (EarlyTerminatingCollector.EarlyTerminationException e) {
queryResult.terminatedEarly(true);
} catch (TimeExceededException e) {
@@ -427,13 +438,39 @@ private static Query tryRewriteLongSort(SearchContext searchContext, IndexReader reader, Query query, boolean hasFilterCollector) throws IOException {
return rewrittenQuery;
}

-    // Restore fieldsDocs to remove the first _score sort
-    // updating in place without creating new FieldDoc objects
+    /**
+     * Creates a sorter of {@link LeafReaderContext} that orders leaves depending on the minimum
+     * value and the sort order of the provided <code>sortField</code>.
+     */
+    static CheckedConsumer<List<LeafReaderContext>, IOException> createLeafSorter(SortField sortField) {
+        return leaves -> {
+            long[] sortValues = new long[leaves.size()];
+            long missingValue = (long) sortField.getMissingValue();
+            for (LeafReaderContext ctx : leaves) {
+                PointValues values = ctx.reader().getPointValues(sortField.getField());
+                if (values == null) {
+                    sortValues[ctx.ord] = missingValue;
+                } else {
+                    byte[] sortValue = sortField.getReverse() ? values.getMaxPackedValue() : values.getMinPackedValue();
+                    sortValues[ctx.ord] = sortValue == null ? missingValue : LongPoint.decodeDimension(sortValue, 0);
+                }
+            }
+            Comparator<LeafReaderContext> comparator = Comparator.comparingLong(l -> sortValues[l.ord]);
+            if (sortField.getReverse()) {

[Review comment, Contributor]: I would think in the case of the reverse sort, we should look at the maxValues not minValues?

+                comparator = comparator.reversed();
+            }
+            Collections.sort(leaves, comparator);
+        };
+    }
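A hedged usage sketch of `createLeafSorter` (field name and setup assumed); note that it reads `sortField.getMissingValue()` and casts the result to `long`, so a missing value must have been set by the caller:

    // Hypothetical caller: visit segments in ascending order of their minimum value.
    SortField sf = new SortedNumericSortField("timestamp", SortField.Type.LONG);
    sf.setMissingValue(Long.MIN_VALUE); // required: createLeafSorter casts this to long
    CheckedConsumer<List<LeafReaderContext>, IOException> sorter = createLeafSorter(sf);
    List<LeafReaderContext> leaves = new ArrayList<>(reader.leaves());
    sorter.accept(leaves); // leaves now ordered by per-segment min (max if reverse)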

+    /**
+     * Restore fieldsDocs to remove the first _score and last _doc sort.
+     */
     static void restoreTopFieldDocs(QuerySearchResult result, SortAndFormats originalSortAndFormats) {
         TopDocs topDocs = result.topDocs().topDocs;
         for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
             FieldDoc fieldDoc = (FieldDoc) scoreDoc;
-            fieldDoc.fields = Arrays.copyOfRange(fieldDoc.fields, 1, fieldDoc.fields.length);
+            fieldDoc.fields = Arrays.copyOfRange(fieldDoc.fields, 1, fieldDoc.fields.length - 1);
         }
}
TopFieldDocs newTopDocs = new TopFieldDocs(topDocs.totalHits, topDocs.scoreDocs, originalSortAndFormats.sort.getSort());
result.topDocs(new TopDocsAndMaxScore(newTopDocs, Float.NaN), originalSortAndFormats.formats);
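For example (values assumed), a hit collected under the rewritten sort holds three sort values and is trimmed back to the original single value:

    // collected: fields = [0.73f /* _score */, 1546300800000L /* original field */, 42 /* _doc */]
    // restored:  fields = [1546300800000L]   // copyOfRange(fields, 1, fields.length - 1)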