@@ -8,6 +8,7 @@
 
 package org.elasticsearch.search.aggregations.matrix.stats;
 
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.xcontent.ObjectParser;
 import org.elasticsearch.xcontent.ParseField;
@@ -202,14 +203,14 @@ static class ParsedMatrixStatsResult {
         );
 
         RESULT_PARSER.declareObject((ParsedMatrixStatsResult result, Map<String, Object> covars) -> {
-            result.covariances = new LinkedHashMap<>(covars.size());
+            result.covariances = Maps.newLinkedHashMapWithExpectedSize(covars.size());
             for (Map.Entry<String, Object> covar : covars.entrySet()) {
                 result.covariances.put(covar.getKey(), mapValueAsDouble(covar.getValue()));
             }
         }, (p, c) -> p.mapOrdered(), new ParseField(InternalMatrixStats.Fields.COVARIANCE));
 
         RESULT_PARSER.declareObject((ParsedMatrixStatsResult result, Map<String, Object> correls) -> {
-            result.correlations = new LinkedHashMap<>(correls.size());
+            result.correlations = Maps.newLinkedHashMapWithExpectedSize(correls.size());
             for (Map.Entry<String, Object> correl : correls.entrySet()) {
                 result.correlations.put(correl.getKey(), mapValueAsDouble(correl.getValue()));
             }
---

@@ -16,6 +16,7 @@
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
@@ -297,7 +298,7 @@ public void testBackgroundRetentionLeaseSync() throws Exception {
             .getShardOrNull(new ShardId(resolveIndex("index"), 0));
         // we will add multiple retention leases and expect to see them synced to all replicas
         final int length = randomIntBetween(1, 8);
-        final Map<String, RetentionLease> currentRetentionLeases = new LinkedHashMap<>(length);
+        final Map<String, RetentionLease> currentRetentionLeases = Maps.newLinkedHashMapWithExpectedSize(length);
         final List<String> ids = new ArrayList<>(length);
         for (int i = 0; i < length; i++) {
             final String id = randomValueOtherThanMany(currentRetentionLeases.keySet()::contains, () -> randomAlphaOfLength(8));
---

@@ -9,14 +9,14 @@
 package org.elasticsearch.cluster.routing;
 
 import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
@@ -44,7 +44,7 @@ public class RoutingNode implements Iterable<ShardRouting> {
 
     private final LinkedHashSet<ShardRouting> relocatingShards;
 
-    private final HashMap<Index, LinkedHashSet<ShardRouting>> shardsByIndex;
+    private final Map<Index, LinkedHashSet<ShardRouting>> shardsByIndex;
 
     public RoutingNode(String nodeId, DiscoveryNode node, ShardRouting... shards) {
         this(nodeId, node, buildShardRoutingMap(shards));
@@ -74,7 +74,7 @@ private RoutingNode(RoutingNode original) {
         this.shards = new LinkedHashMap<>(original.shards);
         this.relocatingShards = new LinkedHashSet<>(original.relocatingShards);
         this.initializingShards = new LinkedHashSet<>(original.initializingShards);
-        this.shardsByIndex = new LinkedHashMap<>(original.shardsByIndex.size());
+        this.shardsByIndex = Maps.newLinkedHashMapWithExpectedSize(original.shardsByIndex.size());
         for (Map.Entry<Index, LinkedHashSet<ShardRouting>> entry : original.shardsByIndex.entrySet()) {
             shardsByIndex.put(entry.getKey(), new LinkedHashSet<>(entry.getValue()));
         }
---

@@ -784,7 +784,7 @@ private Map<String, Object> readLinkedHashMap() throws IOException {
         if (size9 == 0) {
             return Collections.emptyMap();
         }
-        Map<String, Object> map9 = new LinkedHashMap<>(size9);
+        Map<String, Object> map9 = Maps.newLinkedHashMapWithExpectedSize(size9);
         for (int i = 0; i < size9; i++) {
             map9.put(readString(), readGenericValue());
         }
---

server/src/main/java/org/elasticsearch/common/util/Maps.java (12 additions & 0 deletions)

@@ -255,6 +255,18 @@ public static <K, V> Map<K, V> newMapWithExpectedSize(int expectedSize) {
         return new HashMap<>(capacity(expectedSize));
     }
 
+    /**
+     * Returns a linked hash map with a capacity sufficient to keep expectedSize elements without being resized.
+     *
+     * @param expectedSize the expected amount of elements in the map
+     * @param <K> the key type
+     * @param <V> the value type
+     * @return a new pre-sized {@link LinkedHashMap}
+     */
+    public static <K, V> LinkedHashMap<K, V> newLinkedHashMapWithExpectedSize(int expectedSize) {
+        return new LinkedHashMap<>(capacity(expectedSize));
+    }
+
     static int capacity(int expectedSize) {
         assert expectedSize >= 0;
         return expectedSize < 2 ? expectedSize + 1 : (int) (expectedSize / 0.75 + 1.0);
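
Editor's note on why this helper exists: the int passed to the HashMap/LinkedHashMap constructor is the initial table capacity, not the number of entries the map can hold, so with the default 0.75 load factor, new LinkedHashMap<>(n) rehashes once the map grows past 0.75 * n entries. A minimal illustrative sketch of the difference follows (not part of this change; the class name is invented, and the commented numbers assume OpenJDK's default load factor and power-of-two table sizing):

import java.util.LinkedHashMap;
import java.util.Map;

public class ExpectedSizeDemo {
    // Same formula as Maps.capacity(expectedSize) in the hunk above (minus the assert).
    static int capacity(int expectedSize) {
        return expectedSize < 2 ? expectedSize + 1 : (int) (expectedSize / 0.75 + 1.0);
    }

    public static void main(String[] args) {
        int expectedSize = 16;

        // Old pattern: table capacity 16, resize threshold 16 * 0.75 = 12,
        // so inserting 16 entries forces at least one rehash.
        Map<String, Integer> undersized = new LinkedHashMap<>(expectedSize);

        // New pattern: capacity(16) = (int) (16 / 0.75 + 1.0) = 22; the constructor
        // rounds the table up to the next power of two (32), giving a resize
        // threshold of 32 * 0.75 = 24 >= 16, so all 16 entries fit without resizing.
        Map<String, Integer> preSized = new LinkedHashMap<>(capacity(expectedSize));

        for (int i = 0; i < expectedSize; i++) {
            undersized.put("k" + i, i);
            preSized.put("k" + i, i);
        }
    }
}

The behavior matches newMapWithExpectedSize just above it, while preserving insertion order.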
---

@@ -9,12 +9,12 @@
 package org.elasticsearch.search.fetch.subphase.highlight;
 
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.BoundaryScannerType;
 
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
@@ -30,7 +30,7 @@ public SearchHighlightContext(Collection<Field> fields) {
 
     public SearchHighlightContext(Collection<Field> fields, boolean globalForceSource) {
         assert fields != null;
-        this.fields = new LinkedHashMap<>(fields.size());
+        this.fields = Maps.newLinkedHashMapWithExpectedSize(fields.size());
         for (Field field : fields) {
             this.fields.put(field.field, field);
         }
---

@@ -9,14 +9,14 @@
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.index.query.SearchExecutionContext;
 
-import java.util.LinkedHashMap;
 import java.util.Map;
 
 public class SuggestionSearchContext {
 
-    private final Map<String, SuggestionContext> suggestions = new LinkedHashMap<>(4);
+    private final Map<String, SuggestionContext> suggestions = Maps.newLinkedHashMapWithExpectedSize(4);
 
     public void addSuggestion(String name, SuggestionContext suggestion) {
         suggestions.put(name, suggestion);
---

@@ -14,6 +14,7 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.Suggest.Suggestion;
@@ -27,7 +28,6 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -274,7 +274,7 @@ public Option(StreamInput in) throws IOException {
             this.hit = new SearchHit(in);
         }
         int contextSize = in.readInt();
-        this.contexts = new LinkedHashMap<>(contextSize);
+        this.contexts = Maps.newLinkedHashMapWithExpectedSize(contextSize);
         for (int i = 0; i < contextSize; i++) {
             String contextName = in.readString();
             int nContexts = in.readVInt();
---

@@ -27,6 +27,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.CheckedBiConsumer;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.DateFieldMapper;
@@ -57,7 +58,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -969,7 +969,7 @@ private void indexSampleData(List<ZonedDateTime> dataset, RandomIndexWriter inde
     }
 
     private Map<String, Integer> bucketCountsAsMap(InternalAutoDateHistogram result) {
-        LinkedHashMap<String, Integer> map = new LinkedHashMap<>(result.getBuckets().size());
+        Map<String, Integer> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
         result.getBuckets().stream().forEach(b -> {
             Object old = map.put(b.getKeyAsString(), Math.toIntExact(b.getDocCount()));
             assertNull(old);
@@ -978,7 +978,7 @@ private Map<String, Integer> bucketCountsAsMap(InternalAutoDateHistogram result)
     }
 
     private Map<String, Double> maxAsMap(InternalAutoDateHistogram result) {
-        LinkedHashMap<String, Double> map = new LinkedHashMap<>(result.getBuckets().size());
+        Map<String, Double> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
         result.getBuckets().stream().forEach(b -> {
             InternalMax max = b.getAggregations().get("max");
             Object old = map.put(b.getKeyAsString(), max.getValue());
---

@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.eql.stats;
 
 import org.elasticsearch.common.metrics.CounterMetric;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.xpack.core.watcher.common.stats.Counters;
 
 import java.util.Collections;
@@ -40,7 +41,7 @@ public String toString() {
     public Metrics() {
         Map<QueryMetric, Map<OperationType, CounterMetric>> qMap = new LinkedHashMap<>();
         for (QueryMetric metric : QueryMetric.values()) {
-            Map<OperationType, CounterMetric> metricsMap = new LinkedHashMap<>(OperationType.values().length);
+            Map<OperationType, CounterMetric> metricsMap = Maps.newLinkedHashMapWithExpectedSize(OperationType.values().length);
             for (OperationType type : OperationType.values()) {
                 metricsMap.put(type, new CounterMetric());
             }
@@ -49,7 +50,7 @@ public Metrics() {
         }
         opsByTypeMetrics = Collections.unmodifiableMap(qMap);
 
-        Map<FeatureMetric, CounterMetric> fMap = new LinkedHashMap<>(FeatureMetric.values().length);
+        Map<FeatureMetric, CounterMetric> fMap = Maps.newLinkedHashMapWithExpectedSize(FeatureMetric.values().length);
         for (FeatureMetric featureMetric : FeatureMetric.values()) {
             fMap.put(featureMetric, new CounterMetric());
         }
---

@@ -22,6 +22,7 @@
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.metrics.CounterMetric;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -321,8 +322,8 @@ static IngestStats ingestStatsForPipelineIds(NodeStats nodeStats, Set<String> pi
 
     private static IngestStats mergeStats(List<IngestStats> ingestStatsList) {
 
-        Map<String, IngestStatsAccumulator> pipelineStatsAcc = new LinkedHashMap<>(ingestStatsList.size());
-        Map<String, Map<String, IngestStatsAccumulator>> processorStatsAcc = new LinkedHashMap<>(ingestStatsList.size());
+        Map<String, IngestStatsAccumulator> pipelineStatsAcc = Maps.newLinkedHashMapWithExpectedSize(ingestStatsList.size());
+        Map<String, Map<String, IngestStatsAccumulator>> processorStatsAcc = Maps.newLinkedHashMapWithExpectedSize(ingestStatsList.size());
         IngestStatsAccumulator totalStats = new IngestStatsAccumulator();
         ingestStatsList.forEach(ingestStats -> {
 
@@ -350,7 +351,7 @@ private static IngestStats mergeStats(List<IngestStats> ingestStatsList) {
             (pipelineId, accumulator) -> pipelineStatList.add(new IngestStats.PipelineStat(pipelineId, accumulator.build()))
         );
 
-        Map<String, List<IngestStats.ProcessorStat>> processorStatList = new LinkedHashMap<>(processorStatsAcc.size());
+        Map<String, List<IngestStats.ProcessorStat>> processorStatList = Maps.newLinkedHashMapWithExpectedSize(processorStatsAcc.size());
         processorStatsAcc.forEach((pipelineId, accumulatorMap) -> {
             List<IngestStats.ProcessorStat> processorStats = new ArrayList<>(accumulatorMap.size());
             accumulatorMap.forEach(
---

@@ -6,6 +6,7 @@
  */
 package org.elasticsearch.xpack.ql.expression.gen.script;
 
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.xpack.ql.QlIllegalArgumentException;
 
 import java.util.ArrayList;
@@ -61,7 +62,7 @@ List<String> asCodeNames() {
     // return only the vars (as parameter for a script)
    // agg refs are returned separately to be provided as bucket_paths
    Map<String, Object> asParams() {
-        Map<String, Object> map = new LinkedHashMap<>(params.size());
+        Map<String, Object> map = Maps.newLinkedHashMapWithExpectedSize(params.size());
 
        int count = 0;
 
---

@@ -7,6 +7,7 @@
 package org.elasticsearch.xpack.ql.expression.predicate.fulltext;
 
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.xpack.ql.ParsingException;
 import org.elasticsearch.xpack.ql.expression.predicate.fulltext.FullTextPredicate.Operator;
 import org.elasticsearch.xpack.ql.tree.Source;
@@ -27,7 +28,7 @@ static Map<String, String> parseSettings(String options, Source source) {
             return emptyMap();
         }
         String[] list = Strings.delimitedListToStringArray(options, DELIMITER);
-        Map<String, String> op = new LinkedHashMap<>(list.length);
+        Map<String, String> op = Maps.newLinkedHashMapWithExpectedSize(list.length);
 
         for (String entry : list) {
             String[] split = splitInTwo(entry, "=");
---

@@ -23,6 +23,7 @@
 import org.elasticsearch.cluster.metadata.AliasMetadata;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.transport.NoSuchRemoteClusterException;
@@ -665,7 +666,7 @@ private static List<EsIndex> buildIndices(
 
         List<String> resolvedIndices = new ArrayList<>(asList(fieldCapsResponse.getIndices()));
         int mapSize = CollectionUtils.mapSize(resolvedIndices.size() + resolvedAliases.size());
-        Map<String, Fields> indices = new LinkedHashMap<>(mapSize);
+        Map<String, Fields> indices = Maps.newLinkedHashMapWithExpectedSize(mapSize);
         Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;
 
         // sort fields in reverse order to build the field hierarchy
---

@@ -18,6 +18,7 @@
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -313,7 +314,7 @@ public void testKeyOrdering() {
         List<CompositeAggregation.Bucket> foos = new ArrayList<>();
 
         CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
-        LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
+        LinkedHashMap<String, Object> keys = Maps.newLinkedHashMapWithExpectedSize(3);
         keys.put("foo.date_histogram", 123L);
         keys.put("bar.terms", "baz");
         keys.put("abc.histogram", 1.9);
@@ -361,7 +362,7 @@ public void testKeyOrderingLong() {
         List<CompositeAggregation.Bucket> foos = new ArrayList<>();
 
         CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
-        LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
+        LinkedHashMap<String, Object> keys = Maps.newLinkedHashMapWithExpectedSize(3);
         keys.put("foo.date_histogram", 123L);
 
         char[] charArray = new char[IndexWriter.MAX_TERM_LENGTH];
@@ -409,7 +410,7 @@ public void testNullKeys() {
         List<CompositeAggregation.Bucket> foos = new ArrayList<>();
 
         CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
-        LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
+        Map<String, Object> keys = Maps.newLinkedHashMapWithExpectedSize(3);
         keys.put("bar.terms", null);
         keys.put("abc.histogram", null);
         when(bucket.getKey()).thenReturn(keys);
---

@@ -22,6 +22,7 @@
 import org.elasticsearch.common.settings.KeyStoreWrapper;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.core.Booleans;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.env.Environment;
@@ -50,7 +51,6 @@
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.security.SecureRandom;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -586,7 +586,7 @@ void changePasswords(
         CheckedBiConsumer<String, SecureString, Exception> successCallback,
         Terminal terminal
     ) throws Exception {
-        Map<String, SecureString> passwordsMap = new LinkedHashMap<>(USERS.size());
+        Map<String, SecureString> passwordsMap = Maps.newLinkedHashMapWithExpectedSize(USERS.size());
         try {
             for (String user : USERS) {
                 if (USERS_WITH_SHARED_PASSWORDS.containsValue(user)) {
---

@@ -9,6 +9,7 @@
 import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.Token;
 import org.antlr.v4.runtime.tree.TerminalNode;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.xpack.ql.expression.Alias;
 import org.elasticsearch.xpack.ql.expression.Expression;
 import org.elasticsearch.xpack.ql.expression.Literal;
@@ -58,7 +59,6 @@
 
 import java.time.ZoneId;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -79,7 +79,7 @@ public LogicalPlan visitQuery(QueryContext ctx) {
         List<SubQueryAlias> namedQueries = visitList(this, ctx.namedQuery(), SubQueryAlias.class);
 
         // unwrap query (and validate while at it)
-        Map<String, SubQueryAlias> cteRelations = new LinkedHashMap<>(namedQueries.size());
+        Map<String, SubQueryAlias> cteRelations = Maps.newLinkedHashMapWithExpectedSize(namedQueries.size());
         for (SubQueryAlias namedQuery : namedQueries) {
             if (cteRelations.put(namedQuery.alias(), namedQuery) != null) {
                 throw new ParsingException(namedQuery.source(), "Duplicate alias {}", namedQuery.alias());