Skip to content

Commit 47b4925

Browse files
authored
Improve resiliency to auto-formatting in server (#48450)
Make a number of changes so that code in the `server` directory is more resilient to automatic formatting. This covers: * Reformatting multiline JSON to embed whitespace in the strings * Move some comments around so they aren't auto-formatted to a strange place. This also required moving some `&&` and `||` operators from the end-of-line to start-of-line. * Turn off formatting of data tables in `HyperLogLogPlusPlus`. This also required a checkstyle suppression. * Add helper method `reformatJson()`, to strip whitespace from a JSON document using XContent methods. This is sometimes necessary where a test is comparing some machine-generated JSON with an expected value. Also, `HyperLogLogPlusPlus.java` is now excluded from formatting because it contains large data tables that don't reformat well with the current settings, and changing the settings would be worse for the rest of the codebase.
1 parent 30dcf27 commit 47b4925

File tree

22 files changed

+148
-89
lines changed

22 files changed

+148
-89
lines changed

build.gradle

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,6 @@ subprojects {
114114

115115
spotless {
116116
java {
117-
118117
removeUnusedImports()
119118
eclipse().configFile rootProject.file('.eclipseformat.xml')
120119
trimTrailingWhitespace()

buildSrc/src/main/resources/checkstyle_suppressions.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
configuration of classes that aren't in packages. -->
2222
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]Dummy.java" checks="PackageDeclaration" />
2323

24-
<!-- Intentionally has long example curl commands to coinncide with sibling Painless tests. -->
24+
<!-- Intentionally has long example curl commands to coincide with sibling Painless tests. -->
2525
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
2626

2727
<!--

server/build.gradle

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -138,6 +138,22 @@ dependencies {
138138
compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
139139
compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
140140

141+
// Until this project is always being formatted with spotless, we need to
142+
// guard against `spotless()` not existing.
143+
try {
144+
spotless {
145+
java {
146+
// Contains large data tables that do not format well.
147+
targetExclude 'src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java'
148+
}
149+
}
150+
}
151+
catch (Exception e) {
152+
if (e.getMessage().contains("Could not find method spotless") == false) {
153+
throw e;
154+
}
155+
}
156+
141157
forbiddenPatterns {
142158
exclude '**/*.json'
143159
exclude '**/*.jmx'

server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -145,25 +145,31 @@ public enum QueryType {
145145
INTERSECTS {
146146
@Override
147147
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
148-
// part of the other range must touch this range
149-
// this: |---------------|
150-
// other: |------|
148+
/*
149+
* part of the other range must touch this range
150+
* this: |---------------|
151+
* other: |------|
152+
*/
151153
return from.compareTo(otherTo) <= 0 && to.compareTo(otherFrom) >= 0;
152154
}
153155
}, WITHIN {
154156
@Override
155157
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
156-
// other range must entirely lie within this range
157-
// this: |---------------|
158-
// other: |------|
158+
/*
159+
* other range must entirely lie within this range
160+
* this: |---------------|
161+
* other: |------|
162+
*/
159163
return from.compareTo(otherFrom) <= 0 && to.compareTo(otherTo) >= 0;
160164
}
161165
}, CONTAINS {
162166
@Override
163167
boolean matches(BytesRef from, BytesRef to, BytesRef otherFrom, BytesRef otherTo) {
164-
// this and other range must overlap
165-
// this: |------|
166-
// other: |---------------|
168+
/*
169+
* this and other range must overlap
170+
* this: |------|
171+
* other: |---------------|
172+
*/
167173
return from.compareTo(otherFrom) >= 0 && to.compareTo(otherTo) <= 0;
168174
}
169175
}, CROSSES {

server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -110,10 +110,12 @@ private void innerRun() throws IOException {
110110
} else {
111111
ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
112112
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, scoreDocs);
113-
if (scoreDocs.length == 0) { // no docs to fetch -- sidestep everything and return
113+
// no docs to fetch -- sidestep everything and return
114+
if (scoreDocs.length == 0) {
115+
// we have to release contexts here to free up resources
114116
phaseResults.stream()
115117
.map(SearchPhaseResult::queryResult)
116-
.forEach(this::releaseIrrelevantSearchContext); // we have to release contexts here to free up resources
118+
.forEach(this::releaseIrrelevantSearchContext);
117119
finishPhase.run();
118120
} else {
119121
final ScoreDoc[] lastEmittedDocPerShard = isScrollSearch ?

server/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -117,10 +117,11 @@ public RoutingNodes(ClusterState clusterState, boolean readOnly) {
117117
assignedShardsAdd(shard);
118118
if (shard.relocating()) {
119119
relocatingShards++;
120-
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(),
121-
k -> new LinkedHashMap<>()); // LinkedHashMap to preserve order
122-
// add the counterpart shard with relocatingNodeId reflecting the source from which
120+
// LinkedHashMap to preserve order.
121+
// Add the counterpart shard with relocatingNodeId reflecting the source from which
123122
// it's relocating from.
123+
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(),
124+
k -> new LinkedHashMap<>());
124125
ShardRouting targetShardRouting = shard.getTargetRelocatingShard();
125126
addInitialRecovery(targetShardRouting, indexShard.primary);
126127
previousValue = entries.put(targetShardRouting.shardId(), targetShardRouting);

server/src/main/java/org/elasticsearch/common/lucene/Lucene.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -256,7 +256,8 @@ public static void cleanLuceneIndex(Directory directory) throws IOException {
256256
.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
257257
.setMergePolicy(NoMergePolicy.INSTANCE) // no merges
258258
.setCommitOnClose(false) // no commits
259-
.setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
259+
.setOpenMode(IndexWriterConfig.OpenMode.CREATE) // force creation - don't append...
260+
))
260261
{
261262
// do nothing and close this will kick of IndexFileDeleter which will remove all pending files
262263
}

server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java

Lines changed: 25 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -458,14 +458,14 @@ public boolean equals(Object o) {
458458
if (this == o) return true;
459459
if (!(o instanceof Item)) return false;
460460
Item other = (Item) o;
461-
return Objects.equals(index, other.index) &&
462-
Objects.equals(id, other.id) &&
463-
Objects.equals(doc, other.doc) &&
464-
Arrays.equals(fields, other.fields) && // otherwise we are comparing pointers
465-
Objects.equals(perFieldAnalyzer, other.perFieldAnalyzer) &&
466-
Objects.equals(routing, other.routing) &&
467-
Objects.equals(version, other.version) &&
468-
Objects.equals(versionType, other.versionType);
461+
return Objects.equals(index, other.index)
462+
&& Objects.equals(id, other.id)
463+
&& Objects.equals(doc, other.doc)
464+
&& Arrays.equals(fields, other.fields) // otherwise we are comparing pointers
465+
&& Objects.equals(perFieldAnalyzer, other.perFieldAnalyzer)
466+
&& Objects.equals(routing, other.routing)
467+
&& Objects.equals(version, other.version)
468+
&& Objects.equals(versionType, other.versionType);
469469
}
470470
}
471471

@@ -1139,23 +1139,23 @@ protected int doHashCode() {
11391139

11401140
@Override
11411141
protected boolean doEquals(MoreLikeThisQueryBuilder other) {
1142-
return Arrays.equals(fields, other.fields) &&
1143-
Arrays.equals(likeTexts, other.likeTexts) &&
1144-
Arrays.equals(unlikeTexts, other.unlikeTexts) &&
1145-
Arrays.equals(likeItems, other.likeItems) &&
1146-
Arrays.equals(unlikeItems, other.unlikeItems) &&
1147-
Objects.equals(maxQueryTerms, other.maxQueryTerms) &&
1148-
Objects.equals(minTermFreq, other.minTermFreq) &&
1149-
Objects.equals(minDocFreq, other.minDocFreq) &&
1150-
Objects.equals(maxDocFreq, other.maxDocFreq) &&
1151-
Objects.equals(minWordLength, other.minWordLength) &&
1152-
Objects.equals(maxWordLength, other.maxWordLength) &&
1153-
Arrays.equals(stopWords, other.stopWords) && // otherwise we are comparing pointers
1154-
Objects.equals(analyzer, other.analyzer) &&
1155-
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
1156-
Objects.equals(boostTerms, other.boostTerms) &&
1157-
Objects.equals(include, other.include) &&
1158-
Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
1142+
return Arrays.equals(fields, other.fields)
1143+
&& Arrays.equals(likeTexts, other.likeTexts)
1144+
&& Arrays.equals(unlikeTexts, other.unlikeTexts)
1145+
&& Arrays.equals(likeItems, other.likeItems)
1146+
&& Arrays.equals(unlikeItems, other.unlikeItems)
1147+
&& Objects.equals(maxQueryTerms, other.maxQueryTerms)
1148+
&& Objects.equals(minTermFreq, other.minTermFreq)
1149+
&& Objects.equals(minDocFreq, other.minDocFreq)
1150+
&& Objects.equals(maxDocFreq, other.maxDocFreq)
1151+
&& Objects.equals(minWordLength, other.minWordLength)
1152+
&& Objects.equals(maxWordLength, other.maxWordLength)
1153+
&& Arrays.equals(stopWords, other.stopWords) // otherwise we are comparing pointers
1154+
&& Objects.equals(analyzer, other.analyzer)
1155+
&& Objects.equals(minimumShouldMatch, other.minimumShouldMatch)
1156+
&& Objects.equals(boostTerms, other.boostTerms)
1157+
&& Objects.equals(include, other.include)
1158+
&& Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField);
11591159
}
11601160

11611161
@Override

server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -229,8 +229,8 @@ public synchronized void applyClusterState(final ClusterChangedEvent event) {
229229
// TODO: feels hacky, a block disables state persistence, and then we clean the allocated shards, maybe another flag in blocks?
230230
if (state.blocks().disableStatePersistence()) {
231231
for (AllocatedIndex<? extends Shard> indexService : indicesService) {
232-
indicesService.removeIndex(indexService.index(), NO_LONGER_ASSIGNED,
233-
"cleaning index (disabled block persistence)"); // also cleans shards
232+
// also cleans shards
233+
indicesService.removeIndex(indexService.index(), NO_LONGER_ASSIGNED, "cleaning index (disabled block persistence)");
234234
}
235235
return;
236236
}

server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java

Lines changed: 27 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -748,8 +748,22 @@ public static long memoryUsage(int precision) {
748748
-527.016999999993, -664.681000000099, -680.306000000099, -704.050000000047, -850.486000000034, -757.43200000003,
749749
-713.308999999892, } };
750750

751-
private static final long[] THRESHOLDS = new long[] { 10, 20, 40, 80, 220, 400, 900, 1800, 3100, 6500, 11500, 20000, 50000, 120000,
752-
350000 };
751+
private static final long[] THRESHOLDS = new long[] {
752+
10,
753+
20,
754+
40,
755+
80,
756+
220,
757+
400,
758+
900,
759+
1800,
760+
3100,
761+
6500,
762+
11500,
763+
20000,
764+
50000,
765+
120000,
766+
350000 };
753767

754768
private final BigArrays bigArrays;
755769
private final OpenBitSet algorithm;
@@ -773,15 +787,15 @@ public HyperLogLogPlusPlus(int precision, BigArrays bigArrays, long initialBucke
773787
hashSet = new Hashset(initialBucketCount);
774788
final double alpha;
775789
switch (p) {
776-
case 4:
777-
alpha = 0.673;
778-
break;
779-
case 5:
780-
alpha = 0.697;
781-
break;
782-
default:
783-
alpha = 0.7213 / (1 + 1.079 / m);
784-
break;
790+
case 4:
791+
alpha = 0.673;
792+
break;
793+
case 5:
794+
alpha = 0.697;
795+
break;
796+
default:
797+
alpha = 0.7213 / (1 + 1.079 / m);
798+
break;
785799
}
786800
alphaMM = alpha * m * m;
787801
}
@@ -1050,8 +1064,8 @@ public int hashCode(long bucket) {
10501064

10511065
public boolean equals(long bucket, HyperLogLogPlusPlus other) {
10521066
return Objects.equals(p, other.p)
1053-
&& Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket))
1054-
&& Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
1067+
&& Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket))
1068+
&& Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
10551069
}
10561070

10571071
/**

0 commit comments

Comments
 (0)