diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index df90fe70497f5..47908baa2cf66 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -216,29 +216,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -480,17 +457,6 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/server/src/main/java/org/elasticsearch/common/Numbers.java b/server/src/main/java/org/elasticsearch/common/Numbers.java
index 2c4d700c92ce3..7561175f3fe35 100644
--- a/server/src/main/java/org/elasticsearch/common/Numbers.java
+++ b/server/src/main/java/org/elasticsearch/common/Numbers.java
@@ -61,7 +61,8 @@ public static int bytesToInt(byte[] arr) {
}
public static int bytesToInt(BytesRef bytes) {
- return (bytes.bytes[bytes.offset] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
+ return (bytes.bytes[bytes.offset] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) |
+ ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
}
/**
@@ -77,8 +78,10 @@ public static long bytesToLong(byte[] arr) {
}
public static long bytesToLong(BytesRef bytes) {
- int high = (bytes.bytes[bytes.offset + 0] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
- int low = (bytes.bytes[bytes.offset + 4] << 24) | ((bytes.bytes[bytes.offset + 5] & 0xff) << 16) | ((bytes.bytes[bytes.offset + 6] & 0xff) << 8) | (bytes.bytes[bytes.offset + 7] & 0xff);
+ int high = (bytes.bytes[bytes.offset + 0] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) |
+ ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff);
+ int low = (bytes.bytes[bytes.offset + 4] << 24) | ((bytes.bytes[bytes.offset + 5] & 0xff) << 16) |
+ ((bytes.bytes[bytes.offset + 6] & 0xff) << 8) | (bytes.bytes[bytes.offset + 7] & 0xff);
return (((long) high) << 32) | (low & 0x0ffffffffL);
}
diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java
index 29f3b2f7e15fa..c49143edb446e 100644
--- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java
+++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java
@@ -48,7 +48,8 @@ public FsBlobStore(Settings settings, Path path) throws IOException {
if (!this.readOnly) {
Files.createDirectories(path);
}
- this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.fs.buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();
+ this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.fs.buffer_size",
+ new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java
index 9b78c2fe5a788..de21acc487df5 100644
--- a/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java
+++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java
@@ -68,7 +68,8 @@ public int length() {
@Override
public BytesReference slice(int from, int length) {
if (from < 0 || (from + length) > this.length) {
- throw new IllegalArgumentException("can't slice a buffer with length [" + this.length + "], with slice parameters from [" + from + "], length [" + length + "]");
+ throw new IllegalArgumentException("can't slice a buffer with length [" + this.length +
+ "], with slice parameters from [" + from + "], length [" + length + "]");
}
return new BytesArray(bytes, offset + from, length);
}
diff --git a/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java
index b336acfba2008..f6dcdfccca01a 100644
--- a/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java
+++ b/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java
@@ -63,7 +63,8 @@ public int length() {
@Override
public BytesReference slice(int from, int length) {
if (from < 0 || (from + length) > length()) {
- throw new IllegalArgumentException("can't slice a buffer with length [" + length() + "], with slice parameters from [" + from + "], length [" + length + "]");
+ throw new IllegalArgumentException("can't slice a buffer with length [" + length() +
+ "], with slice parameters from [" + from + "], length [" + length + "]");
}
return new PagedBytesReference(bigarrays, byteArray, offset + from, length);
}
diff --git a/server/src/main/java/org/elasticsearch/common/cache/Cache.java b/server/src/main/java/org/elasticsearch/common/cache/Cache.java
index beb2819f2e6dc..67061a1533475 100644
--- a/server/src/main/java/org/elasticsearch/common/cache/Cache.java
+++ b/server/src/main/java/org/elasticsearch/common/cache/Cache.java
@@ -485,7 +485,8 @@ private void put(K key, V value, long now) {
promote(tuple.v1(), now);
}
if (replaced) {
- removalListener.onRemoval(new RemovalNotification<>(tuple.v2().key, tuple.v2().value, RemovalNotification.RemovalReason.REPLACED));
+ removalListener.onRemoval(new RemovalNotification<>(tuple.v2().key, tuple.v2().value,
+ RemovalNotification.RemovalReason.REPLACED));
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java
index 43e3552909b36..cb4457ce24b9b 100644
--- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java
+++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java
@@ -39,8 +39,8 @@
/**
* An immutable map implementation based on open hash map.
*
- * Can be constructed using a {@link #builder()}, or using {@link #builder(org.elasticsearch.common.collect.ImmutableOpenIntMap)} (which is an optimized
- * option to copy over existing content and modify it).
+ * Can be constructed using a {@link #builder()}, or using {@link #builder(org.elasticsearch.common.collect.ImmutableOpenIntMap)}
+ * (which is an optimized option to copy over existing content and modify it).
*/
public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCursor<VType>> {
diff --git a/server/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java b/server/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java
index 49ada56cefa6b..36c55d0cb932a 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/DefaultConstructionProxyFactory.java
@@ -50,7 +50,8 @@ public T newInstance(Object... arguments) throws InvocationTargetException {
} catch (InstantiationException e) {
throw new AssertionError(e); // shouldn't happen, we know this is a concrete type
} catch (IllegalAccessException e) {
- throw new AssertionError("Wrong access modifiers on " + constructor, e); // a security manager is blocking us, we're hosed
+ // a security manager is blocking us, we're hosed
+ throw new AssertionError("Wrong access modifiers on " + constructor, e);
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/ConstructionContext.java b/server/src/main/java/org/elasticsearch/common/inject/internal/ConstructionContext.java
index 34c9faf77e770..0813f1f51b34d 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/internal/ConstructionContext.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/internal/ConstructionContext.java
@@ -79,7 +79,8 @@ public Object createProxy(Errors errors, Class<?> expectedType) throws ErrorsExc
// ES: Replace, since we don't use bytecode gen, just get the type class loader, or system if its null
//ClassLoader classLoader = BytecodeGen.getClassLoader(expectedType);
- ClassLoader classLoader = expectedType.getClassLoader() == null ? ClassLoader.getSystemClassLoader() : expectedType.getClassLoader();
+ ClassLoader classLoader = expectedType.getClassLoader() == null ?
+ ClassLoader.getSystemClassLoader() : expectedType.getClassLoader();
return expectedType.cast(Proxy.newProxyInstance(classLoader,
new Class[]{expectedType}, invocationHandler));
}
diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java
index a9a1bb173b797..a0a22d96f58d5 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java
@@ -275,7 +275,8 @@ public static class MapBinderProviderWithDependencies implements ProviderWi
    private final Provider<Set<Entry<K, Provider<V>>>> provider;
@SuppressWarnings("rawtypes") // code is silly stupid with generics
-    MapBinderProviderWithDependencies(RealMapBinder binder, Set<Dependency<?>> dependencies, Provider<Set<Entry<K, Provider<V>>>> provider) {
+    MapBinderProviderWithDependencies(RealMapBinder binder, Set<Dependency<?>> dependencies,
+            Provider<Set<Entry<K, Provider<V>>>> provider) {
this.binder = binder;
this.dependencies = dependencies;
this.provider = provider;
@@ -315,7 +316,8 @@ public void configure(Binder binder) {
// binds a Map<K, Provider<V>> from a collection of Map<Entry<K, Provider<V>>
final Provider<Set<Entry<K, Provider<V>>>> entrySetProvider = binder
.getProvider(entrySetBinder.getSetKey());
- binder.bind(providerMapKey).toProvider(new MapBinderProviderWithDependencies(RealMapBinder.this, dependencies, entrySetProvider));
+ binder.bind(providerMapKey)
+ .toProvider(new MapBinderProviderWithDependencies(RealMapBinder.this, dependencies, entrySetProvider));
final Provider<Map<K, Provider<V>>> mapProvider = binder.getProvider(providerMapKey);
binder.bind(mapKey).toProvider(new ProviderWithDependencies<Map<K, V>>() {
diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java b/server/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java
index 333938843c13e..07ef3162300d1 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/spi/InjectionPoint.java
@@ -345,7 +345,8 @@ private static void checkForMisplacedBindingAnnotations(Member member, Errors er
}
    private static void addInjectionPoints(TypeLiteral<?> type,
-            Factory<InjectionPoint> factory, boolean statics, Collection<InjectionPoint> injectionPoints,
+            Factory<InjectionPoint> factory, boolean statics,
+            Collection<InjectionPoint> injectionPoints,
            Errors errors) {
if (type.getType() == Object.class) {
return;
diff --git a/server/src/main/java/org/elasticsearch/common/io/Channels.java b/server/src/main/java/org/elasticsearch/common/io/Channels.java
index cb8ac062fbcb2..1d76be43ca981 100644
--- a/server/src/main/java/org/elasticsearch/common/io/Channels.java
+++ b/server/src/main/java/org/elasticsearch/common/io/Channels.java
@@ -62,7 +62,8 @@ public static byte[] readFromFileChannel(FileChannel channel, long position, int
* @param destOffset offset in dest to read into
* @param length number of bytes to read
*/
- public static void readFromFileChannelWithEofException(FileChannel channel, long channelPosition, byte[] dest, int destOffset, int length) throws IOException {
+ public static void readFromFileChannelWithEofException(FileChannel channel, long channelPosition,
+ byte[] dest, int destOffset, int length) throws IOException {
int read = readFromFileChannel(channel, channelPosition, dest, destOffset, length);
if (read < 0) {
throw new EOFException("read past EOF. pos [" + channelPosition + "] length: [" + length + "] end: [" + channel.size() + "]");
@@ -80,7 +81,8 @@ public static void readFromFileChannelWithEofException(FileChannel channel, long
* @return total bytes read or -1 if an attempt was made to read past EOF. The method always tries to read all the bytes
* that will fit in the destination byte buffer.
*/
- public static int readFromFileChannel(FileChannel channel, long channelPosition, byte[] dest, int destOffset, int length) throws IOException {
+ public static int readFromFileChannel(FileChannel channel, long channelPosition, byte[] dest,
+ int destOffset, int length) throws IOException {
ByteBuffer buffer = ByteBuffer.wrap(dest, destOffset, length);
return readFromFileChannel(channel, channelPosition, buffer);
}
@@ -97,7 +99,8 @@ public static int readFromFileChannel(FileChannel channel, long channelPosition,
public static void readFromFileChannelWithEofException(FileChannel channel, long channelPosition, ByteBuffer dest) throws IOException {
int read = readFromFileChannel(channel, channelPosition, dest);
if (read < 0) {
- throw new EOFException("read past EOF. pos [" + channelPosition + "] length: [" + dest.limit() + "] end: [" + channel.size() + "]");
+ throw new EOFException("read past EOF. pos [" + channelPosition +
+ "] length: [" + dest.limit() + "] end: [" + channel.size() + "]");
}
}
@@ -135,7 +138,8 @@ public static int readFromFileChannel(FileChannel channel, long channelPosition,
dest.position(tmpBuffer.position());
}
- assert bytesRead == bytesToRead : "failed to read an entire buffer but also didn't get an EOF (read [" + bytesRead + "] needed [" + bytesToRead + "]";
+ assert bytesRead == bytesToRead : "failed to read an entire buffer but also didn't get an EOF (read [" +
+ bytesRead + "] needed [" + bytesToRead + "]";
return bytesRead;
}
}
@@ -149,7 +153,8 @@ private static int readSingleChunk(FileChannel channel, long channelPosition, By
return read;
}
- assert read > 0 : "FileChannel.read with non zero-length bb.remaining() must always read at least one byte (FileChannel is in blocking mode, see spec of ReadableByteChannel)";
+ assert read > 0 : "FileChannel.read with non zero-length bb.remaining() must always read at least one byte " +
+ "(FileChannel is in blocking mode, see spec of ReadableByteChannel)";
bytesRead += read;
channelPosition += read;
diff --git a/server/src/main/java/org/elasticsearch/common/joda/Joda.java b/server/src/main/java/org/elasticsearch/common/joda/Joda.java
index 35ae6e2341f8d..9b3e5974fb6ca 100644
--- a/server/src/main/java/org/elasticsearch/common/joda/Joda.java
+++ b/server/src/main/java/org/elasticsearch/common/joda/Joda.java
@@ -154,9 +154,11 @@ public static FormatDateTimeFormatter forPattern(String input, Locale locale) {
} else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) {
formatter = ISODateTimeFormat.yearMonthDay();
} else if ("epoch_second".equals(input)) {
- formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false), new EpochTimeParser(false)).toFormatter();
+ formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false),
+ new EpochTimeParser(false)).toFormatter();
} else if ("epoch_millis".equals(input)) {
- formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true), new EpochTimeParser(true)).toFormatter();
+ formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true),
+ new EpochTimeParser(true)).toFormatter();
// strict date formats here, must be at least 4 digits for year and two for months and two for day
} else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) {
formatter = StrictISODateTimeFormat.basicWeekDate();
@@ -245,7 +247,8 @@ public static FormatDateTimeFormatter forPattern(String input, Locale locale) {
parsers[i] = currentParser.getParser();
}
- DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(dateTimeFormatter.withZone(DateTimeZone.UTC).getPrinter(), parsers);
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder()
+ .append(dateTimeFormatter.withZone(DateTimeZone.UTC).getPrinter(), parsers);
formatter = builder.toFormatter();
}
} else {
@@ -286,9 +289,11 @@ public static FormatDateTimeFormatter getStrictStandardDateFormatter() {
.toFormatter()
.withZoneUTC();
- DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[]{longFormatter.getParser(), shortFormatter.getParser(), new EpochTimeParser(true)});
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(),
+ new DateTimeParser[]{longFormatter.getParser(), shortFormatter.getParser(), new EpochTimeParser(true)});
- return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
+ return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis",
+ builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
}
@@ -312,7 +317,8 @@ public DurationFieldType getRangeDurationType() {
@Override
public DateTimeField getField(Chronology chronology) {
- return new OffsetDateTimeField(new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1);
+ return new OffsetDateTimeField(
+ new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1);
}
};
@@ -393,7 +399,8 @@ public void printTo(StringBuffer buf, long instant, Chronology chrono, int displ
* {@link DateTimeFormatter#printTo(Appendable, long, Chronology)} when using a time zone.
*/
@Override
- public void printTo(Writer out, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) throws IOException {
+ public void printTo(Writer out, long instant, Chronology chrono, int displayOffset,
+ DateTimeZone displayZone, Locale locale) throws IOException {
if (hasMilliSecondPrecision) {
out.write(String.valueOf(instant - displayOffset));
} else {
@@ -427,7 +434,8 @@ private long getDateTimeMillis(ReadablePartial partial) {
int minuteOfHour = partial.get(DateTimeFieldType.minuteOfHour());
int secondOfMinute = partial.get(DateTimeFieldType.secondOfMinute());
int millisOfSecond = partial.get(DateTimeFieldType.millisOfSecond());
- return partial.getChronology().getDateTimeMillis(year, monthOfYear, dayOfMonth, hourOfDay, minuteOfHour, secondOfMinute, millisOfSecond);
+ return partial.getChronology().getDateTimeMillis(year, monthOfYear, dayOfMonth,
+ hourOfDay, minuteOfHour, secondOfMinute, millisOfSecond);
}
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java
index f931ee2dc31a7..1920db12117d4 100644
--- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java
+++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java
@@ -157,7 +157,8 @@
public final class XMoreLikeThis {
// static {
-// assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_4_9: "Remove this class once we upgrade to Lucene 5.0";
+// assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_4_9:
+// "Remove this class once we upgrade to Lucene 5.0";
// }
/**
diff --git a/server/src/main/java/org/elasticsearch/common/network/Cidrs.java b/server/src/main/java/org/elasticsearch/common/network/Cidrs.java
index 1bdd7bf562b93..bdf2257e90298 100644
--- a/server/src/main/java/org/elasticsearch/common/network/Cidrs.java
+++ b/server/src/main/java/org/elasticsearch/common/network/Cidrs.java
@@ -40,13 +40,15 @@ public static long[] cidrMaskToMinMax(String cidr) {
String[] fields = cidr.split("/");
if (fields.length != 2) {
throw new IllegalArgumentException(
- String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] but was [%s] after splitting on \"/\" in [%s]", Arrays.toString(fields), cidr)
+ String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] but was [%s] after splitting on \"/\" in [%s]",
+ Arrays.toString(fields), cidr)
);
}
// do not try to parse IPv4-mapped IPv6 address
if (fields[0].contains(":")) {
throw new IllegalArgumentException(
- String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] where a, b, c, d are decimal octets but was [%s] after splitting on \"/\" in [%s]", Arrays.toString(fields), cidr)
+ String.format(Locale.ROOT, "invalid IPv4/CIDR; expected [a.b.c.d, e] where a, b, c, d are decimal octets " +
+ "but was [%s] after splitting on \"/\" in [%s]", Arrays.toString(fields), cidr)
);
}
byte[] addressBytes;
diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java
index 7dab3e5256682..de4aee289d336 100644
--- a/server/src/main/java/org/elasticsearch/common/network/NetworkService.java
+++ b/server/src/main/java/org/elasticsearch/common/network/NetworkService.java
@@ -113,7 +113,8 @@ public InetAddress[] resolveBindHostAddresses(String bindHosts[]) throws IOExcep
}
// check if its a wildcard address: this is only ok if its the only address!
if (address.isAnyLocalAddress() && addresses.length > 1) {
- throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense");
+ throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) +
+ "} is wildcard, but multiple addresses specified: this makes no sense");
}
}
return addresses;
@@ -156,12 +157,14 @@ public InetAddress resolvePublishHostAddresses(String publishHosts[]) throws IOE
for (InetAddress address : addresses) {
// check if its multicast: flat out mistake
if (address.isMulticastAddress()) {
- throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is invalid: multicast address");
+ throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) +
+ "} is invalid: multicast address");
}
// check if its a wildcard address: this is only ok if its the only address!
// (if it was a single wildcard address, it was replaced by step 1 above)
if (address.isAnyLocalAddress()) {
- throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense");
+ throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) +
+ "} is wildcard, but multiple addresses specified: this makes no sense");
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java b/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java
index f84441fbce436..b19f569481db1 100644
--- a/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java
+++ b/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java
@@ -148,7 +148,8 @@ public boolean isRecycled() {
}
/**
- * Create a concurrent implementation that can support concurrent access from concurrencyLevel threads with little contention.
+ * Create a concurrent implementation that can support concurrent access from
+ * concurrencyLevel threads with little contention.
*/
    public static <T> Recycler<T> concurrent(final Recycler.Factory<T> factory, final int concurrencyLevel) {
if (concurrencyLevel < 1) {
diff --git a/server/src/main/java/org/elasticsearch/common/util/BigArrays.java b/server/src/main/java/org/elasticsearch/common/util/BigArrays.java
index 1e305d60fea03..12c511311ea5b 100644
--- a/server/src/main/java/org/elasticsearch/common/util/BigArrays.java
+++ b/server/src/main/java/org/elasticsearch/common/util/BigArrays.java
@@ -44,7 +44,8 @@ public class BigArrays implements Releasable {
public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / Long.BYTES;
public static final int OBJECT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_OBJECT_REF;
- /** Returns the next size to grow when working with parallel arrays that may have different page sizes or number of bytes per element. */
+ /** Returns the next size to grow when working with parallel arrays that
+ * may have different page sizes or number of bytes per element. */
public static long overSize(long minTargetSize) {
return overSize(minTargetSize, PAGE_SIZE_IN_BYTES / 8, 1);
}
@@ -345,7 +346,8 @@ private static class ObjectArrayWrapper extends AbstractArrayWrapper implemen
@Override
public long ramBytesUsed() {
- return SHALLOW_SIZE + RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_OBJECT_REF * size());
+ return SHALLOW_SIZE + RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER +
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF * size());
}
@SuppressWarnings("unchecked")
@@ -503,7 +505,8 @@ public ByteArray resize(ByteArray array, long size) {
}
}
- /** Grow an array to a size that is larger than minSize, preserving content, and potentially reusing part of the provided array. */
+ /** Grow an array to a size that is larger than minSize,
+ * preserving content, and potentially reusing part of the provided array. */
public ByteArray grow(ByteArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -587,7 +590,8 @@ public IntArray resize(IntArray array, long size) {
}
}
- /** Grow an array to a size that is larger than minSize, preserving content, and potentially reusing part of the provided array. */
+ /** Grow an array to a size that is larger than minSize,
+ * preserving content, and potentially reusing part of the provided array. */
public IntArray grow(IntArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -638,7 +642,8 @@ public LongArray resize(LongArray array, long size) {
}
}
- /** Grow an array to a size that is larger than minSize, preserving content, and potentially reusing part of the provided array. */
+ /** Grow an array to a size that is larger than minSize,
+ * preserving content, and potentially reusing part of the provided array. */
public LongArray grow(LongArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -686,7 +691,8 @@ public DoubleArray resize(DoubleArray array, long size) {
}
}
- /** Grow an array to a size that is larger than minSize, preserving content, and potentially reusing part of the provided array. */
+ /** Grow an array to a size that is larger than minSize,
+ * preserving content, and potentially reusing part of the provided array. */
public DoubleArray grow(DoubleArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -734,7 +740,8 @@ public FloatArray resize(FloatArray array, long size) {
}
}
- /** Grow an array to a size that is larger than minSize, preserving content, and potentially reusing part of the provided array. */
+ /** Grow an array to a size that is larger than minSize,
+ * preserving content, and potentially reusing part of the provided array. */
public FloatArray grow(FloatArray array, long minSize) {
if (minSize <= array.size()) {
return array;
@@ -775,7 +782,8 @@ public ObjectArray resize(ObjectArray array, long size) {
}
}
- /** Grow an array to a size that is larger than minSize, preserving content, and potentially reusing part of the provided array. */
+ /** Grow an array to a size that is larger than minSize,
+ * preserving content, and potentially reusing part of the provided array. */
public ObjectArray grow(ObjectArray array, long minSize) {
if (minSize <= array.size()) {
return array;
diff --git a/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java b/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java
index 4399ba6a8fe5b..c2f55b8d9b939 100644
--- a/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java
+++ b/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java
@@ -45,7 +45,8 @@ public synchronized boolean isCancelled() {
}
- /** call this will throw an exception if operation was cancelled. Override {@link #onCancel(String, Exception)} for custom failure logic */
+ /** call this will throw an exception if operation was cancelled.
+ * Override {@link #onCancel(String, Exception)} for custom failure logic */
public synchronized void checkForCancel() {
if (isCancelled()) {
onCancel(reason, null);
diff --git a/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java
index 391f23c4f94c0..ce1bfe87131ba 100644
--- a/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java
+++ b/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java
@@ -307,7 +307,8 @@ public static void sort(final BytesRefArray bytes, final int[] indices) {
sort(new BytesRefBuilder(), new BytesRefBuilder(), bytes, indices);
}
- private static void sort(final BytesRefBuilder scratch, final BytesRefBuilder scratch1, final BytesRefArray bytes, final int[] indices) {
+ private static void sort(final BytesRefBuilder scratch, final BytesRefBuilder scratch1,
+ final BytesRefArray bytes, final int[] indices) {
final int numValues = bytes.size();
assert indices.length >= numValues;
diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java
index d38eb03fae3dd..abc95810ba9a9 100644
--- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java
+++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java
@@ -59,25 +59,30 @@ public static int numberOfProcessors(final Settings settings) {
return PROCESSORS_SETTING.get(settings);
}
- public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory, ThreadContext contextHolder, ScheduledExecutorService timer) {
+ public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory,
+ ThreadContext contextHolder, ScheduledExecutorService timer) {
return new PrioritizedEsThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory, contextHolder, timer);
}
- public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) {
+ public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit,
+ ThreadFactory threadFactory, ThreadContext contextHolder) {
        ExecutorScalingQueue<Runnable> queue = new ExecutorScalingQueue<>();
- EsThreadPoolExecutor executor = new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder);
+ EsThreadPoolExecutor executor =
+ new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder);
queue.executor = executor;
return executor;
}
- public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity, ThreadFactory threadFactory, ThreadContext contextHolder) {
+ public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity,
+ ThreadFactory threadFactory, ThreadContext contextHolder) {
        BlockingQueue<Runnable> queue;
if (queueCapacity < 0) {
queue = ConcurrentCollections.newBlockingQueue();
} else {
queue = new SizeBlockingQueue<>(ConcurrentCollections.newBlockingQueue(), queueCapacity);
}
- return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS, queue, threadFactory, new EsAbortPolicy(), contextHolder);
+ return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS,
+ queue, threadFactory, new EsAbortPolicy(), contextHolder);
}
/**
diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadBarrier.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadBarrier.java
index 967f0c890d270..0b2b1a5a54c9e 100644
--- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadBarrier.java
+++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadBarrier.java
@@ -246,18 +246,18 @@ private synchronized void initCause(Throwable t) {
*
* Usage example:
*
- * BarrierTimer timer = new BarrierTimer();
- * ThreadBarrier barrier = new ThreadBarrier( nTHREADS + 1, timer );
- * ..
- * barrier.await(); // starts timer when all threads trip on await
- * barrier.await(); // stops timer when all threads trip on await
- * ..
- * long time = timer.getTimeInNanos();
- * long tpi = time / ((long)nREPEATS * nTHREADS); //throughput per thread iteration
- * long secs = timer.getTimeInSeconds(); //total runtime in seconds
- * ..
- * timer.reset(); // reuse timer
- *
+ * BarrierTimer timer = new BarrierTimer();
+ * ThreadBarrier barrier = new ThreadBarrier( nTHREADS + 1, timer );
+ * ..
+ * barrier.await(); // starts timer when all threads trip on await
+ * barrier.await(); // stops timer when all threads trip on await
+ * ..
+ * long time = timer.getTimeInNanos();
+ * long tpi = time / ((long)nREPEATS * nTHREADS); //throughput per thread iteration
+ * long secs = timer.getTimeInSeconds(); //total runtime in seconds
+ * ..
+ * timer.reset(); // reuse timer
+ *
*/
public static class BarrierTimer implements Runnable {
volatile boolean started;
diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
index f2b1c209cd9dc..9664811149567 100644
--- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
+++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
@@ -56,14 +56,15 @@
/**
* A ThreadContext is a map of string headers and a transient map of keyed objects that are associated with
* a thread. It allows to store and retrieve header information across method calls, network calls as well as threads spawned from a
- * thread that has a {@link ThreadContext} associated with. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool} have out of the box
- * support for {@link ThreadContext} and all threads spawned will inherit the {@link ThreadContext} from the thread that it is forking from.".
- * Network calls will also preserve the senders headers automatically.
+ * thread that has a {@link ThreadContext} associated with. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool}
+ * have out of the box support for {@link ThreadContext} and all threads spawned will inherit the {@link ThreadContext} from the thread
+ * that it is forking from. Network calls will also preserve the sender's headers automatically.
*
- * Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every elasticsearch thread is managed by a thread pool or executor
- * being responsible for stashing and restoring the threads context. For instance if a network request is received, all headers are deserialized from the network
- * and directly added as the headers of the threads {@link ThreadContext} (see {@link #readHeaders(StreamInput)}. In order to not modify the context that is currently
- * active on this thread the network code uses a try/with pattern to stash it's current context, read headers into a fresh one and once the request is handled or a handler thread
+ * Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every elasticsearch thread is managed by
+ * a thread pool or executor being responsible for stashing and restoring the threads context. For instance if a network request is
+ * received, all headers are deserialized from the network and directly added as the headers of the threads {@link ThreadContext}
+ * (see {@link #readHeaders(StreamInput)}). In order to not modify the context that is currently active on this thread the network code
+ * uses a try/with pattern to stash its current context, read headers into a fresh one and once the request is handled or a handler thread
* is forked (which in turn inherits the context) it restores the previous context. For instance:
*
*
@@ -127,8 +128,9 @@ public StoredContext stashContext() {
}
/**
- * Removes the current context and resets a new context that contains a merge of the current headers and the given headers. The removed context can be
- * restored when closing the returned {@link StoredContext}. The merge strategy is that headers that are already existing are preserved unless they are defaults.
+ * Removes the current context and resets a new context that contains a merge of the current headers and the given headers.
+ * The removed context can be restored when closing the returned {@link StoredContext}. The merge strategy is that headers
+ * that are already existing are preserved unless they are defaults.
*/
public StoredContext stashAndMergeHeaders(Map headers) {
final ThreadContextStruct context = threadLocal.get();
@@ -481,7 +483,8 @@ private ThreadContextStruct putResponse(final String key, final String value, fi
logger.warn("Dropping a warning header, as their total size reached the maximum allowed of ["
+ maxWarningHeaderSize + "] bytes set in ["
+ HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!");
- return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize);
+ return new ThreadContextStruct(requestHeaders, responseHeaders,
+ transientHeaders, isSystemContext, newWarningHeaderSize);
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
index 9c01c094b7a0d..d193cfd510823 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
@@ -42,7 +42,8 @@ public class XContentHelper {
/**
* Creates a parser based on the bytes provided
- * @deprecated use {@link #createParser(NamedXContentRegistry, DeprecationHandler, BytesReference, XContentType)} to avoid content type auto-detection
+ * @deprecated use {@link #createParser(NamedXContentRegistry, DeprecationHandler, BytesReference, XContentType)}
+ * to avoid content type auto-detection
*/
@Deprecated
public static XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler,
@@ -109,7 +110,8 @@ public static Tuple> convertToMap(BytesReferen
}
contentType = xContentType != null ? xContentType : XContentFactory.xContentType(input);
try (InputStream stream = input) {
- return new Tuple<>(Objects.requireNonNull(contentType), convertToMap(XContentFactory.xContent(contentType), stream, ordered));
+ return new Tuple<>(Objects.requireNonNull(contentType),
+ convertToMap(XContentFactory.xContent(contentType), stream, ordered));
}
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
@@ -294,7 +296,8 @@ private static boolean allListValuesAreMapsOfOne(List list) {
* auto-detection
*/
@Deprecated
- public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException {
+ public static void writeRawField(String field, BytesReference source, XContentBuilder builder,
+ ToXContent.Params params) throws IOException {
Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) {
try (InputStream compressedStreamInput = compressor.streamInput(source.streamInput())) {
@@ -340,7 +343,8 @@ public static BytesReference toXContent(ToXContent toXContent, XContentType xCon
* {@link XContentType}. Wraps the output into a new anonymous object according to the value returned
* by the {@link ToXContent#isFragment()} method returns.
*/
- public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params, boolean humanReadable) throws IOException {
+ public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params,
+ boolean humanReadable) throws IOException {
try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
builder.humanReadable(humanReadable);
if (toXContent.isFragment()) {
diff --git a/server/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/server/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
index 78c3963bd0429..5dcc811b0c6be 100644
--- a/server/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
@@ -94,7 +94,8 @@ public void testNewPolygon_coordinate() {
public void testNewPolygon_coordinates() {
Polygon polygon = new PolygonBuilder(new CoordinatesBuilder()
- .coordinates(new Coordinate(-45, 30), new Coordinate(45, 30), new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30))
+ .coordinates(new Coordinate(-45, 30), new Coordinate(45, 30),
+ new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30))
).toPolygon();
LineString exterior = polygon.getExteriorRing();
diff --git a/server/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java b/server/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java
index e3c085f032830..ffe81fff5b634 100644
--- a/server/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java
+++ b/server/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java
@@ -34,8 +34,10 @@ private void assertHash(String expected, String test, MessageDigest messageDiges
public void testMd5() throws Exception {
assertHash("d41d8cd98f00b204e9800998ecf8427e", "", MessageDigests.md5());
assertHash("900150983cd24fb0d6963f7d28e17f72", "abc", MessageDigests.md5());
- assertHash("8215ef0796a20bcaaae116d3876c664a", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.md5());
- assertHash("7707d6ae4e027c70eea2a935c2296f21", new String(new char[1000000]).replace("\0", "a"), MessageDigests.md5());
+ assertHash("8215ef0796a20bcaaae116d3876c664a",
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.md5());
+ assertHash("7707d6ae4e027c70eea2a935c2296f21",
+ new String(new char[1000000]).replace("\0", "a"), MessageDigests.md5());
assertHash("9e107d9d372bb6826bd81d3542a419d6", "The quick brown fox jumps over the lazy dog", MessageDigests.md5());
assertHash("1055d3e698d289f2af8663725127bd4b", "The quick brown fox jumps over the lazy cog", MessageDigests.md5());
}
@@ -43,8 +45,10 @@ public void testMd5() throws Exception {
public void testSha1() throws Exception {
assertHash("da39a3ee5e6b4b0d3255bfef95601890afd80709", "", MessageDigests.sha1());
assertHash("a9993e364706816aba3e25717850c26c9cd0d89d", "abc", MessageDigests.sha1());
- assertHash("84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha1());
- assertHash("34aa973cd4c4daa4f61eeb2bdbad27316534016f", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha1());
+ assertHash("84983e441c3bd26ebaae4aa1f95129e5e54670f1",
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha1());
+ assertHash("34aa973cd4c4daa4f61eeb2bdbad27316534016f",
+ new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha1());
assertHash("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", "The quick brown fox jumps over the lazy dog", MessageDigests.sha1());
assertHash("de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", "The quick brown fox jumps over the lazy cog", MessageDigests.sha1());
}
@@ -52,10 +56,14 @@ public void testSha1() throws Exception {
public void testSha256() throws Exception {
assertHash("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "", MessageDigests.sha256());
assertHash("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc", MessageDigests.sha256());
- assertHash("248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha256());
- assertHash("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha256());
- assertHash("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", "The quick brown fox jumps over the lazy dog", MessageDigests.sha256());
- assertHash("e4c4d8f3bf76b692de791a173e05321150f7a345b46484fe427f6acc7ecc81be", "The quick brown fox jumps over the lazy cog", MessageDigests.sha256());
+ assertHash("248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1",
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha256());
+ assertHash("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0",
+ new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha256());
+ assertHash("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592",
+ "The quick brown fox jumps over the lazy dog", MessageDigests.sha256());
+ assertHash("e4c4d8f3bf76b692de791a173e05321150f7a345b46484fe427f6acc7ecc81be",
+ "The quick brown fox jumps over the lazy cog", MessageDigests.sha256());
}
public void testToHexString() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/common/network/CidrsTests.java b/server/src/test/java/org/elasticsearch/common/network/CidrsTests.java
index 0b00353f98ab5..60609dc29930f 100644
--- a/server/src/test/java/org/elasticsearch/common/network/CidrsTests.java
+++ b/server/src/test/java/org/elasticsearch/common/network/CidrsTests.java
@@ -78,7 +78,8 @@ public void testSplittingDot() {
public void testValidSpecificCases() {
List> cases = new ArrayList<>();
cases.add(new Tuple<>("192.168.0.0/24", new long[]{(192L << 24) + (168 << 16), (192L << 24) + (168 << 16) + (1 << 8)}));
- cases.add(new Tuple<>("192.168.128.0/17", new long[]{(192L << 24) + (168 << 16) + (128 << 8), (192L << 24) + (168 << 16) + (128 << 8) + (1 << 15)}));
+ cases.add(new Tuple<>("192.168.128.0/17",
+ new long[]{(192L << 24) + (168 << 16) + (128 << 8), (192L << 24) + (168 << 16) + (128 << 8) + (1 << 15)}));
cases.add(new Tuple<>("128.0.0.0/1", new long[]{128L << 24, (128L << 24) + (1L << 31)})); // edge case
cases.add(new Tuple<>("0.0.0.0/0", new long[]{0, 1L << 32})); // edge case
cases.add(new Tuple<>("0.0.0.0/1", new long[]{0, 1L << 31})); // edge case
diff --git a/server/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/server/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
index eafb7c69b8d9d..13480122d2fd8 100644
--- a/server/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
+++ b/server/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
@@ -53,8 +53,10 @@ public void testDistanceUnitParsing() {
double testValue = 12345.678;
for (DistanceUnit unit : DistanceUnit.values()) {
assertThat("Unit can be parsed from '" + unit.toString() + "'", DistanceUnit.fromString(unit.toString()), equalTo(unit));
- assertThat("Unit can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.fromString(unit.toString()), equalTo(unit));
- assertThat("Value can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue));
+ assertThat("Unit can be parsed from '" + testValue + unit.toString() + "'",
+ DistanceUnit.fromString(unit.toString()), equalTo(unit));
+ assertThat("Value can be parsed from '" + testValue + unit.toString() + "'",
+ DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue));
}
}
diff --git a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
index 520f80fecac44..026c9a2e078a4 100644
--- a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
+++ b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
@@ -67,7 +67,8 @@ public void testParseFromXContent() throws IOException {
try (XContentParser parser = createParser(json)) {
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
- assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING)));
+ assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER),
+ equalTo(XContentParser.Token.VALUE_STRING)));
Fuzziness fuzziness = Fuzziness.parse(parser);
if (value.intValue() >= 1) {
assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue())));
diff --git a/server/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java b/server/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
index 9210565a10482..0c1c5bbbcb74e 100644
--- a/server/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
+++ b/server/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
@@ -32,7 +32,8 @@ private BigArrays randombigArrays() {
public void testDuel() {
final LongObjectHashMap map1 = new LongObjectHashMap<>();
- final LongObjectPagedHashMap map2 = new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, randombigArrays());
+ final LongObjectPagedHashMap map2 =
+ new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, randombigArrays());
final int maxKey = randomIntBetween(1, 10000);
final int iters = scaledRandomIntBetween(10000, 100000);
for (int i = 0; i < iters; ++i) {
diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
index a0fdcbf51ca1d..ff916c91613dc 100644
--- a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
+++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
@@ -172,7 +172,8 @@ public void testScaleUp() throws Exception {
final ThreadBarrier barrier = new ThreadBarrier(max + 1);
ThreadPoolExecutor pool =
- EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"), threadContext);
+ EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), randomTimeUnit(),
+ EsExecutors.daemonThreadFactory("test"), threadContext);
assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
@@ -209,7 +210,8 @@ public void testScaleDown() throws Exception {
final ThreadBarrier barrier = new ThreadBarrier(max + 1);
final ThreadPoolExecutor pool =
- EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"), threadContext);
+ EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS,
+ EsExecutors.daemonThreadFactory("test"), threadContext);
assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
index 1eacb4cb18cee..fa3868ec46f37 100644
--- a/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
+++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
@@ -159,7 +159,8 @@ public void testSubmitPrioritizedExecutorWithCallables() throws Exception {
}
public void testSubmitPrioritizedExecutorWithMixed() throws Exception {
- ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null);
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(),
+ EsExecutors.daemonThreadFactory(getTestName()), holder, null);
List results = new ArrayList<>(8);
CountDownLatch awaitingLatch = new CountDownLatch(1);
CountDownLatch finishedLatch = new CountDownLatch(8);
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
index 07338d9286b70..a281d453e0764 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
@@ -123,7 +123,8 @@ public void testRaw() throws IOException {
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput());
xContentBuilder.field("test1", "value1");
xContentBuilder.endObject();
- assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
+ assertThat(Strings.toString(xContentBuilder),
+ equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
}
{
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
@@ -133,7 +134,8 @@ public void testRaw() throws IOException {
xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput());
xContentBuilder.field("test1", "value1");
xContentBuilder.endObject();
- assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
+ assertThat(Strings.toString(xContentBuilder),
+ equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
}
}
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java
index b4d7cb11529b3..b0536fa908cf0 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java
@@ -34,7 +34,8 @@ public class FilterPathGeneratorFilteringTests extends ESTestCase {
private final JsonFactory JSON_FACTORY = new JsonFactory();
public void testInclusiveFilters() throws Exception {
- final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
+ final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
assertResult(SAMPLE, "a", true, "{'a':0}");
assertResult(SAMPLE, "b", true, "{'b':true}");
@@ -79,48 +80,80 @@ public void testInclusiveFilters() throws Exception {
}
public void testExclusiveFilters() throws Exception {
- final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
-
- assertResult(SAMPLE, "a", false, "{'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "b", false, "{'a':0,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "c", false, "{'a':0,'b':true,'d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "d", false, "{'a':0,'b':true,'c':'c_value','e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ final String SAMPLE = "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}";
+
+ assertResult(SAMPLE, "a", false, "{'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "b", false, "{'a':0,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "c", false, "{'a':0,'b':true,'d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "d", false, "{'a':0,'b':true,'c':'c_value','e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
assertResult(SAMPLE, "e", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "h", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "z", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
-
- assertResult(SAMPLE, "e.f1", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "e.f2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "e.f*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
- assertResult(SAMPLE, "e.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
-
- assertResult(SAMPLE, "h.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
-
- assertResult(SAMPLE, "h.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "*.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
-
- assertResult(SAMPLE, "*.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.*.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
-
- assertResult(SAMPLE, "*.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.*.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.*.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.j.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
-
- assertResult(SAMPLE, "*.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.*.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.*.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "h.i.j.k.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
-
- assertResult(SAMPLE, "h.*.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
- assertResult(SAMPLE, "**.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'},{'g1':'g1_value','g2':'g2_value'}]}");
-
- assertResult(SAMPLE, "**.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "h", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "z", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+
+ assertResult(SAMPLE, "e.f1", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "e.f2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "e.f*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'g1':'g1_value','g2':'g2_value'}]," +
+ "'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+ assertResult(SAMPLE, "e.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'},{'g1':'g1_value'}]," +
+ "'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
+
+ assertResult(SAMPLE, "h.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+
+ assertResult(SAMPLE, "h.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "*.i", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+
+ assertResult(SAMPLE, "*.i.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.*.j", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+
+ assertResult(SAMPLE, "*.i.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.*.j.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.*.k", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.j.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+
+ assertResult(SAMPLE, "*.i.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.*.j.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.*.k.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "h.i.j.k.*", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+
+ assertResult(SAMPLE, "h.*.j.*.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+ assertResult(SAMPLE, "**.l", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value','f2':'f2_value'}," +
+ "{'g1':'g1_value','g2':'g2_value'}]}");
+
+ assertResult(SAMPLE, "**.*2", false, "{'a':0,'b':true,'c':'c_value','d':[0,1,2],'e':[{'f1':'f1_value'}," +
+ "{'g1':'g1_value'}],'h':{'i':{'j':{'k':{'l':'l_value'}}}}}");
}