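This pull request removes the remaining callers of the static helper ExceptionsHelper.detailedMessage(Exception). Judging from the assertions rewritten below (for example in the StoreTests hunk, where an expected "CorruptIndexException[foo (resource=bar)]" becomes plain "foo (resource=bar)"), the helper rendered an exception as SimpleClassName[message] and folded in its causes. The replacements either assert on getMessage() of the exception or of its direct cause, or rethrow as an AssertionError so the test failure keeps the original stack trace. A minimal sketch of the before/after idioms, with illustrative names that are not taken from this change:

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

// Hypothetical helper class, only to contrast the two styles that appear in the hunks below.
class DetailedMessageMigrationSketch {

    // Old style (removed here): flatten the whole cause chain into one string and match on it.
    //   assertThat(ExceptionsHelper.detailedMessage(e), containsString("expected fragment"));

    // New style for message assertions: match on the exception's own message.
    static void assertMessageCarries(Exception e, String fragment) {
        assertThat(e.getMessage(), containsString(fragment));
    }

    // New style for listener callbacks that used to call fail(ExceptionsHelper.detailedMessage(e)):
    // rethrowing keeps the original exception and its stack trace in the test report.
    static void onFailure(String source, Exception e) {
        throw new AssertionError(e);
    }
}
```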
@@ -21,7 +21,6 @@
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -523,7 +522,7 @@ public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
final ClusterStateTaskListener listener = new ClusterStateTaskListener() {
@Override
public void onFailure(String source, Exception e) {
-fail(ExceptionsHelper.detailedMessage(e));
+throw new AssertionError(e);
}

@Override
@@ -21,7 +21,6 @@

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.ClusterStateTaskConfig;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.common.Priority;
@@ -273,7 +272,7 @@ public void processed(String source) {

@Override
public void onFailure(String source, Exception e) {
-fail(ExceptionsHelper.detailedMessage(e));
+throw new AssertionError(e);
}
});
usedKeys.add(key);
@@ -304,7 +303,7 @@ public void processed(String source) {

@Override
public void onFailure(String source, Exception e) {
-fail(ExceptionsHelper.detailedMessage(e));
+throw new AssertionError(e);
}
};

@@ -20,7 +20,6 @@
package org.elasticsearch.common.joda;

import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.test.ESTestCase;
@@ -278,7 +277,7 @@ void assertParseException(String msg, String date, String exc) {
parser.parse(date, () -> 0);
fail("Date: " + date + "\n" + msg);
} catch (ElasticsearchParseException e) {
-assertThat(ExceptionsHelper.detailedMessage(e).contains(exc), equalTo(true));
+assertThat(e.getMessage().contains(exc), equalTo(true));
}
}

@@ -20,7 +20,6 @@
package org.elasticsearch.common.time;

import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.test.ESTestCase;

import java.time.Instant;
@@ -251,7 +250,7 @@ public void testTimestamps() {

void assertParseException(String msg, String date, String exc) {
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parser.parse(date, () -> 0));
-assertThat(msg, ExceptionsHelper.detailedMessage(e), containsString(exc));
+assertThat(msg, e.getMessage(), containsString(exc));
}

public void testIllegalMathFormat() {
@@ -19,7 +19,6 @@

package org.elasticsearch.common.util.concurrent;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matcher;
@@ -276,7 +275,7 @@ public String toString() {
fail("Didn't get a rejection when we expected one.");
} catch (EsRejectedExecutionException e) {
assertFalse("Thread pool registering as terminated when it isn't", e.isExecutorShutdown());
-String message = ExceptionsHelper.detailedMessage(e);
+String message = e.getMessage();
assertThat(message, containsString("of dummy runnable"));
assertThat(message, containsString("on EsThreadPoolExecutor[name = " + getName()));
assertThat(message, containsString("queue capacity = " + queue));
@@ -316,7 +315,7 @@ public String toString() {
fail("Didn't get a rejection when we expected one.");
} catch (EsRejectedExecutionException e) {
assertTrue("Thread pool not registering as terminated when it is", e.isExecutorShutdown());
-String message = ExceptionsHelper.detailedMessage(e);
+String message = e.getMessage();
assertThat(message, containsString("of dummy runnable"));
assertThat(message, containsString("on EsThreadPoolExecutor[name = " + getName()));
assertThat(message, containsString("queue capacity = " + queue));
@@ -19,7 +19,6 @@

package org.elasticsearch.index.mapper;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
@@ -51,6 +50,7 @@
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;

// TODO: make this a real unit test
public class DocumentParserTests extends ESSingleNodeTestCase {
@@ -1443,7 +1443,8 @@ public void testBlankFieldNames() throws Exception {

MapperParsingException err = expectThrows(MapperParsingException.class, () ->
client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get());
-assertThat(ExceptionsHelper.detailedMessage(err), containsString("field name cannot be an empty string"));
+assertThat(err.getCause(), notNullValue());
+assertThat(err.getCause().getMessage(), containsString("field name cannot be an empty string"));

final BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
@@ -1454,7 +1455,8 @@

err = expectThrows(MapperParsingException.class, () ->
client().prepareIndex("idx", "type").setSource(bytes2, XContentType.JSON).get());
-assertThat(ExceptionsHelper.detailedMessage(err), containsString("field name cannot be an empty string"));
+assertThat(err.getCause(), notNullValue());
+assertThat(err.getCause().getMessage(), containsString("field name cannot be an empty string"));
}

public void testWriteToFieldAlias() throws Exception {
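The two testBlankFieldNames hunks above show the cause-based variant of the replacement: MapperParsingException only wraps the failure, and the "field name cannot be an empty string" detail sits on its cause, so the test now matches on the cause's message, with a notNullValue() guard that turns a missing cause into a readable assertion failure rather than a NullPointerException. If this pair of assertions kept recurring, it could be extracted into a small helper along these lines (hypothetical, not part of this change):

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;

final class CauseAssertions {
    private CauseAssertions() {}

    // Assert that the wrapped exception has a direct cause and that its message mentions the fragment.
    static void assertCauseMessageContains(Throwable wrapper, String fragment) {
        assertThat("expected the real error to be attached as the cause", wrapper.getCause(), notNullValue());
        assertThat(wrapper.getCause().getMessage(), containsString(fragment));
    }
}
```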
@@ -125,6 +125,7 @@
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;

public class IndexShardIT extends ESSingleNodeTestCase {

@@ -625,8 +626,9 @@ public void testCircuitBreakerIncrementedByIndexShard() throws Exception {
Exception e = expectThrows(Exception.class,
() -> client().prepareSearch("test")
.addAggregation(AggregationBuilders.terms("foo_terms").field("foo.keyword")).get());
-logger.info("--> got: {}", ExceptionsHelper.detailedMessage(e));
-assertThat(ExceptionsHelper.detailedMessage(e), containsString("[parent] Data too large, data for [<agg [foo_terms]>]"));
+logger.info("--> got an expected exception", e);
+assertThat(e.getCause(), notNullValue());
+assertThat(e.getCause().getMessage(), containsString("[parent] Data too large, data for [<agg [foo_terms]>]"));

client().admin().cluster().prepareUpdateSettings()
.setTransientSettings(Settings.builder()
@@ -96,6 +96,7 @@
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;

public class StoreTests extends ESTestCase {

@@ -977,32 +978,30 @@ public void testCanReadOldCorruptionMarker() throws IOException {
String uuid = Store.CORRUPTED + UUIDs.randomBase64UUID();
try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) {
CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_STACK_TRACE);
-output.writeString(ExceptionsHelper.detailedMessage(exception));
+output.writeString(exception.getMessage());
output.writeString(ExceptionsHelper.stackTrace(exception));
CodecUtil.writeFooter(output);
}
try {
store.failIfCorrupted();
fail("should be corrupted");
} catch (CorruptIndexException e) {
-assertTrue(e.getMessage().startsWith("[index][1] Preexisting corrupted index [" + uuid +
-"] caused by: CorruptIndexException[foo (resource=bar)]"));
+assertThat(e.getMessage(), startsWith("[index][1] Preexisting corrupted index [" + uuid + "] caused by: foo (resource=bar)"));
assertTrue(e.getMessage().contains(ExceptionsHelper.stackTrace(exception)));
}

store.removeCorruptionMarker();

try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) {
CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_START);
-output.writeString(ExceptionsHelper.detailedMessage(exception));
+output.writeString(exception.getMessage());
CodecUtil.writeFooter(output);
}
try {
store.failIfCorrupted();
fail("should be corrupted");
} catch (CorruptIndexException e) {
-assertTrue(e.getMessage().startsWith("[index][1] Preexisting corrupted index [" + uuid +
-"] caused by: CorruptIndexException[foo (resource=bar)]"));
+assertThat(e.getMessage(), startsWith("[index][1] Preexisting corrupted index [" + uuid + "] caused by: foo (resource=bar)"));
assertFalse(e.getMessage().contains(ExceptionsHelper.stackTrace(exception)));
}

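The expected prefix in the two failIfCorrupted assertions above changes because the corruption marker now stores the exception's own message rather than the helper's ClassName[message] rendering. A small illustration using the same CorruptIndexException("foo", "bar") the test writes; the old format is inferred from the assertion being rewritten, not from the helper's implementation:

```java
import org.apache.lucene.index.CorruptIndexException;

class CorruptionMarkerTextSketch {
    public static void main(String[] args) {
        // Lucene formats the message as "<msg> (resource=<resource>)".
        CorruptIndexException exception = new CorruptIndexException("foo", "bar");
        String newStoredText = exception.getMessage();                                  // "foo (resource=bar)"
        String oldStoredText = "CorruptIndexException[" + exception.getMessage() + "]"; // what the old marker carried
        System.out.println(oldStoredText + "  ->  " + newStoredText);
    }
}
```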
@@ -20,7 +20,6 @@
package org.elasticsearch.indices.store;

import org.apache.logging.log4j.Logger;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.cluster.ClusterState;
@@ -456,7 +455,7 @@ public void onSuccess(String source) {
@Override
public void onFailure(String source, Exception e) {
latch.countDown();
-fail("Excepted proper response " + ExceptionsHelper.detailedMessage(e));
+throw new AssertionError("Expected a proper response", e);
}
});
latch.await();
@@ -19,7 +19,6 @@

package org.elasticsearch.search.aggregations.bucket;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -30,6 +29,7 @@
import java.io.IOException;

import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.notNullValue;

public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregationBuilder> {

@@ -81,7 +81,8 @@ public void testParsingRangeStrict() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation);
XContentParseException ex = expectThrows(XContentParseException.class,
() -> DateRangeAggregationBuilder.parse("aggregationName", parser));
-assertThat(ExceptionsHelper.detailedMessage(ex), containsString("badField"));
+assertThat(ex.getCause(), notNullValue());
+assertThat(ex.getCause().getMessage(), containsString("badField"));
}

}
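The aggregation-builder tests in this change fall into two cases. For the range-style builders (the date range test above, the geo distance and numeric range tests below), the strict parser throws an XContentParseException whose own message points at the parse location, while the offending field name "badField" is only carried by the wrapped cause, hence the getCause() assertions. For the geohash_grid precision and percentiles tests further down, the wrapper's message already contains the expected text, so a plain getMessage() check suffices. A compact sketch of the two shapes, with made-up helper names:

```java
import org.elasticsearch.common.xcontent.XContentParseException;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;

final class XContentParseAssertions {
    private XContentParseAssertions() {}

    // The expected text is on the wrapper itself (geohash_grid precision, percentiles).
    static void assertWrapperMessage(XContentParseException e, String fragment) {
        assertThat(e.getMessage(), containsString(fragment));
    }

    // The expected text is only on the wrapped cause (the "badField" range tests).
    static void assertCauseMessage(XContentParseException e, String fragment) {
        assertThat(e.getCause(), notNullValue());
        assertThat(e.getCause().getMessage(), containsString(fragment));
    }
}
```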
@@ -19,7 +19,6 @@

package org.elasticsearch.search.aggregations.bucket;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
@@ -34,6 +33,7 @@
import java.io.IOException;

import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.notNullValue;

public class GeoDistanceRangeTests extends BaseAggregationTestCase<GeoDistanceAggregationBuilder> {

@@ -81,7 +81,8 @@ public void testParsingRangeStrict() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation);
XContentParseException ex = expectThrows(XContentParseException.class,
() -> GeoDistanceAggregationBuilder.parse("aggregationName", parser));
-assertThat(ExceptionsHelper.detailedMessage(ex), containsString("badField"));
+assertThat(ex.getCause(), notNullValue());
+assertThat(ex.getCause().getMessage(), containsString("badField"));
}

/**
@@ -19,7 +19,6 @@

package org.elasticsearch.search.aggregations.bucket;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -30,6 +29,7 @@
import java.io.IOException;

import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.notNullValue;

public class RangeTests extends BaseAggregationTestCase<RangeAggregationBuilder> {

@@ -77,7 +77,8 @@ public void testParsingRangeStrict() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, rangeAggregation);
XContentParseException ex = expectThrows(XContentParseException.class,
() -> RangeAggregationBuilder.parse("aggregationName", parser));
-assertThat(ExceptionsHelper.detailedMessage(ex), containsString("badField"));
+assertThat(ex.getCause(), notNullValue());
+assertThat(ex.getCause().getMessage(), containsString("badField"));
}

/**
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.aggregations.bucket.geogrid;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -99,8 +98,7 @@ public void testParseErrorOnBooleanPrecision() throws Exception {
assertSame(XContentParser.Token.START_OBJECT, token);
XContentParseException e = expectThrows(XContentParseException.class,
() -> GeoHashGridAggregationBuilder.parse("geohash_grid", stParser));
-assertThat(ExceptionsHelper.detailedMessage(e),
-containsString("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN"));
+assertThat(e.getMessage(), containsString("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN"));
}

public void testParseErrorOnPrecisionOutOfRange() throws Exception {
@@ -19,7 +19,6 @@

package org.elasticsearch.search.aggregations.metrics;

-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -89,6 +88,6 @@ public void testExceptionMultipleMethods() throws IOException {
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
XContentParseException e = expectThrows(XContentParseException.class,
() -> PercentilesAggregationBuilder.parse("myPercentiles", parser));
-assertThat(ExceptionsHelper.detailedMessage(e), containsString("[percentiles] failed to parse field [hdr]"));
+assertThat(e.getMessage(), containsString("[percentiles] failed to parse field [hdr]"));
}
}
@@ -22,9 +22,9 @@
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
@@ -564,11 +564,10 @@ public void testAllFieldsWithSpecifiedLeniency() throws IOException {
prepareCreate("test").setSource(indexBody, XContentType.JSON).get();
ensureGreen("test");

-Exception e = expectThrows(Exception.class, () ->
+SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () ->
client().prepareSearch("test").setQuery(
simpleQueryStringQuery("foo123").lenient(false)).get());
-assertThat(ExceptionsHelper.detailedMessage(e),
-containsString("NumberFormatException[For input string: \"foo123\"]"));
+assertThat(e.getDetailedMessage(), containsString("NumberFormatException[For input string: \"foo123\"]"));
}

public void testLimitOnExpandedFields() throws Exception {
@@ -591,15 +590,15 @@
client().prepareIndex("toomanyfields", "type1", "1").setSource("field1", "foo bar baz").get();
refresh();

-Exception e = expectThrows(Exception.class, () -> {
+SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> {
SimpleQueryStringBuilder qb = simpleQueryStringQuery("bar");
if (randomBoolean()) {
qb.field("*");
}
client().prepareSearch("toomanyfields").setQuery(qb).get();
});
-assertThat(ExceptionsHelper.detailedMessage(e),
-containsString("field expansion matches too many fields, limit: " + CLUSTER_MAX_CLAUSE_COUNT + ", got: "
+assertThat(e.getDetailedMessage(),
+containsString("field expansion matches too many fields, limit: " + CLUSTER_MAX_CLAUSE_COUNT + ", got: "
+ (CLUSTER_MAX_CLAUSE_COUNT + 1)));
}

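SimpleQueryStringIT is the one place in this change where the flattened rendering is still needed: the assertions match on nested failure text such as NumberFormatException[For input string: "foo123"], which the top-level getMessage() is not guaranteed to include. Instead of the removed static helper, the tests narrow the expected type to SearchPhaseExecutionException so the inherited getDetailedMessage() is available on the caught instance. Roughly, paraphrasing the hunks above (this fragment assumes the surrounding integration-test class for client(), simpleQueryStringQuery, and the assertion imports):

```java
// expectThrows returns the exception typed as the class it was asked for,
// which makes the instance method getDetailedMessage() reachable without a cast.
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class,
        () -> client().prepareSearch("test")
                .setQuery(simpleQueryStringQuery("foo123").lenient(false))
                .get());
assertThat(e.getDetailedMessage(), containsString("NumberFormatException[For input string: \"foo123\"]"));
```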