diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
index 22abf3e52098c..49ece8f880fdb 100644
--- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java
+++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
@@ -36,6 +36,7 @@ import org.elasticsearch.transport.TcpTransport;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -50,7 +51,6 @@ import static java.util.Collections.unmodifiableMap;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
 
 /**
  * A base class for all elasticsearch exceptions.
@@ -391,12 +391,125 @@ private static void headerToXContent(XContentBuilder builder, String key, List<String> values)
         }
     }
 
+    /**
+     * Generate a {@link ElasticsearchException} from a {@link XContentParser}. This does not
+     * return the original exception type (ie NodeClosedException for example) but just wraps
+     * the type, the reason and the cause of the exception. It also recursively parses the
+     * tree structure of the cause, returning it as a tree structure of {@link ElasticsearchException}
+     * instances.
+     */
+    public static ElasticsearchException fromXContent(XContentParser parser) throws IOException {
+        XContentParser.Token token = parser.nextToken();
+        ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+
+        String type = null, reason = null, stack = null;
+        ElasticsearchException cause = null;
+        Map<String, List<String>> metadata = new HashMap<>();
+        Map<String, List<String>> headers = new HashMap<>();
+
+        for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) {
+            String currentFieldName = parser.currentName();
+            token = parser.nextToken();
+
+            if (token.isValue()) {
+                if (TYPE.equals(currentFieldName)) {
+                    type = parser.text();
+                } else if (REASON.equals(currentFieldName)) {
+                    reason = parser.text();
+                } else if (STACK_TRACE.equals(currentFieldName)) {
+                    stack = parser.text();
+                } else if (token == XContentParser.Token.VALUE_STRING) {
+                    metadata.put(currentFieldName, Collections.singletonList(parser.text()));
+                }
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (CAUSED_BY.equals(currentFieldName)) {
+                    cause = fromXContent(parser);
+                } else if (HEADER.equals(currentFieldName)) {
+                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                        if (token == XContentParser.Token.FIELD_NAME) {
+                            currentFieldName = parser.currentName();
+                        } else {
+                            List<String> values = headers.getOrDefault(currentFieldName, new ArrayList<>());
+                            if (token == XContentParser.Token.VALUE_STRING) {
+                                values.add(parser.text());
+                            } else if (token == XContentParser.Token.START_ARRAY) {
+                                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                                    if (token == XContentParser.Token.VALUE_STRING) {
+                                        values.add(parser.text());
+                                    } else {
+                                        parser.skipChildren();
+                                    }
+                                }
+                            } else if (token == XContentParser.Token.START_OBJECT) {
+                                parser.skipChildren();
+                            }
+                            headers.put(currentFieldName, values);
+                        }
+                    }
+                } else {
+                    // Any additional metadata object added by the metadataToXContent method is ignored
+                    // and skipped, so that the parser does not fail on unknown fields. The parser only
+                    // supports metadata key-value pairs and metadata arrays of values.
+                    parser.skipChildren();
+                }
+            } else if (token == XContentParser.Token.START_ARRAY) {
+                // Parse the array and add each item to the corresponding list of metadata.
+                // Arrays of objects are not supported yet and are just ignored and skipped.
+                List<String> values = new ArrayList<>();
+                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    if (token == XContentParser.Token.VALUE_STRING) {
+                        values.add(parser.text());
+                    } else {
+                        parser.skipChildren();
+                    }
+                }
+                if (values.size() > 0) {
+                    if (metadata.containsKey(currentFieldName)) {
+                        values.addAll(metadata.get(currentFieldName));
+                    }
+                    metadata.put(currentFieldName, values);
+                }
+            }
+        }
+
+        StringBuilder message = new StringBuilder("Elasticsearch exception [");
+        message.append(TYPE).append('=').append(type).append(", ");
+        message.append(REASON).append('=').append(reason);
+        if (stack != null) {
+            message.append(", ").append(STACK_TRACE).append('=').append(stack);
+        }
+        message.append(']');
+
+        ElasticsearchException e = new ElasticsearchException(message.toString(), cause);
+
+        for (Map.Entry<String, List<String>> entry : metadata.entrySet()) {
+            //subclasses can print out additional metadata through the metadataToXContent method. Simple key-value pairs will be
+            //parsed back and become part of this metadata set, while objects and arrays are not supported when parsing back.
+            //Those key-value pairs become part of the metadata set and inherit the "es." prefix as that is currently required
+            //by addMetadata. The prefix will get stripped out when printing metadata out so it will be effectively invisible.
+            //TODO move subclasses that print out simple metadata to using addMetadata directly and support also numbers and booleans.
+            //TODO rename metadataToXContent and have only SearchPhaseExecutionException use it, which prints out complex objects
+            e.addMetadata("es." + entry.getKey(), entry.getValue());
+        }
+        for (Map.Entry<String, List<String>> header : headers.entrySet()) {
+            e.addHeader(header.getKey(), header.getValue());
+        }
+        return e;
+    }
+
     /**
      * Static toXContent helper method that renders {@link org.elasticsearch.ElasticsearchException} or {@link Throwable} instances
      * as XContent, delegating the rendering to {@link #toXContent(XContentBuilder, Params)}
      * or {@link #innerToXContent(XContentBuilder, Params, Throwable, String, String, Map, Map, Throwable)}.
      *
-     * This method is usually used when the {@link Throwable} is rendered as a part of another XContent object.
+     * This method is usually used when the {@link Throwable} is rendered as a part of another XContent object, and its result can
+     * be parsed back using the {@link #fromXContent(XContentParser)} method.
      */
     public static void generateThrowableXContent(XContentBuilder builder, Params params, Throwable t) throws IOException {
         t = ExceptionsHelper.unwrapCause(t);
@@ -455,71 +568,6 @@ public static void generateFailureXContent(XContentBuilder builder, Params param
         builder.endObject();
     }
 
-    /**
-     * Generate a {@link ElasticsearchException} from a {@link XContentParser}. This does not
-     * return the original exception type (ie NodeClosedException for example) but just wraps
-     * the type, the reason and the cause of the exception. It also recursively parses the
-     * tree structure of the cause, returning it as a tree structure of {@link ElasticsearchException}
-     * instances.
-     */
-    public static ElasticsearchException fromXContent(XContentParser parser) throws IOException {
-        XContentParser.Token token = parser.nextToken();
-        ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
-
-        String type = null, reason = null, stack = null;
-        ElasticsearchException cause = null;
-        Map<String, List<String>> metadata = new HashMap<>();
-        Map<String, Object> headers = new HashMap<>();
-
-        do {
-            String currentFieldName = parser.currentName();
-            token = parser.nextToken();
-            if (token.isValue()) {
-                if (TYPE.equals(currentFieldName)) {
-                    type = parser.text();
-                } else if (REASON.equals(currentFieldName)) {
-                    reason = parser.text();
-                } else if (STACK_TRACE.equals(currentFieldName)) {
-                    stack = parser.text();
-                } else {
-                    metadata.put(currentFieldName, Collections.singletonList(parser.text()));
-                }
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if (CAUSED_BY.equals(currentFieldName)) {
-                    cause = fromXContent(parser);
-                } else if (HEADER.equals(currentFieldName)) {
-                    headers.putAll(parser.map());
-                } else {
-                    throwUnknownField(currentFieldName, parser.getTokenLocation());
-                }
-            }
-        } while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME);
-
-        StringBuilder message = new StringBuilder("Elasticsearch exception [");
-        message.append(TYPE).append('=').append(type).append(", ");
-        message.append(REASON).append('=').append(reason);
-        if (stack != null) {
-            message.append(", ").append(STACK_TRACE).append('=').append(stack);
-        }
-        message.append(']');
-
-        ElasticsearchException e = new ElasticsearchException(message.toString(), cause);
-
-        for (Map.Entry<String, List<String>> entry : metadata.entrySet()) {
-            //subclasses can print out additional metadata through the metadataToXContent method. Simple key-value pairs will be
-            //parsed back and become part of this metadata set, while objects and arrays are not supported when parsing back.
-            //Those key-value pairs become part of the metadata set and inherit the "es." prefix as that is currently required
-            //by addMetadata. The prefix will get stripped out when printing metadata out so it will be effectively invisible.
-            //TODO move subclasses that print out simple metadata to using addMetadata directly and support also numbers and booleans.
-            //TODO rename metadataToXContent and have only SearchPhaseExecutionException use it, which prints out complex objects
-            e.addMetadata("es." + entry.getKey(), entry.getValue());
-        }
-        for (Map.Entry<String, Object> header : headers.entrySet()) {
-            e.addHeader(header.getKey(), String.valueOf(header.getValue()));
-        }
-        return e;
-    }
-
     /**
      * Returns the root cause of this exception or multiple if different shards caused different exceptions
      */
diff --git a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
index 4a8a90afabe02..8f209f397b16e 100644
--- a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
+++ b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java
@@ -27,8 +27,10 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.breaker.CircuitBreakingException;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -54,11 +56,17 @@ import java.io.EOFException;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
+import static java.util.Collections.emptyList;
 import static java.util.Collections.singleton;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 import static org.hamcrest.CoreMatchers.hasItem;
+import static org.hamcrest.CoreMatchers.hasItems;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.startsWith;
@@ -516,6 +524,89 @@ public void testFromXContentWithHeadersAndMetadata() throws IOException {
         assertThat(cause.getMetadata("es.index_uuid"), hasItem("_na_"));
     }
 
+    /**
+     * Test that some values like arrays of numbers are ignored when parsing back
+     * an exception.
+     */
+    public void testFromXContentWithIgnoredMetadataAndHeaders() throws IOException {
+        final XContent xContent = randomFrom(XContentType.values()).xContent();
+
+        // The exception content to parse is built using a XContentBuilder
+        // because the current Java API does not allow adding metadata/headers
+        // of types other than lists of strings.
+        BytesReference originalBytes;
+        try (XContentBuilder builder = XContentBuilder.builder(xContent)) {
+            builder.startObject();
+            builder.field("metadata_int", 1);
+            builder.array("metadata_array_of_ints", new int[]{8, 13, 21});
+            builder.field("reason", "Custom reason");
+            builder.array("metadata_array_of_boolean", new boolean[]{false, false});
+            builder.field("type", "custom_exception");
+            builder.field("metadata_long", 1L);
+            builder.array("metadata_array_of_longs", new long[]{2L, 3L, 5L});
+            builder.field("metadata_other", "some metadata");
+            builder.startObject("header");
+            builder.field("header_string", "some header");
+            builder.array("header_array_of_strings", new String[]{"foo", "bar", "baz"});
+            builder.endObject();
+            builder.endObject();
+
+            originalBytes = builder.bytes();
+        }
+
+        ElasticsearchException parsedException;
+        try (XContentParser parser = createParser(xContent, originalBytes)) {
+            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+            parsedException = ElasticsearchException.fromXContent(parser);
+            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
+            assertNull(parser.nextToken());
+        }
+
+        assertNotNull(parsedException);
+        assertEquals("Elasticsearch exception [type=custom_exception, reason=Custom reason]", parsedException.getMessage());
+        assertEquals(2, parsedException.getHeaderKeys().size());
+        assertThat(parsedException.getHeader("header_string"), hasItem("some header"));
+        assertThat(parsedException.getHeader("header_array_of_strings"), hasItems("foo", "bar", "baz"));
+        assertEquals(1, parsedException.getMetadataKeys().size());
+        assertThat(parsedException.getMetadata("es.metadata_other"), hasItem("some metadata"));
+    }
+
+    public void testThrowableToAndFromXContent() throws IOException {
+        final XContent xContent = randomFrom(XContentType.values()).xContent();
+
+        final Tuple<Throwable, ElasticsearchException> exceptions = randomExceptions();
+        final Throwable throwable = exceptions.v1();
+
+        BytesReference throwableBytes = XContentHelper.toXContent((builder, params) -> {
+            ElasticsearchException.generateThrowableXContent(builder, params, throwable);
+            return builder;
+        }, xContent.type(), randomBoolean());
+
+        ElasticsearchException parsedException;
+        try (XContentParser parser = createParser(xContent, throwableBytes)) {
+            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+            parsedException = ElasticsearchException.fromXContent(parser);
+            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
+            assertNull(parser.nextToken());
+        }
+        assertNotNull(parsedException);
+
+        ElasticsearchException expected = exceptions.v2();
+        do {
+            assertEquals(expected.getMessage(), parsedException.getMessage());
+            assertEquals(expected.getHeaders(), parsedException.getHeaders());
+            assertEquals(expected.getMetadata(), parsedException.getMetadata());
+            assertEquals(expected.getResourceType(), parsedException.getResourceType());
+            assertEquals(expected.getResourceId(), parsedException.getResourceId());
+
+            expected = (ElasticsearchException) expected.getCause();
+            parsedException = (ElasticsearchException) parsedException.getCause();
+            if (expected == null) {
+                assertNull(parsedException);
+            }
+        } while (expected != null);
+    }
+
     /**
      * Builds a {@link ToXContent} using a JSON XContentBuilder and check the resulting string with the given {@link Matcher}.
      *
@@ -533,4 +624,123 @@ private static void assertExceptionAsJson(Exception e, String expectedJson) thro
             return builder;
         }, expectedJson);
     }
+
+    private static Tuple<Throwable, ElasticsearchException> randomExceptions() {
+        Throwable actual;
+        ElasticsearchException expected;
+
+        int type = randomIntBetween(0, 5);
+        switch (type) {
+            case 0:
+                actual = new ClusterBlockException(singleton(DiscoverySettings.NO_MASTER_BLOCK_WRITES));
+                expected = new ElasticsearchException("Elasticsearch exception [type=cluster_block_exception, " +
+                        "reason=blocked by: [SERVICE_UNAVAILABLE/2/no master];]");
+                break;
+            case 1:
+                actual = new CircuitBreakingException("Data too large", 123, 456);
+                expected = new ElasticsearchException("Elasticsearch exception [type=circuit_breaking_exception, reason=Data too large]");
+                break;
+            case 2:
+                actual = new SearchParseException(new TestSearchContext(null), "Parse failure", new XContentLocation(12, 98));
+                expected = new ElasticsearchException("Elasticsearch exception [type=search_parse_exception, reason=Parse failure]");
+                break;
+            case 3:
+                actual = new IllegalArgumentException("Closed resource", new RuntimeException("Resource"));
+                expected = new ElasticsearchException("Elasticsearch exception [type=illegal_argument_exception, reason=Closed resource]",
+                        new ElasticsearchException("Elasticsearch exception [type=runtime_exception, reason=Resource]"));
+                break;
+            case 4:
+                actual = new SearchPhaseExecutionException("search", "all shards failed",
+                        new ShardSearchFailure[]{
+                                new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
+                                        new SearchShardTarget("node_1", new Index("foo", "_na_"), 1))
+                        });
+                expected = new ElasticsearchException("Elasticsearch exception [type=search_phase_execution_exception, " +
+                        "reason=all shards failed]");
+                expected.addMetadata("es.phase", "search");
+                break;
+            case 5:
+                actual = new ElasticsearchException("Parsing failed",
+                        new ParsingException(9, 42, "Wrong state",
+                                new NullPointerException("Unexpected null value")));
+
+                ElasticsearchException expectedCause = new ElasticsearchException("Elasticsearch exception [type=parsing_exception, " +
+                        "reason=Wrong state]", new ElasticsearchException("Elasticsearch exception [type=null_pointer_exception, " +
+                        "reason=Unexpected null value]"));
+                expected = new ElasticsearchException("Elasticsearch exception [type=exception, reason=Parsing failed]", expectedCause);
+                break;
+            default:
+                throw new UnsupportedOperationException("No randomized exceptions generated for type [" + type + "]");
+        }
+
+        if (actual instanceof ElasticsearchException) {
+            ElasticsearchException actualException = (ElasticsearchException) actual;
+            if (randomBoolean()) {
+                int nbHeaders = randomIntBetween(1, 5);
+                Map<String, List<String>> randomHeaders = new HashMap<>(nbHeaders);
+
+                for (int i = 0; i < nbHeaders; i++) {
+                    List<String> values = new ArrayList<>();
+
+                    int nbValues = randomIntBetween(1, 3);
+                    for (int j = 0; j < nbValues; j++) {
+                        values.add(frequently() ? randomAsciiOfLength(5) : "");
+                    }
+                    randomHeaders.put("header_" + i, values);
+                }
+
+                for (Map.Entry<String, List<String>> entry : randomHeaders.entrySet()) {
+                    actualException.addHeader(entry.getKey(), entry.getValue());
+                    expected.addHeader(entry.getKey(), entry.getValue());
+                }
+
+                if (rarely()) {
+                    // Empty or null headers are not printed out by the toXContent method
+                    actualException.addHeader("ignored", randomBoolean() ? emptyList() : null);
+                }
+            }
+
+            if (randomBoolean()) {
+                int nbMetadata = randomIntBetween(1, 5);
+                Map<String, List<String>> randomMetadata = new HashMap<>(nbMetadata);
+
+                for (int i = 0; i < nbMetadata; i++) {
+                    List<String> values = new ArrayList<>();
+
+                    int nbValues = randomIntBetween(1, 3);
+                    for (int j = 0; j < nbValues; j++) {
+                        values.add(frequently() ? randomAsciiOfLength(5) : "");
+                    }
+                    randomMetadata.put("es.metadata_" + i, values);
+                }
+
+                for (Map.Entry<String, List<String>> entry : randomMetadata.entrySet()) {
+                    actualException.addMetadata(entry.getKey(), entry.getValue());
+                    expected.addMetadata(entry.getKey(), entry.getValue());
+                }
+
+                if (rarely()) {
+                    // Empty or null metadata are not printed out by the toXContent method
+                    actualException.addMetadata("ignored", randomBoolean() ? emptyList() : null);
+                }
+            }
+
+            if (randomBoolean()) {
+                int nbResources = randomIntBetween(1, 5);
+                for (int i = 0; i < nbResources; i++) {
+                    String resourceType = "type_" + i;
+                    String[] resourceIds = null;
+                    if (frequently()) {
+                        resourceIds = new String[randomIntBetween(1, 3)];
+                        for (int j = 0; j < resourceIds.length; j++) {
+                            resourceIds[j] = frequently() ? randomAsciiOfLength(5) : "";
+                        }
+                    }
+                    actualException.setResources(resourceType, resourceIds);
+                    expected.setResources(resourceType, resourceIds);
+                }
+            }
+        }
+        return new Tuple<>(actual, expected);
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java
new file mode 100644
index 0000000000000..1c523d6503ed2
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.TimestampParsingException;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.shard.IndexShardClosedException;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.InvalidIndexTemplateException;
+import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.CoreMatchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
+
+public class SearchPhaseExecutionExceptionTests extends ESTestCase {
+
+    public void testToXContent() throws IOException {
+        SearchPhaseExecutionException exception = new SearchPhaseExecutionException("test", "all shards failed",
+                new ShardSearchFailure[]{
+                        new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
+                                new SearchShardTarget("node_1", new Index("foo", "_na_"), 0)),
+                        new ShardSearchFailure(new IndexShardClosedException(new ShardId(new Index("foo", "_na_"), 1)),
+                                new SearchShardTarget("node_2", new Index("foo", "_na_"), 1)),
+                        new ShardSearchFailure(new ParsingException(5, 7, "foobar", null),
+                                new SearchShardTarget("node_3", new Index("foo", "_na_"), 2)),
+                });
+
+        // Failures are grouped (by default)
+        assertEquals("{" +
+                        "\"type\":\"search_phase_execution_exception\"," +
+                        "\"reason\":\"all shards failed\"," +
+                        "\"phase\":\"test\"," +
+                        "\"grouped\":true," +
+                        "\"failed_shards\":[" +
+                            "{" +
+                                "\"shard\":0," +
+                                "\"index\":\"foo\"," +
+                                "\"node\":\"node_1\"," +
+                                "\"reason\":{" +
+                                    "\"type\":\"parsing_exception\"," +
+                                    "\"reason\":\"foobar\"," +
+                                    "\"line\":1," +
+                                    "\"col\":2" +
+                                "}" +
+                            "}," +
+                            "{" +
+                                "\"shard\":1," +
+                                "\"index\":\"foo\"," +
+                                "\"node\":\"node_2\"," +
+                                "\"reason\":{" +
+                                    "\"type\":\"index_shard_closed_exception\"," +
+                                    "\"reason\":\"CurrentState[CLOSED] Closed\"," +
+                                    "\"index_uuid\":\"_na_\"," +
+                                    "\"shard\":\"1\"," +
+                                    "\"index\":\"foo\"" +
+                                "}" +
+                            "}" +
+                        "]}", Strings.toString(exception));
+
+        // Failures are NOT grouped
+        ToXContent.MapParams params = new ToXContent.MapParams(singletonMap("group_shard_failures", "false"));
+        try (XContentBuilder builder = jsonBuilder()) {
+            builder.startObject();
+            exception.toXContent(builder, params);
+            builder.endObject();
+
+            assertEquals("{" +
+                            "\"type\":\"search_phase_execution_exception\"," +
+                            "\"reason\":\"all shards failed\"," +
+                            "\"phase\":\"test\"," +
+                            "\"grouped\":false," +
+                            "\"failed_shards\":[" +
+                                "{" +
+                                    "\"shard\":0," +
+                                    "\"index\":\"foo\"," +
+                                    "\"node\":\"node_1\"," +
+                                    "\"reason\":{" +
+                                        "\"type\":\"parsing_exception\"," +
+                                        "\"reason\":\"foobar\"," +
+                                        "\"line\":1," +
+                                        "\"col\":2" +
+                                    "}" +
+                                "}," +
+                                "{" +
+                                    "\"shard\":1," +
+                                    "\"index\":\"foo\"," +
+                                    "\"node\":\"node_2\"," +
+                                    "\"reason\":{" +
+                                        "\"type\":\"index_shard_closed_exception\"," +
+                                        "\"reason\":\"CurrentState[CLOSED] Closed\"," +
+                                        "\"index_uuid\":\"_na_\"," +
+                                        "\"shard\":\"1\"," +
+                                        "\"index\":\"foo\"" +
+                                    "}" +
+                                "}," +
+                                "{" +
+                                    "\"shard\":2," +
+                                    "\"index\":\"foo\"," +
+                                    "\"node\":\"node_3\"," +
+                                    "\"reason\":{" +
+                                        "\"type\":\"parsing_exception\"," +
+                                        "\"reason\":\"foobar\"," +
+                                        "\"line\":5," +
+                                        "\"col\":7" +
+                                    "}" +
+                                "}" +
+                            "]}", builder.string());
+        }
+    }
+
+    public void testToAndFromXContent() throws IOException {
+        final XContent xContent = randomFrom(XContentType.values()).xContent();
+
+        ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[randomIntBetween(1, 5)];
+        for (int i = 0; i < shardSearchFailures.length; i++) {
+            Exception cause = randomFrom(
+                    new ParsingException(1, 2, "foobar", null),
+                    new InvalidIndexTemplateException("foo", "bar"),
+                    new TimestampParsingException("foo", null),
+                    new NullPointerException()
+            );
+            shardSearchFailures[i] = new ShardSearchFailure(cause, new SearchShardTarget("node_" + i, new Index("test", "_na_"), i));
+        }
+
+        final String phase = randomFrom("query", "search", "other");
+        SearchPhaseExecutionException actual = new SearchPhaseExecutionException(phase, "unexpected failures", shardSearchFailures);
+
+        BytesReference exceptionBytes = XContentHelper.toXContent(actual, xContent.type(), randomBoolean());
+
+        ElasticsearchException parsedException;
+        try (XContentParser parser = createParser(xContent, exceptionBytes)) {
+            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+            parsedException = ElasticsearchException.fromXContent(parser);
+            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
+            assertNull(parser.nextToken());
+        }
+
+        assertNotNull(parsedException);
+        assertThat(parsedException.getHeaderKeys(), hasSize(0));
+        assertThat(parsedException.getMetadataKeys(), hasSize(1));
+        assertThat(parsedException.getMetadata("es.phase"), hasItem(phase));
+        // SearchPhaseExecutionException has no cause field
+        assertNull(parsedException.getCause());
+    }
+}