From bc74444669c60085dcc80bab2237fbd6f387d14e Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Thu, 20 May 2021 21:25:44 +0200
Subject: [PATCH] Stricter Parsing Shard Level Repository Metadata (#73269)

Similar to #73268 we should be stricter here, especially when we are
super-strict about additional fields anyway.
Also, use our parser exception utils to get better exceptions if parsing
fails.
---
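The sketch below (commentary only, not part of the applied change) illustrates
the strict parsing pattern this patch moves to, using the same
XContentParserUtils helpers that appear in the diff: ensureExpectedToken,
throwUnknownField and throwUnknownToken. ExampleBlobParser and its "name"
field are hypothetical and exist purely for illustration; only the helper
calls mirror the real code.

    // Illustrative sketch only: ExampleBlobParser and the "name" field are
    // made up; the XContentParserUtils calls are the ones used in this patch.
    import java.io.IOException;

    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.XContentParserUtils;

    final class ExampleBlobParser {

        static String parseName(XContentParser parser) throws IOException {
            // Fail fast if the parser is not positioned on the expected object start.
            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
            String name = null;
            XContentParser.Token token;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
                String fieldName = parser.currentName();
                token = parser.nextToken();
                if (token.isValue() == false) {
                    // A structured value where a scalar was expected: fail with the exact token location.
                    XContentParserUtils.throwUnknownToken(token, parser.getTokenLocation());
                }
                if ("name".equals(fieldName)) {
                    name = parser.text();
                } else {
                    // Reject unknown fields instead of silently skipping them.
                    XContentParserUtils.throwUnknownField(fieldName, parser.getTokenLocation());
                }
            }
            return name;
        }
    }

Compared with the code being removed below, the parser state is asserted up
front instead of being wrapped in an if/else, and unknown fields or tokens
produce exceptions that carry the token location rather than a generic
ElasticsearchParseException message.
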
 .../BlobStoreIndexShardSnapshot.java  | 114 +++++++++---------
 .../BlobStoreIndexShardSnapshots.java |  68 +++++------
 2 files changed, 89 insertions(+), 93 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java
index 3005227809d1e..4581ebe24052d 100644
--- a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java
+++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java
@@ -273,38 +273,37 @@ public static FileInfo fromXContent(XContentParser parser) throws IOException {
             Version writtenBy = null;
             String writtenByStr = null;
             BytesRef metaHash = new BytesRef();
-            if (token == XContentParser.Token.START_OBJECT) {
-                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                    if (token == XContentParser.Token.FIELD_NAME) {
-                        String currentFieldName = parser.currentName();
-                        token = parser.nextToken();
-                        if (token.isValue()) {
-                            if (NAME.equals(currentFieldName)) {
-                                name = parser.text();
-                            } else if (PHYSICAL_NAME.equals(currentFieldName)) {
-                                physicalName = parser.text();
-                            } else if (LENGTH.equals(currentFieldName)) {
-                                length = parser.longValue();
-                            } else if (CHECKSUM.equals(currentFieldName)) {
-                                checksum = parser.text();
-                            } else if (PART_SIZE.equals(currentFieldName)) {
-                                partSize = new ByteSizeValue(parser.longValue());
-                            } else if (WRITTEN_BY.equals(currentFieldName)) {
-                                writtenByStr = parser.text();
-                                writtenBy = Lucene.parseVersionLenient(writtenByStr, null);
-                            } else if (META_HASH.equals(currentFieldName)) {
-                                metaHash.bytes = parser.binaryValue();
-                                metaHash.offset = 0;
-                                metaHash.length = metaHash.bytes.length;
-                            } else {
-                                throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
-                            }
+            XContentParserUtils.ensureExpectedToken(token, XContentParser.Token.START_OBJECT, parser);
+            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                if (token == XContentParser.Token.FIELD_NAME) {
+                    String currentFieldName = parser.currentName();
+                    token = parser.nextToken();
+                    if (token.isValue()) {
+                        if (NAME.equals(currentFieldName)) {
+                            name = parser.text();
+                        } else if (PHYSICAL_NAME.equals(currentFieldName)) {
+                            physicalName = parser.text();
+                        } else if (LENGTH.equals(currentFieldName)) {
+                            length = parser.longValue();
+                        } else if (CHECKSUM.equals(currentFieldName)) {
+                            checksum = parser.text();
+                        } else if (PART_SIZE.equals(currentFieldName)) {
+                            partSize = new ByteSizeValue(parser.longValue());
+                        } else if (WRITTEN_BY.equals(currentFieldName)) {
+                            writtenByStr = parser.text();
+                            writtenBy = Lucene.parseVersionLenient(writtenByStr, null);
+                        } else if (META_HASH.equals(currentFieldName)) {
+                            metaHash.bytes = parser.binaryValue();
+                            metaHash.offset = 0;
+                            metaHash.length = metaHash.bytes.length;
                         } else {
-                            throw new ElasticsearchParseException("unexpected token [{}]", token);
+                            XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());
                         }
                     } else {
-                        throw new ElasticsearchParseException("unexpected token [{}]", token);
+                        XContentParserUtils.throwUnknownToken(token, parser.getTokenLocation());
                     }
+                } else {
+                    XContentParserUtils.throwUnknownToken(token, parser.getTokenLocation());
                 }
             }
 
@@ -513,39 +512,38 @@ public static BlobStoreIndexShardSnapshot fromXContent(XContentParser parser) th
             parser.nextToken();
         }
         XContentParser.Token token = parser.currentToken();
-        if (token == XContentParser.Token.START_OBJECT) {
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
-                final String currentFieldName = parser.currentName();
-                token = parser.nextToken();
-                if (token.isValue()) {
-                    if (PARSE_NAME.match(currentFieldName, parser.getDeprecationHandler())) {
-                        snapshot = parser.text();
-                    } else if (PARSE_INDEX_VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
-                        // The index-version is needed for backward compatibility with v 1.0
-                        indexVersion = parser.longValue();
-                    } else if (PARSE_START_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
-                        startTime = parser.longValue();
-                    } else if (PARSE_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
-                        time = parser.longValue();
-                    } else if (PARSE_INCREMENTAL_FILE_COUNT.match(currentFieldName, parser.getDeprecationHandler())) {
-                        incrementalFileCount = parser.intValue();
-                    } else if (PARSE_INCREMENTAL_SIZE.match(currentFieldName, parser.getDeprecationHandler())) {
-                        incrementalSize = parser.longValue();
-                    } else {
-                        throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
-                    }
-                } else if (token == XContentParser.Token.START_ARRAY) {
-                    if (PARSE_FILES.match(currentFieldName, parser.getDeprecationHandler())) {
-                        while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                            indexFiles.add(FileInfo.fromXContent(parser));
-                        }
-                    } else {
-                        throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
+            final String currentFieldName = parser.currentName();
+            token = parser.nextToken();
+            if (token.isValue()) {
+                if (PARSE_NAME.match(currentFieldName, parser.getDeprecationHandler())) {
+                    snapshot = parser.text();
+                } else if (PARSE_INDEX_VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
+                    // The index-version is needed for backward compatibility with v 1.0
+                    indexVersion = parser.longValue();
+                } else if (PARSE_START_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
+                    startTime = parser.longValue();
+                } else if (PARSE_TIME.match(currentFieldName, parser.getDeprecationHandler())) {
+                    time = parser.longValue();
+                } else if (PARSE_INCREMENTAL_FILE_COUNT.match(currentFieldName, parser.getDeprecationHandler())) {
+                    incrementalFileCount = parser.intValue();
+                } else if (PARSE_INCREMENTAL_SIZE.match(currentFieldName, parser.getDeprecationHandler())) {
+                    incrementalSize = parser.longValue();
+                } else {
+                    XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());
+                }
+            } else if (token == XContentParser.Token.START_ARRAY) {
+                if (PARSE_FILES.match(currentFieldName, parser.getDeprecationHandler())) {
+                    while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                        indexFiles.add(FileInfo.fromXContent(parser));
                     }
                 } else {
-                    throw new ElasticsearchParseException("unexpected token [{}]", token);
+                    XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());
                 }
+            } else {
+                XContentParserUtils.throwUnknownToken(token, parser.getTokenLocation());
             }
         }
 
diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java
index db07d301a7d76..4c9aef0aad846 100644
--- a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java
+++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshots.java
@@ -8,7 +8,6 @@
 
 package org.elasticsearch.index.snapshots.blobstore;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -248,47 +247,46 @@ public static BlobStoreIndexShardSnapshots fromXContent(XContentParser parser) t
         Map<String, List<String>> snapshotsMap = new HashMap<>();
         Map<String, String> historyUUIDs = new HashMap<>();
         Map<String, FileInfo> files = new HashMap<>();
-        if (token == XContentParser.Token.START_OBJECT) {
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
-                String currentFieldName = parser.currentName();
-                token = parser.nextToken();
-                if (token == XContentParser.Token.START_ARRAY) {
-                    if (ParseFields.FILES.match(currentFieldName, parser.getDeprecationHandler()) == false) {
-                        throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
-                    }
-                    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
-                        FileInfo fileInfo = FileInfo.fromXContent(parser);
-                        files.put(fileInfo.name(), fileInfo);
-                    }
-                } else if (token == XContentParser.Token.START_OBJECT) {
-                    if (ParseFields.SNAPSHOTS.match(currentFieldName, parser.getDeprecationHandler()) == false) {
-                        throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
-                    }
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
+            String currentFieldName = parser.currentName();
+            token = parser.nextToken();
+            if (token == XContentParser.Token.START_ARRAY) {
+                if (ParseFields.FILES.match(currentFieldName, parser.getDeprecationHandler()) == false) {
+                    XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());
+                }
+                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
+                    FileInfo fileInfo = FileInfo.fromXContent(parser);
+                    files.put(fileInfo.name(), fileInfo);
+                }
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (ParseFields.SNAPSHOTS.match(currentFieldName, parser.getDeprecationHandler()) == false) {
+                    XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());
+                }
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
+                    String snapshot = parser.currentName();
+                    XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
                     while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
-                        String snapshot = parser.currentName();
-                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                            if (token == XContentParser.Token.FIELD_NAME) {
-                                currentFieldName = parser.currentName();
-                                if (ParseFields.FILES.match(currentFieldName, parser.getDeprecationHandler()) &&
+                        if (token == XContentParser.Token.FIELD_NAME) {
+                            currentFieldName = parser.currentName();
+                            if (ParseFields.FILES.match(currentFieldName, parser.getDeprecationHandler()) &&
                                 parser.nextToken() == XContentParser.Token.START_ARRAY) {
-                                    List<String> fileNames = new ArrayList<>();
-                                    while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
-                                        fileNames.add(parser.text());
-                                    }
-                                    snapshotsMap.put(snapshot, fileNames);
-                                } else if (ParseFields.SHARD_STATE_ID.match(currentFieldName, parser.getDeprecationHandler())) {
-                                    parser.nextToken();
-                                    historyUUIDs.put(snapshot, parser.text());
+                                List<String> fileNames = new ArrayList<>();
+                                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
+                                    fileNames.add(parser.text());
                                 }
+                                snapshotsMap.put(snapshot, fileNames);
+                            } else if (ParseFields.SHARD_STATE_ID.match(currentFieldName, parser.getDeprecationHandler())) {
+                                parser.nextToken();
+                                historyUUIDs.put(snapshot, parser.text());
                             }
                         }
                     }
-                } else {
-                    throw new ElasticsearchParseException("unexpected token [{}]", token);
                 }
+            } else {
+                XContentParserUtils.throwUnknownToken(token, parser.getTokenLocation());
             }
         }
 