From 1439477510c6c422fcf93e3a99829ee825d7a3b4 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Sun, 21 Jul 2024 16:34:59 +0100 Subject: [PATCH 1/6] HADOOP-19231. Add JacksonUtil --- .../org/apache/hadoop/conf/Configuration.java | 5 +- .../crypto/key/kms/KMSClientProvider.java | 11 ++- .../apache/hadoop/ipc/DecayRpcScheduler.java | 3 +- .../java/org/apache/hadoop/ipc/Server.java | 5 +- .../org/apache/hadoop/jmx/JMXJsonServlet.java | 3 +- .../hadoop/metrics2/MetricsJsonBuilder.java | 4 +- .../DelegationTokenAuthenticationHandler.java | 5 +- .../org/apache/hadoop/util/JacksonUtil.java | 96 +++++++++++++++++++ .../apache/hadoop/util/JsonSerialization.java | 10 +- .../crypto/key/kms/server/KMSJSONReader.java | 3 +- .../server/datanode/DiskBalancerWorkItem.java | 5 +- .../datanode/DiskBalancerWorkStatus.java | 11 ++- .../hdfs/util/CombinedHostsFileReader.java | 16 +++- .../hdfs/util/CombinedHostsFileWriter.java | 3 +- .../hadoop/hdfs/web/JsonUtilClient.java | 4 +- .../fs/http/client/HttpFSFileSystem.java | 3 +- .../blockmanagement/SlowDiskTracker.java | 4 +- .../blockmanagement/SlowPeerTracker.java | 4 +- .../datanode/fsdataset/impl/FsVolumeImpl.java | 6 +- .../fsdataset/impl/ProvidedVolumeImpl.java | 3 +- .../server/diskbalancer/command/Command.java | 3 +- .../connectors/JsonNodeConnector.java | 7 +- .../datamodel/DiskBalancerCluster.java | 4 +- .../datamodel/DiskBalancerVolume.java | 4 +- .../server/diskbalancer/planner/NodePlan.java | 5 +- .../namenode/NetworkTopologyServlet.java | 3 +- .../namenode/StartupProgressServlet.java | 4 +- .../org/apache/hadoop/hdfs/web/JsonUtil.java | 3 +- .../apache/hadoop/mapred/QueueManager.java | 3 +- .../mapreduce/util/JobHistoryEventUtils.java | 3 +- .../fs/azure/NativeAzureFileSystem.java | 3 +- .../fs/azure/RemoteSASKeyGeneratorImpl.java | 6 +- .../fs/azure/RemoteWasbAuthorizerImpl.java | 5 +- .../azurebfs/oauth2/AzureADAuthenticator.java | 5 +- .../azurebfs/services/AbfsHttpOperation.java | 8 +- .../tools/dynamometer/DynoInfraUtils.java | 5 +- .../apache/hadoop/tools/rumen/Anonymizer.java | 11 +-- .../tools/rumen/JsonObjectMapperParser.java | 9 +- .../tools/rumen/JsonObjectMapperWriter.java | 5 +- .../hadoop/tools/rumen/state/StatePool.java | 10 +- .../hadoop/tools/rumen/TestHistograms.java | 4 +- .../org/apache/hadoop/yarn/sls/AMRunner.java | 6 +- .../hadoop/yarn/sls/RumenToSLSConverter.java | 8 +- .../sls/synthetic/SynthTraceJobProducer.java | 6 +- .../hadoop/yarn/sls/utils/SLSUtils.java | 12 +-- .../yarn/sls/TestSynthJobGeneration.java | 5 +- .../application/AppCatalogSolrClient.java | 54 +++++------ .../application/YarnServiceClient.java | 34 ++++--- .../yarn/service/utils/JsonSerDeser.java | 5 +- .../service/utils/PublishedConfiguration.java | 17 +++- .../api/impl/FileSystemTimelineWriter.java | 5 +- .../yarn/util/DockerClientConfigHandler.java | 4 +- .../apache/hadoop/yarn/webapp/Controller.java | 3 +- .../server/timeline/GenericObjectMapper.java | 11 +-- .../containermanager/AuxServices.java | 3 +- .../NetworkTagMappingJsonManager.java | 5 +- .../linux/runtime/RuncContainerRuntime.java | 4 +- .../runc/ImageTagToManifestPlugin.java | 12 ++- .../resource/ResourceProfilesManagerImpl.java | 11 ++- .../placement/MappingRuleCreator.java | 15 ++- .../converter/LegacyMappingRuleToJson.java | 15 +-- .../FSConfigToCSConfigConverter.java | 6 +- .../timeline/EntityGroupFSTimelineStore.java | 3 +- .../timeline/LevelDBCacheTimelineStore.java | 3 +- .../server/timeline/PluginStoreTestUtils.java | 9 +- .../documentstore/JsonUtils.java | 3 +- 
.../storage/FileSystemTimelineReaderImpl.java | 4 +- 67 files changed, 369 insertions(+), 208 deletions(-) create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 94285a4dfb7e5..6c25af5b099fc 100755 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -101,6 +101,7 @@ import org.apache.hadoop.security.alias.CredentialProviderFactory; import org.apache.hadoop.thirdparty.com.google.common.base.Strings; import org.apache.hadoop.util.ConfigurationHelper; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringInterner; @@ -3792,7 +3793,7 @@ public static void dumpConfiguration(Configuration config, throw new IllegalArgumentException("Property " + propertyName + " not found"); } else { - JsonFactory dumpFactory = new JsonFactory(); + final JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); JsonGenerator dumpGenerator = dumpFactory.createGenerator(out); dumpGenerator.writeStartObject(); dumpGenerator.writeFieldName("property"); @@ -3831,7 +3832,7 @@ public static void dumpConfiguration(Configuration config, */ public static void dumpConfiguration(Configuration config, Writer out) throws IOException { - JsonFactory dumpFactory = new JsonFactory(); + final JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); JsonGenerator dumpGenerator = dumpFactory.createGenerator(out); dumpGenerator.writeStartObject(); dumpGenerator.writeFieldName("properties"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java index 6ee9068ea3458..b4b66cbf6da73 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -41,6 +41,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL; import org.apache.hadoop.util.HttpExceptionUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.JsonSerialization; import org.apache.hadoop.util.KMSUtil; import org.apache.http.client.utils.URIBuilder; @@ -129,6 +130,13 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, + "authentication.retry-count"; public static final int DEFAULT_AUTH_RETRY = 1; + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + private final ValueQueue encKeyVersionQueue; private KeyProviderDelegationTokenExtension.DelegationTokenExtension @@ -592,11 +600,10 @@ private T call(HttpURLConnection conn, Object jsonOutput, && conn.getContentType().trim().toLowerCase() .startsWith(APPLICATION_JSON_MIME) && klass != null) { - ObjectMapper mapper = new ObjectMapper(); InputStream is = null; try { is = conn.getInputStream(); - ret = mapper.readValue(is, klass); + ret = OBJECT_MAPPER.readValue(is, klass); } finally { IOUtils.closeStream(is); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java index 63274bb01e72d..756f31e842f45 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java @@ -42,6 +42,7 @@ import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AtomicDoubleArray; import org.apache.commons.lang3.exception.ExceptionUtils; @@ -146,7 +147,7 @@ public class DecayRpcScheduler implements RpcScheduler, public static final Logger LOG = LoggerFactory.getLogger(DecayRpcScheduler.class); - private static final ObjectWriter WRITER = new ObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.createBasicWriter(); // Track the decayed and raw (no decay) number of calls for each schedulable // identity from all previous decay windows: idx 0 for decayed call cost and diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index 0d9e7296d2a4c..16ca3607c1c9f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -121,6 +121,7 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.ExitUtil; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.ProtoUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; @@ -130,7 +131,6 @@ import org.apache.hadoop.tracing.TraceScope; import org.apache.hadoop.tracing.Tracer; import org.apache.hadoop.tracing.TraceUtils; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -3843,9 +3843,8 @@ public int getNumOpenConnections() { * @return Get the NumOpenConnections/User. 
*/ public String getNumOpenConnectionsPerUser() { - ObjectMapper mapper = new ObjectMapper(); try { - return mapper + return JacksonUtil.createBasicObjectMapper() .writeValueAsString(connectionManager.getUserToConnectionsMap()); } catch (IOException ignored) { } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index f089db502783e..64e9554c23562 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -45,6 +45,7 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -146,7 +147,7 @@ public class JMXJsonServlet extends HttpServlet { public void init() throws ServletException { // Retrieve the MBean server mBeanServer = ManagementFactory.getPlatformMBeanServer(); - jsonFactory = new JsonFactory(); + jsonFactory = JacksonUtil.createBasicJsonFactory(); } protected boolean isInstrumentationAccessAllowed(HttpServletRequest request, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java index 3a9be12803143..2a38a9906f2bd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java @@ -22,8 +22,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,7 +47,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder { private Map innerMetrics = new LinkedHashMap<>(); private static final ObjectWriter WRITER = - new ObjectMapper().writer(); + JacksonUtil.createBasicWriter(); /** * Build an instance. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java index f4ede6f35edb0..62c7c4ba6e024 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java @@ -46,6 +46,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; import org.apache.hadoop.util.HttpExceptionUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -165,7 +166,7 @@ public void initTokenManager(Properties config) { @VisibleForTesting public void initJsonFactory(Properties config) { boolean hasFeature = false; - JsonFactory tmpJsonFactory = new JsonFactory(); + JsonFactory tmpJsonFactory = JacksonUtil.createBasicJsonFactory(); for (Map.Entry entry : config.entrySet()) { String key = (String)entry.getKey(); @@ -335,7 +336,7 @@ public boolean managementOperation(AuthenticationToken token, if (map != null) { response.setContentType(MediaType.APPLICATION_JSON); Writer writer = response.getWriter(); - ObjectMapper jsonMapper = new ObjectMapper(jsonFactory); + ObjectMapper jsonMapper = JacksonUtil.createObjectMapper(jsonFactory); jsonMapper.writeValue(writer, map); writer.write(ENTER); writer.flush(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java new file mode 100644 index 0000000000000..f417c35cd35c6 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java @@ -0,0 +1,96 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.util; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.databind.JavaType; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.fasterxml.jackson.databind.json.JsonMapper; + +/** + * Utility for sharing code related to Jackson usage in Hadoop. 
+ *
+ * @since 3.5.0
+ */
+public class JacksonUtil {
+
+  private static final JsonFactory DEFAULT_JSON_FACTORY = createBasicJsonFactory();
+  private static final ObjectMapper DEFAULT_OBJECT_MAPPER = createBasicObjectMapper();
+
+  /**
+   * Creates a new {@link JsonFactory} instance with basic configuration.
+   *
+   * @return a {@link JsonFactory} with basic configuration
+   */
+  public static JsonFactory createBasicJsonFactory() {
+    // do not expose DEFAULT_JSON_FACTORY because we don't want anyone to access it and modify it
+    return new JsonFactory();
+  }
+
+  /**
+   * Creates a new {@link ObjectMapper} instance with basic configuration.
+   *
+   * @return an {@link ObjectMapper} with basic configuration
+   */
+  public static ObjectMapper createBasicObjectMapper() {
+    // do not expose DEFAULT_OBJECT_MAPPER because we don't want anyone to access it and modify it
+    return JsonMapper.builder(DEFAULT_JSON_FACTORY).build();
+  }
+
+  /**
+   * Creates a new {@link ObjectMapper} instance based on the configuration
+   * in the input {@link JsonFactory}.
+   *
+   * @param jsonFactory a pre-configured {@link JsonFactory}
+   * @return an {@link ObjectMapper} with configuration set by the input {@link JsonFactory}.
+   */
+  public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) {
+    return JsonMapper.builder(jsonFactory).build();
+  }
+
+  /**
+   * Creates a new {@link ObjectReader} for the provided type.
+   *
+   * @param type a class instance
+   * @return an {@link ObjectReader} with basic configuration
+   */
+  public static ObjectReader createReaderFor(final Class<?> type) {
+    return DEFAULT_OBJECT_MAPPER.readerFor(type);
+  }
+
+  /**
+   * Creates a new {@link ObjectReader} for the provided type.
+   *
+   * @param type a {@link JavaType} instance
+   * @return an {@link ObjectReader} with basic configuration
+   */
+  public static ObjectReader createReaderFor(final JavaType type) {
+    return DEFAULT_OBJECT_MAPPER.readerFor(type);
+  }
+
+  /**
+   * Creates a new {@link ObjectWriter} with basic configuration.
+ * + * @return an {@link ObjectWriter} with basic configuration + */ + public static ObjectWriter createBasicWriter() { + return DEFAULT_OBJECT_MAPPER.writer(); + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java index 52c6c4505226a..0d180860a1291 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java @@ -76,11 +76,11 @@ public class JsonSerialization { private final Class classType; private final ObjectMapper mapper; - private static final ObjectWriter WRITER = - new ObjectMapper().writerWithDefaultPrettyPrinter(); + private static final ObjectWriter WRITER = JacksonUtil + .createBasicObjectMapper() + .writerWithDefaultPrettyPrinter(); - private static final ObjectReader MAP_READER = - new ObjectMapper().readerFor(Map.class); + private static final ObjectReader MAP_READER = JacksonUtil.createReaderFor(Map.class); /** * @return an ObjectWriter which pretty-prints its output @@ -106,7 +106,7 @@ public JsonSerialization(Class classType, boolean failOnUnknownProperties, boolean pretty) { Preconditions.checkArgument(classType != null, "null classType"); this.classType = classType; - this.mapper = new ObjectMapper(); + this.mapper = JacksonUtil.createBasicObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, failOnUnknownProperties); mapper.configure(SerializationFeature.INDENT_OUTPUT, pretty); diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java index af781f5277850..e7ad7c3c0a79c 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java @@ -20,6 +20,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.JacksonUtil; import javax.ws.rs.Consumes; import javax.ws.rs.WebApplicationException; @@ -38,7 +39,7 @@ @Consumes(MediaType.APPLICATION_JSON) @InterfaceAudience.Private public class KMSJSONReader implements MessageBodyReader { - private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); @Override public boolean isReadable(Class type, Type genericType, diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java index d1ad5a2079f5f..8d4a106b18307 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java @@ -22,6 +22,7 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; +import org.apache.hadoop.util.JacksonUtil; import 
org.apache.hadoop.util.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -35,9 +36,9 @@ @InterfaceStability.Unstable @JsonInclude(JsonInclude.Include.NON_DEFAULT) public class DiskBalancerWorkItem { - private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); private static final ObjectReader READER = - new ObjectMapper().readerFor(DiskBalancerWorkItem.class); + JacksonUtil.createReaderFor(DiskBalancerWorkItem.class); private long startTime; private long secondsElapsed; diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java index 5a5da7326a4e0..07dc3b8def0b3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.SerializationFeature; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -39,13 +40,13 @@ @InterfaceAudience.Private @InterfaceStability.Unstable public class DiskBalancerWorkStatus { - private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT = - new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT); + JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT); private static final ObjectReader READER_WORKSTATUS = - new ObjectMapper().readerFor(DiskBalancerWorkStatus.class); - private static final ObjectReader READER_WORKENTRY = new ObjectMapper() - .readerFor(defaultInstance().constructCollectionType(List.class, + JacksonUtil.createReaderFor(DiskBalancerWorkStatus.class); + private static final ObjectReader READER_WORKENTRY = JacksonUtil.createReaderFor( + defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class)); private final List currentState; diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java index 33f4934e5489d..156aaffee0b86 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java @@ -43,6 +43,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,6 +67,13 @@ public final class CombinedHostsFileReader { public static final Logger LOG = LoggerFactory.getLogger(CombinedHostsFileReader.class); + /** + * It is more performant to reuse ObjectMapper instances 
but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. + */ + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + private CombinedHostsFileReader() { } @@ -83,7 +91,6 @@ private CombinedHostsFileReader() { public static DatanodeAdminProperties[] readFile(final String hostsFilePath) throws IOException { DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0]; - ObjectMapper objectMapper = new ObjectMapper(); File hostFile = new File(hostsFilePath); boolean tryOldFormat = false; @@ -91,7 +98,7 @@ private CombinedHostsFileReader() { try (Reader input = new InputStreamReader( Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) { - allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class); + allDNs = OBJECT_MAPPER.readValue(input, DatanodeAdminProperties[].class); } catch (JsonMappingException jme) { // The old format doesn't have json top-level token to enclose // the array. @@ -104,14 +111,13 @@ private CombinedHostsFileReader() { if (tryOldFormat) { ObjectReader objectReader = - objectMapper.readerFor(DatanodeAdminProperties.class); - JsonFactory jsonFactory = new JsonFactory(); + JacksonUtil.createReaderFor(DatanodeAdminProperties.class); List all = new ArrayList<>(); try (Reader input = new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)), StandardCharsets.UTF_8)) { Iterator iterator = - objectReader.readValues(jsonFactory.createParser(input)); + objectReader.readValues(OBJECT_MAPPER.createParser(input)); while (iterator.hasNext()) { DatanodeAdminProperties properties = iterator.next(); all.add(properties); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java index de4c12d556cc7..e2ef4591b6b8d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java @@ -31,6 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties; +import org.apache.hadoop.util.JacksonUtil; /** * Writer support for JSON-based datanode configuration, an alternative format @@ -59,7 +60,7 @@ private CombinedHostsFileWriter() { */ public static void writeFile(final String hostsFile, final Set allDNs) throws IOException { - final ObjectMapper objectMapper = new ObjectMapper(); + final ObjectMapper objectMapper = JacksonUtil.createBasicObjectMapper(); try (Writer output = new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)), diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java index 108f74997a63e..8695d6c72a967 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hdfs.web; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import org.apache.hadoop.classification.VisibleForTesting; import 
org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.apache.hadoop.fs.ContentSummary; @@ -654,7 +654,7 @@ static List toXAttrNames(final Map json) } final String namesInJson = (String) json.get("XAttrNames"); - ObjectReader reader = new ObjectMapper().readerFor(List.class); + ObjectReader reader = JacksonUtil.createBasicObjectMapper().readerFor(List.class); final List xattrs = reader.readValue(namesInJson); final List names = Lists.newArrayListWithCapacity(json.keySet().size()); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java index dab4776575bff..1ec907004bd26 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java @@ -71,6 +71,7 @@ import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator; import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator; import org.apache.hadoop.util.HttpExceptionUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; @@ -1818,7 +1819,7 @@ public Collection getTrashRoots(boolean allUsers) { @VisibleForTesting static BlockLocation[] toBlockLocations(JSONObject json) throws IOException { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); MapType subType = mapper.getTypeFactory().constructMapType(Map.class, String.class, BlockLocation[].class); MapType rootType = mapper.getTypeFactory().constructMapType(Map.class, diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java index 798b5fb5966f7..a4629d484f009 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList; @@ -32,6 +31,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports; import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports.DiskOp; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Timer; import org.slf4j.Logger; @@ -71,7 +71,7 @@ public class SlowDiskTracker { /** * ObjectWriter to convert JSON reports to String. 
*/ - private static final ObjectWriter WRITER = new ObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.createBasicWriter(); /** * Number of disks to include in JSON report per operation. We will return diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java index e4feb4815eee4..6ecc9ff326858 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.server.protocol.OutlierMetrics; import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Timer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,7 +76,8 @@ public class SlowPeerTracker { /** * ObjectWriter to convert JSON reports to String. */ - private static final ObjectWriter WRITER = new ObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.createBasicWriter(); + /** * Number of nodes to include in JSON report. We will return nodes with * the highest number of votes from peers. diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java index 6b026823f19f9..29522bd2bb78d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java @@ -79,6 +79,7 @@ import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Timer; import org.slf4j.Logger; @@ -104,9 +105,8 @@ public class FsVolumeImpl implements FsVolumeSpi { public static final Logger LOG = LoggerFactory.getLogger(FsVolumeImpl.class); private static final ObjectWriter WRITER = - new ObjectMapper().writerWithDefaultPrettyPrinter(); - private static final ObjectReader READER = - new ObjectMapper().readerFor(BlockIteratorState.class); + JacksonUtil.createBasicObjectMapper().writerWithDefaultPrettyPrinter(); + private static final ObjectReader READER = JacksonUtil.createReaderFor(BlockIteratorState.class); private final FsDatasetImpl dataset; private final String storageID; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java index 69a46257317bf..1e25c32c40f06 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java @@ -60,6 +60,7 @@ import 
org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.util.DiskChecker.DiskErrorException; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Timer; @@ -369,7 +370,7 @@ public void releaseReservedSpace(long bytesToRelease) { } private static final ObjectWriter WRITER = - new ObjectMapper().writerWithDefaultPrettyPrinter(); + JacksonUtil.createBasicObjectMapper().writerWithDefaultPrettyPrinter(); private static class ProvidedBlockIteratorState { ProvidedBlockIteratorState() { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java index c90b77e98d2e8..85e6a3b56a52d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java @@ -47,6 +47,7 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.HostsFileReader; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,7 +79,7 @@ */ public abstract class Command extends Configured implements Closeable { private static final ObjectReader READER = - new ObjectMapper().readerFor(HashMap.class); + JacksonUtil.createBasicObjectMapper().readerFor(HashMap.class); static final Logger LOG = LoggerFactory.getLogger(Command.class); private Map validArgs = new HashMap<>(); private URI clusterURI; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java index 1cc82253f9885..494a9f3c5ad10 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java @@ -17,15 +17,14 @@ package org.apache.hadoop.hdfs.server.diskbalancer.connectors; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster; -import org.apache.hadoop.hdfs.server.diskbalancer.datamodel - .DiskBalancerDataNode; +import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode; import java.io.File; import java.net.URL; @@ -38,7 +37,7 @@ public class JsonNodeConnector implements ClusterConnector { private static final Logger LOG = LoggerFactory.getLogger(JsonNodeConnector.class); private static final ObjectReader READER = - new ObjectMapper().readerFor(DiskBalancerCluster.class); + JacksonUtil.createReaderFor(DiskBalancerCluster.class); private final URL clusterURI; /** diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java index 7e935a3f82058..e2a1203f6a357 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java @@ -19,8 +19,8 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import org.apache.commons.io.FileUtils; @@ -73,7 +73,7 @@ public class DiskBalancerCluster { private static final Logger LOG = LoggerFactory.getLogger(DiskBalancerCluster.class); private static final ObjectReader READER = - new ObjectMapper().readerFor(DiskBalancerCluster.class); + JacksonUtil.createReaderFor(DiskBalancerCluster.class); private final Set exclusionList; private final Set inclusionList; private ClusterConnector clusterConnector; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java index e43b83e39ce3a..0dd03a7ab49fd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java @@ -19,10 +19,10 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import org.apache.hadoop.hdfs.web.JsonUtil; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,7 +34,7 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class DiskBalancerVolume { private static final ObjectReader READER = - new ObjectMapper().readerFor(DiskBalancerVolume.class); + JacksonUtil.createReaderFor(DiskBalancerVolume.class); private static final Logger LOG = LoggerFactory.getLogger(DiskBalancerVolume.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java index 39a7c57bca2cd..f03505bb4c001 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; import java.io.IOException; @@ -39,8 +40,8 @@ public class NodePlan { private int port; private long timeStamp; - private static final ObjectMapper MAPPER = new 
ObjectMapper(); - private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); + private static final ObjectReader READER = JacksonUtil.createReaderFor(NodePlan.class); private static final ObjectWriter WRITER = MAPPER.writerFor( MAPPER.constructType(NodePlan.class)); /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java index a6460280835d3..e789e507861b9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java @@ -26,6 +26,7 @@ import org.apache.hadoop.net.NodeBase; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import javax.servlet.ServletContext; @@ -123,7 +124,7 @@ protected void printTopology(PrintStream stream, List leaves, protected void printJsonFormat(PrintStream stream, Map> tree, ArrayList racks) throws IOException { - JsonFactory dumpFactory = new JsonFactory(); + JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream); dumpGenerator.writeStartArray(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java index 449a1aa62ab46..d0fee730a89d5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java @@ -21,7 +21,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase; import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress; @@ -31,6 +30,7 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.JacksonUtil; /** * Servlet that provides a JSON representation of the namenode's current startup @@ -61,7 +61,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext( getServletContext()); StartupProgressView view = prog.createView(); - JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter()); + JsonGenerator json = JacksonUtil.createBasicJsonFactory().createGenerator(resp.getWriter()); try { json.writeStartObject(); json.writeNumberField(ELAPSED_TIME, view.getElapsedTime()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java index 5f90404ebee25..4ea9a4afc8ad7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java @@ -38,6 +38,7 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.StringUtils; @@ -56,7 +57,7 @@ public class JsonUtil { // ObjectMapper is thread safe as long as we always configure instance // before use. We don't have a re-entrant call pattern in WebHDFS, // so we just need to worry about thread-safety. - private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); /** Convert a token object to a Json string. */ public static String toJsonString(final Token token diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java index ec43bce678b26..ab1821bf5707a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java @@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.QueueState; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -531,7 +532,7 @@ static void dumpConfiguration(Writer out, String configFile, return; } - JsonFactory dumpFactory = new JsonFactory(); + JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); JsonGenerator dumpGenerator = dumpFactory.createGenerator(out); QueueConfigurationParser parser; boolean aclsEnabled = false; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java index b5c8b1178d1dd..c7cd7a63a8692 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java @@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.CounterGroup; import org.apache.hadoop.mapreduce.Counters; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric; /** @@ -41,7 +42,7 @@ private JobHistoryEventUtils() { public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024; public static JsonNode countersToJSON(Counters counters) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ArrayNode nodes = mapper.createArrayNode(); if (counters != null) { for (CounterGroup counterGroup : counters) { diff --git 
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java index 4e777da8b409f..e37efd296ef06 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java @@ -84,6 +84,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.LambdaUtils; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Time; @@ -127,7 +128,7 @@ public static class FolderRenamePending { private static final int FORMATTING_BUFFER = 10000; private boolean committed; public static final String SUFFIX = "-RenamePending.json"; - private static final ObjectReader READER = new ObjectMapper() + private static final ObjectReader READER = JacksonUtil.createBasicObjectMapper() .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true) .readerFor(JsonNode.class); diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java index 473fa54f97c83..c77971e540f6a 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java @@ -31,6 +31,7 @@ import org.apache.hadoop.io.retry.RetryUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.http.NameValuePair; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.utils.URIBuilder; @@ -40,7 +41,6 @@ import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE; @@ -53,8 +53,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl { public static final Logger LOG = LoggerFactory.getLogger(AzureNativeFileSystemStore.class); - private static final ObjectReader RESPONSE_READER = new ObjectMapper() - .readerFor(RemoteSASKeyGenerationResponse.class); + private static final ObjectReader RESPONSE_READER = JacksonUtil + .createReaderFor(RemoteSASKeyGenerationResponse.class); /** * Configuration parameter name expected in the Configuration diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java index eca8443b6c587..9abc498966f74 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java @@ -29,6 +29,7 @@ import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.utils.URIBuilder; import 
org.slf4j.Logger; @@ -49,8 +50,8 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface { public static final Logger LOG = LoggerFactory .getLogger(RemoteWasbAuthorizerImpl.class); - private static final ObjectReader RESPONSE_READER = new ObjectMapper() - .readerFor(RemoteWasbAuthorizerResponse.class); + private static final ObjectReader RESPONSE_READER = JacksonUtil + .createReaderFor(RemoteWasbAuthorizerResponse.class); /** * Configuration parameter name expected in the Configuration object to diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java index dab4d79658451..9945715714aea 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java @@ -29,9 +29,9 @@ import java.util.Hashtable; import java.util.Map; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import org.slf4j.Logger; @@ -493,8 +493,7 @@ private static AzureADToken parseTokenFromStream( int expiryPeriodInSecs = 0; long expiresOnInSecs = -1; - JsonFactory jf = new JsonFactory(); - JsonParser jp = jf.createParser(httpResponseStream); + JsonParser jp = JacksonUtil.createBasicJsonFactory().createParser(httpResponseStream); String fieldName, fieldValue; jp.nextToken(); while (jp.hasCurrentToken()) { diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java index e2ce5c628a4b6..e5bb80bf65b9d 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java @@ -30,7 +30,7 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -447,7 +447,7 @@ private void processStorageErrorResponse() { if (stream == null) { return; } - JsonFactory jf = new JsonFactory(); + JsonFactory jf = JacksonUtil.createBasicJsonFactory(); try (JsonParser jp = jf.createParser(stream)) { String fieldName, fieldValue; jp.nextToken(); // START_OBJECT - { @@ -509,9 +509,7 @@ private void parseListFilesResponse(final InputStream stream) } try { - final ObjectMapper objectMapper = new ObjectMapper(); - this.listResultSchema = objectMapper.readValue(stream, - ListResultSchema.class); + this.listResultSchema = JacksonUtil.createBasicObjectMapper().readValue(stream, ListResultSchema.class); } catch (IOException ex) { log.error("Unable to deserialize list results", ex); throw ex; diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java index f6c8a6ac4d58b..04e98754ca837 100644 --- 
a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Time; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; @@ -484,7 +485,7 @@ static Set parseStaleDataNodeList(String liveNodeJsonString, final int blockThreshold, final Logger log) throws IOException { final Set dataNodesToReport = new HashSet<>(); - JsonFactory fac = new JsonFactory(); + JsonFactory fac = JacksonUtil.createBasicJsonFactory(); JsonParser parser = fac.createParser(IOUtils .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name())); @@ -554,7 +555,7 @@ static String fetchNameNodeJMXValue(Properties nameNodeProperties, "Unable to retrieve JMX: " + conn.getResponseMessage()); } InputStream in = conn.getInputStream(); - JsonFactory fac = new JsonFactory(); + JsonFactory fac = JacksonUtil.createBasicJsonFactory(); JsonParser parser = fac.createParser(in); if (parser.nextToken() != JsonToken.START_OBJECT || parser.nextToken() != JsonToken.FIELD_NAME diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java index 3c85a93ddbfc9..dc0856cd58a09 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java @@ -22,7 +22,6 @@ import java.io.OutputStream; import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.ObjectMapper; @@ -36,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.mapreduce.ID; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.tools.rumen.datatypes.*; @@ -55,8 +55,7 @@ public class Anonymizer extends Configured implements Tool { private StatePool statePool; private ObjectMapper outMapper = null; - private JsonFactory outFactory = null; - + private void initialize(String[] args) throws Exception { try { for (int i = 0; i < args.length; ++i) { @@ -85,7 +84,7 @@ private void initialize(String[] args) throws Exception { // initialize the state manager after the anonymizers are registered statePool.initialize(getConf()); - outMapper = new ObjectMapper(); + outMapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule( "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", "")); @@ -104,8 +103,6 @@ private void initialize(String[] args) throws Exception { // register the module with the object-mapper outMapper.registerModule(module); - - outFactory = outMapper.getFactory(); } // anonymize the job trace file @@ -191,7 +188,7 @@ private JsonGenerator createJsonGenerator(Configuration conf, Path path) } JsonGenerator outGen = - 
outFactory.createGenerator(output, JsonEncoding.UTF8); + outMapper.createGenerator(output, JsonEncoding.UTF8); outGen.useDefaultPrettyPrinter(); return outGen; diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java index f95878dde95e3..3d644b5ad2272 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.util.JacksonUtil; /** * A simple wrapper for parsing JSON-encoded data using ObjectMapper. @@ -48,10 +49,10 @@ class JsonObjectMapperParser implements Closeable { */ public JsonObjectMapperParser(Path path, Class clazz, Configuration conf) throws IOException { - mapper = new ObjectMapper(); + mapper = JacksonUtil.createBasicObjectMapper(); this.clazz = clazz; InputStream input = new PossiblyDecompressedInputStream(path, conf); - jsonParser = mapper.getFactory().createParser(input); + jsonParser = mapper.createParser(input); } /** @@ -62,9 +63,9 @@ public JsonObjectMapperParser(Path path, Class clazz, */ public JsonObjectMapperParser(InputStream input, Class clazz) throws IOException { - mapper = new ObjectMapper(); + mapper = JacksonUtil.createBasicObjectMapper(); this.clazz = clazz; - jsonParser = mapper.getFactory().createParser(input); + jsonParser = mapper.createParser(input); } /** diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java index 747b141fd98be..e0caa18fff792 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java @@ -30,6 +30,7 @@ import org.apache.hadoop.tools.rumen.datatypes.DataType; import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer; import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer; +import org.apache.hadoop.util.JacksonUtil; /** * Simple wrapper around {@link JsonGenerator} to write objects in JSON format. 
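The rumen changes above drop the intermediate JsonFactory and call createParser/createGenerator directly on the ObjectMapper. A minimal sketch of that call pattern follows, assuming a Jackson 2 databind version that exposes these convenience methods on ObjectMapper (which the edits above rely on); the class and method names are illustrative only, not part of this patch:

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    import com.fasterxml.jackson.core.JsonEncoding;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public final class MapperStreamingSketch {
      private MapperStreamingSketch() {
      }

      // Previously: mapper.getFactory().createParser(in); the mapper itself is now the entry point.
      static JsonParser openParser(ObjectMapper mapper, InputStream in) throws IOException {
        return mapper.createParser(in);
      }

      // Same idea for output: create the generator straight from the mapper.
      static JsonGenerator openGenerator(ObjectMapper mapper, OutputStream out) throws IOException {
        JsonGenerator gen = mapper.createGenerator(out, JsonEncoding.UTF8);
        gen.useDefaultPrettyPrinter();
        return gen;
      }
    }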
@@ -39,7 +40,7 @@ public class JsonObjectMapperWriter implements Closeable { private JsonGenerator writer; public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule( @@ -53,7 +54,7 @@ public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws I // register the module with the object-mapper mapper.registerModule(module); - writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8); + writer = mapper.createGenerator(output, JsonEncoding.UTF8); if (prettyPrint) { writer.useDefaultPrettyPrinter(); } diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java index ab6f8942e7cfb..3e2ecb2c4ba92 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java @@ -44,6 +44,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.tools.rumen.Anonymizer; import org.apache.hadoop.tools.rumen.datatypes.DataType; +import org.apache.hadoop.util.JacksonUtil; /** * A pool of states. States used by {@link DataType}'s can be managed the @@ -206,7 +207,7 @@ private boolean reloadState(Path stateFile, Configuration configuration) } private void read(DataInput in) throws IOException { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL", "", "")); @@ -216,7 +217,7 @@ private void read(DataInput in) throws IOException { // register the module with the object-mapper mapper.registerModule(module); - JsonParser parser = mapper.getFactory().createParser((InputStream)in); + JsonParser parser = mapper.createParser((InputStream)in); StatePool statePool = mapper.readValue(parser, StatePool.class); this.setStates(statePool.getStates()); parser.close(); @@ -273,7 +274,7 @@ public void persist() throws IOException { private void write(DataOutput out) throws IOException { // This is just a JSON experiment System.out.println("Dumping the StatePool's in JSON format."); - ObjectMapper outMapper = new ObjectMapper(); + ObjectMapper outMapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL", "", "")); @@ -283,9 +284,8 @@ private void write(DataOutput out) throws IOException { // register the module with the object-mapper outMapper.registerModule(module); - JsonFactory outFactory = outMapper.getFactory(); JsonGenerator jGen = - outFactory.createGenerator((OutputStream)out, JsonEncoding.UTF8); + outMapper.createGenerator((OutputStream)out, JsonEncoding.UTF8); jGen.useDefaultPrettyPrinter(); jGen.writeObject(this); diff --git a/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java b/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java index 187251900b75d..db6d47cf0726e 100644 --- a/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java +++ b/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java @@ -23,7 +23,6 @@ import 
java.util.List; import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; @@ -141,9 +140,8 @@ public static void main(String[] args) throws IOException { Path goldFilePath = new Path(filePath.getParent(), "gold"+testName); ObjectMapper mapper = new ObjectMapper(); - JsonFactory factory = mapper.getFactory(); FSDataOutputStream ostream = lfs.create(goldFilePath, true); - JsonGenerator gen = factory.createGenerator((OutputStream)ostream, + JsonGenerator gen = mapper.createGenerator((OutputStream)ostream, JsonEncoding.UTF8); gen.useDefaultPrettyPrinter(); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java index 2dc09de665368..3fe7be846a68b 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java @@ -23,6 +23,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.tools.rumen.JobTraceReader; import org.apache.hadoop.tools.rumen.LoggedJob; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ReservationId; @@ -122,15 +123,14 @@ public void startAM() throws YarnException, IOException { * Parse workload from a SLS trace file. */ private void startAMFromSLSTrace(String inputTrace) throws IOException { - JsonFactory jsonF = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); try (Reader input = new InputStreamReader( new FileInputStream(inputTrace), StandardCharsets.UTF_8)) { JavaType type = mapper.getTypeFactory(). 
constructMapType(Map.class, String.class, String.class); Iterator> jobIter = mapper.readValues( - jsonF.createParser(input), type); + mapper.createParser(input), type); while (jobIter.hasNext()) { try { diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java index 2cdfe236c410d..9b25275912377 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java @@ -35,7 +35,6 @@ import java.util.TreeMap; import java.util.TreeSet; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.commons.cli.CommandLine; @@ -44,6 +43,7 @@ import org.apache.commons.cli.Options; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.sls.utils.SLSUtils; @Private @@ -126,10 +126,10 @@ private static void generateSLSLoadFile(String inputFile, String outputFile) StandardCharsets.UTF_8)) { try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter(); Iterator i = mapper.readValues( - new JsonFactory().createParser(input), Map.class); + mapper.createParser(input), Map.class); while (i.hasNext()) { Map m = i.next(); output.write(writer.writeValueAsString(createSLSJob(m)) + EOL); @@ -143,7 +143,7 @@ private static void generateSLSNodeFile(String outputFile) throws IOException { try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter(); for (Map.Entry> entry : rackNodeMap.entrySet()) { Map rack = new LinkedHashMap(); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java index 18b1c034bdf3a..e0767557fcf8d 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.math3.distribution.AbstractRealDistribution; @@ -87,8 +88,9 @@ public SynthTraceJobProducer(Configuration conf, Path path) this.rand = new JDKRandomGenerator(); JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); - jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); - ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build()); + jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); + + ObjectMapper mapper =
JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); FileSystem ifs = path.getFileSystem(conf); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java index af0b4f6caf3ab..676ef13b5a8e4 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java @@ -34,7 +34,6 @@ import java.util.Map; import java.util.Set; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -45,6 +44,7 @@ import org.apache.hadoop.tools.rumen.LoggedJob; import org.apache.hadoop.tools.rumen.LoggedTask; import org.apache.hadoop.tools.rumen.LoggedTaskAttempt; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceInformation; @@ -120,12 +120,11 @@ public static Set parseNodesFromRumenTrace( public static Set parseNodesFromSLSTrace( String jobTrace) throws IOException { Set nodeSet = new HashSet<>(); - JsonFactory jsonF = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); Reader input = new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8); try { - Iterator i = mapper.readValues(jsonF.createParser(input), Map.class); + Iterator i = mapper.readValues(mapper.createParser(input), Map.class); while (i.hasNext()) { addNodes(nodeSet, i.next()); } @@ -167,12 +166,11 @@ private static void addNodes(Set nodeSet, public static Set parseNodesFromNodeFile( String nodeFile, Resource nmDefaultResource) throws IOException { Set nodeSet = new HashSet<>(); - JsonFactory jsonF = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); Reader input = new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8); try { - Iterator i = mapper.readValues(jsonF.createParser(input), Map.class); + Iterator i = mapper.readValues(mapper.createParser(input), Map.class); while (i.hasNext()) { Map jsonE = i.next(); String rack = "/" + jsonE.get("rack"); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java index dd12a10f94612..f690808f8e143 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java @@ -18,6 +18,7 @@ package org.apache.hadoop.yarn.sls; import org.apache.commons.math3.random.JDKRandomGenerator; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.ExecutionType; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.sls.synthetic.SynthJob; @@ -60,7 +61,7 @@ public void testWorkloadGenerateTime() JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); - ObjectMapper mapper = new 
ObjectMapper(jsonFactoryBuilder.build()); + ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); SynthTraceJobProducer.Workload wl = mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class); @@ -181,7 +182,7 @@ public void testSample() throws IOException { JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); - ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build()); + ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); JDKRandomGenerator rand = new JDKRandomGenerator(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java index ac8dbbac61d35..ea7a0ecdef669 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java @@ -28,6 +28,7 @@ import java.util.Properties; import java.util.Random; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.appcatalog.model.AppEntry; import org.apache.hadoop.yarn.appcatalog.model.AppStoreEntry; import org.apache.hadoop.yarn.appcatalog.model.Application; @@ -57,6 +58,18 @@ public class AppCatalogSolrClient { private static final Logger LOG = LoggerFactory.getLogger(AppCatalogSolrClient.class); private static String urlString; + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER; + + static { + OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + public AppCatalogSolrClient() { // Locate Solr URL ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); @@ -146,8 +159,6 @@ public List search(String keyword) { public List listAppEntries() { List list = new ArrayList(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); SolrClient solr = getSolrClient(); SolrQuery query = new SolrQuery(); @@ -164,7 +175,7 @@ public List listAppEntries() { entry.setId(d.get("id").toString()); entry.setName(d.get("name_s").toString()); entry.setApp(d.get("app_s").toString()); - entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(), + entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(), Service.class)); list.add(entry); } @@ -176,8 +187,6 @@ public List listAppEntries() { public AppStoreEntry findAppStoreEntry(String id) { AppStoreEntry entry = new AppStoreEntry(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); SolrClient solr = getSolrClient(); SolrQuery query = new SolrQuery(); @@ -197,7 +206,7 @@ public AppStoreEntry findAppStoreEntry(String id) { entry.setDesc(d.get("desc_s").toString()); entry.setLike(Integer.parseInt(d.get("like_i").toString())); entry.setDownload(Integer.parseInt(d.get("download_i").toString())); - Service yarnApp = mapper.readValue(d.get("yarnfile_s").toString(), + Service yarnApp = OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(), Service.class); String name; try { @@ -222,9 +231,6 @@ public AppStoreEntry findAppStoreEntry(String id) { public AppEntry findAppEntry(String id) { AppEntry entry = new AppEntry(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - SolrClient solr = getSolrClient(); SolrQuery query = new SolrQuery(); query.setQuery("id:" + id); @@ -240,7 +246,7 @@ public AppEntry findAppEntry(String id) { entry.setId(d.get("id").toString()); entry.setApp(d.get("app_s").toString()); entry.setName(d.get("name_s").toString()); - entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(), + entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(), Service.class)); } } catch (SolrServerException | IOException e) { @@ -252,8 +258,6 @@ public AppEntry findAppEntry(String id) { public void deployApp(String id, Service service) throws SolrServerException, IOException { long download = 0; - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); Collection docs = new HashSet(); SolrClient solr = getSolrClient(); // Find application information from AppStore @@ -287,7 +291,7 @@ public void deployApp(String id, Service service) throws SolrServerException, request.addField("id", name); request.addField("name_s", name); request.addField("app_s", entry.getOrg()+"/"+entry.getName()); - request.addField("yarnfile_s", mapper.writeValueAsString(service)); + request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service)); docs.add(request); } @@ -326,8 +330,6 @@ public void deleteApp(String id) { public void register(Application app) throws IOException { Collection docs = new HashSet(); SolrClient solr = getSolrClient(); - ObjectMapper mapper = new 
ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try { SolrInputDocument buffer = new SolrInputDocument(); buffer.setField("id", java.util.UUID.randomUUID().toString() @@ -343,10 +345,10 @@ public void register(Application app) throws IOException { buffer.setField("download_i", 0); // Keep only YARN data model for yarnfile field - String yarnFile = mapper.writeValueAsString(app); - LOG.info("app:"+yarnFile); - Service yarnApp = mapper.readValue(yarnFile, Service.class); - buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp)); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); + LOG.info("app:{}", yarnFile); + Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class); + buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp)); docs.add(buffer); commitSolrChanges(solr, docs); @@ -359,8 +361,6 @@ public void register(Application app) throws IOException { protected void register(AppStoreEntry app) throws IOException { Collection docs = new HashSet(); SolrClient solr = getSolrClient(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try { SolrInputDocument buffer = new SolrInputDocument(); buffer.setField("id", java.util.UUID.randomUUID().toString() @@ -376,10 +376,10 @@ protected void register(AppStoreEntry app) throws IOException { buffer.setField("download_i", app.getDownload()); // Keep only YARN data model for yarnfile field - String yarnFile = mapper.writeValueAsString(app); - LOG.info("app:"+yarnFile); - Service yarnApp = mapper.readValue(yarnFile, Service.class); - buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp)); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); + LOG.info("app:{}", yarnFile); + Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class); + buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp)); docs.add(buffer); commitSolrChanges(solr, docs); @@ -391,8 +391,6 @@ protected void register(AppStoreEntry app) throws IOException { public void upgradeApp(Service service) throws IOException, SolrServerException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); Collection docs = new HashSet(); SolrClient solr = getSolrClient(); if (service!=null) { @@ -420,7 +418,7 @@ public void upgradeApp(Service service) throws IOException, request.addField("id", name); request.addField("name_s", name); request.addField("app_s", app); - request.addField("yarnfile_s", mapper.writeValueAsString(service)); + request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service)); docs.add(request); } try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java index 185b1c8ddebd5..57c4b353d099c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java @@ -23,6 +23,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.appcatalog.model.AppEntry; import org.apache.hadoop.yarn.service.api.records.Service; import org.apache.hadoop.yarn.service.api.records.ServiceState; @@ -46,6 +47,19 @@ public class YarnServiceClient { private static final Logger LOG = LoggerFactory.getLogger(YarnServiceClient.class); + + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. + */ + private static final ObjectMapper OBJECT_MAPPER; + + static { + OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + private static Configuration conf = new Configuration(); private static ClientConfig getClientConfig() { ClientConfig config = new DefaultClientConfig(); @@ -66,8 +80,6 @@ public YarnServiceClient() { } public void createApp(Service app) { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); ClientResponse response; try { boolean useKerberos = UserGroupInformation.isSecurityEnabled(); @@ -90,7 +102,7 @@ public void createApp(Service app) { app.setKerberosPrincipal(kerberos); } response = asc.getApiClient().post(ClientResponse.class, - mapper.writeValueAsString(app)); + OBJECT_MAPPER.writeValueAsString(app)); if (response.getStatus() >= 299) { String message = response.getEntity(String.class); throw new RuntimeException("Failed : HTTP error code : " @@ -119,10 +131,8 @@ public void deleteApp(String appInstanceId) { } public void restartApp(Service app) throws JsonProcessingException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = app.getName(); - String yarnFile = mapper.writeValueAsString(app); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); ClientResponse response; try { response = asc.getApiClient(asc.getServicePath(appInstanceId)) @@ -139,10 +149,8 @@ public void restartApp(Service app) throws JsonProcessingException { } public void stopApp(Service app) throws JsonProcessingException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = app.getName(); - String yarnFile = mapper.writeValueAsString(app); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); ClientResponse response; try { response = asc.getApiClient(asc.getServicePath(appInstanceId)) @@ -159,14 +167,12 @@ public void stopApp(Service app) throws JsonProcessingException { } public void getStatus(AppEntry entry) { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = entry.getName(); Service app = null; try { String yarnFile = asc.getApiClient(asc.getServicePath(appInstanceId)) .get(String.class); - app = mapper.readValue(yarnFile, Service.class); + app = OBJECT_MAPPER.readValue(yarnFile, Service.class); entry.setYarnfile(app); } catch (UniformInterfaceException | IOException e) { 
LOG.error("Error in fetching application status: ", e); @@ -174,11 +180,9 @@ public void getStatus(AppEntry entry) { } public void upgradeApp(Service app) throws JsonProcessingException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = app.getName(); app.setState(ServiceState.EXPRESS_UPGRADING); - String yarnFile = mapper.writeValueAsString(app); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); ClientResponse response; try { response = asc.getApiClient(asc.getServicePath(appInstanceId)) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java index 254d6c5d37954..cf3d785a22ea6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java @@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,9 +62,10 @@ public class JsonSerDeser { @SuppressWarnings("deprecation") public JsonSerDeser(Class classType) { this.classType = classType; - this.mapper = new ObjectMapper(); + this.mapper = JacksonUtil.createBasicObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false); + mapper.configure(SerializationFeature.INDENT_OUTPUT, true); } public JsonSerDeser(Class classType, PropertyNamingStrategy namingStrategy) { @@ -231,7 +233,6 @@ private void writeJsonAsBytes(T instance, * @throws JsonProcessingException parse problems */ public String toJson(T instance) throws JsonProcessingException { - mapper.configure(SerializationFeature.INDENT_OUTPUT, true); return mapper.writeValueAsString(instance); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java index e7ec2d6f5e7c2..ac30480fd8856 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.service.exceptions.BadConfigException; import java.io.IOException; @@ -41,6 +42,18 @@ 
@JsonInclude(value = JsonInclude.Include.NON_NULL) public class PublishedConfiguration { + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. + */ + private static final ObjectMapper OBJECT_MAPPER; + + static { + OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + OBJECT_MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true); + } + public String description; public long updated; @@ -154,9 +167,7 @@ public Properties asProperties() { * @throws IOException marshalling failure */ public String asJson() throws IOException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(SerializationFeature.INDENT_OUTPUT, true); - String json = mapper.writeValueAsString(entries); + String json = OBJECT_MAPPER.writeValueAsString(entries); return json; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java index b92f4e412347c..62d9c48737e64 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java @@ -38,6 +38,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; @@ -365,8 +366,8 @@ public long getLastModifiedTime() { protected void prepareForWrite() throws IOException{ this.stream = createLogFileStream(fs, logPath); - this.jsonGenerator = new JsonFactory().createGenerator( - (OutputStream)stream); + this.jsonGenerator = JacksonUtil.createBasicJsonFactory() + .createGenerator((OutputStream)stream); this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n")); this.lastModifiedTime = Time.monotonicNow(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java index 6351cb69c82e7..99fccb6b253ae 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java @@ -29,7 +29,6 @@ import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -97,8 +96,7 @@ public static Credentials readCredentialsFromConfigFile(Path configFile, // Parse the JSON and create the Tokens/Credentials. 
ObjectMapper mapper = new ObjectMapper(); - JsonFactory factory = mapper.getFactory(); - JsonParser parser = factory.createParser(contents); + JsonParser parser = mapper.createParser(contents); JsonNode rootNode = mapper.readTree(parser); Credentials credentials = new Credentials(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java index ad80a2eefe5bd..e25e98e8b59d7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java @@ -30,6 +30,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.webapp.view.DefaultPage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,7 +43,7 @@ @InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"}) public abstract class Controller implements Params { public static final Logger LOG = LoggerFactory.getLogger(Controller.class); - static final ObjectMapper jsonMapper = new ObjectMapper(); + static final ObjectMapper jsonMapper = JacksonUtil.createBasicObjectMapper(); @RequestScoped public static class RequestContext{ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java index fdafcf0cd1c9d..6d782384561bc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.util.JacksonUtil; /** * A utility class providing methods for serializing and deserializing @@ -38,14 +39,8 @@ public class GenericObjectMapper { private static final byte[] EMPTY_BYTES = new byte[0]; - public static final ObjectReader OBJECT_READER; - public static final ObjectWriter OBJECT_WRITER; - - static { - ObjectMapper mapper = new ObjectMapper(); - OBJECT_READER = mapper.reader(Object.class); - OBJECT_WRITER = mapper.writer(); - } + public static final ObjectReader OBJECT_READER = JacksonUtil.createReaderFor(Object.class); + public static final ObjectWriter OBJECT_WRITER = JacksonUtil.createBasicWriter(); /** * Serializes an Object into a byte array. 
Along with {@link #read(byte[])}, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java index 794ef9d9a4326..cbbc33706db34 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java @@ -43,6 +43,7 @@ import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceConfiguration; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceFile; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord; @@ -135,7 +136,7 @@ public class AuxServices extends AbstractService this.dirsHandler = nmContext.getLocalDirsHandler(); this.delService = deletionService; this.userUGI = getRemoteUgi(); - this.mapper = new ObjectMapper(); + this.mapper = JacksonUtil.createBasicObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); // Obtain services from configuration in init() } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java index cc2ded4422b71..a8fa09248e778 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java @@ -28,11 +28,11 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; @@ -58,9 +58,8 @@ public void initialize(Configuration conf) { + " we have to set the configuration:" + YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH); } - ObjectMapper mapper = new ObjectMapper(); try { - networkTagMapping = mapper.readValue(new File(mappingJsonFile), + networkTagMapping = JacksonUtil.createBasicObjectMapper().readValue(new 
File(mappingJsonFile), NetworkTagMapping.class); } catch (Exception e) { throw new YarnRuntimeException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java index 2c327c04ebaf1..86bb5113dd26b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.concurrent.HadoopExecutors; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -91,6 +92,7 @@ import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN; import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL; import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*; + /** *

This class is an extension of {@link OCIContainerRuntime} that uses the * native {@code container-executor} binary via a @@ -206,7 +208,7 @@ public void initialize(Configuration configuration, Context nmCtx) imageTagToManifestPlugin.init(conf); manifestToResourcesPlugin = chooseManifestToResourcesPlugin(); manifestToResourcesPlugin.init(conf); - mapper = new ObjectMapper(); + mapper = JacksonUtil.createBasicObjectMapper(); defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME); allowedNetworks.clear(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java index 457939c9a1740..add0cd10c6c49 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.concurrent.HadoopExecutors; import java.io.BufferedReader; @@ -64,8 +65,14 @@ public class ImageTagToManifestPlugin extends AbstractService implements RuncImageTagToManifestPlugin { + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + private Map manifestCache; - private ObjectMapper objMapper; private AtomicReference> localImageToHashCache = new AtomicReference<>(new HashMap<>()); private AtomicReference> hdfsImageToHashCache = @@ -107,7 +114,7 @@ public ImageManifest getManifestFromImageTag(String imageTag) } byte[] bytes = IOUtils.toByteArray(input); - manifest = objMapper.readValue(bytes, ImageManifest.class); + manifest = OBJECT_MAPPER.readValue(bytes, ImageManifest.class); manifestCache.put(hash, manifest); return manifest; @@ -279,7 +286,6 @@ protected void serviceInit(Configuration configuration) throws Exception { DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/"; int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE, DEFAULT_NUM_MANIFESTS_TO_CACHE); - this.objMapper = new ObjectMapper(); this.manifestCache = Collections.synchronizedMap( new LRUCache(numManifestsToCache, 0.75f)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java index 24cb34327b745..dde3026be5528 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java @@ -21,6 +21,7 @@ import org.apache.hadoop.classification.VisibleForTesting; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -73,6 +74,13 @@ public class ResourceProfilesManagerImpl implements ResourceProfilesManager { + " (by setting " + YarnConfiguration.RM_RESOURCE_PROFILES_ENABLED + " to true)"; + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + public ResourceProfilesManagerImpl() { ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); readLock = lock.readLock(); @@ -105,8 +113,7 @@ private void loadProfiles() throws IOException { resourcesFile = tmp.getPath(); } } - ObjectMapper mapper = new ObjectMapper(); - Map data = mapper.readValue(new File(resourcesFile), Map.class); + Map data = OBJECT_MAPPER.readValue(new File(resourcesFile), Map.class); Iterator iterator = data.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = (Map.Entry) iterator.next(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java index 174577099e48c..03368950692e5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java @@ -26,7 +26,9 @@ import java.util.ArrayList; import java.util.List; +import com.fasterxml.jackson.databind.DeserializationFeature; import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule; import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleAction; import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleActions; @@ -50,6 +52,13 @@ public class MappingRuleCreator { private static final String ALL_USER = "*"; private static Logger LOG = LoggerFactory.getLogger(MappingRuleCreator.class); + /** + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + public MappingRulesDescription getMappingRulesFromJsonFile(String filePath) throws IOException { byte[] fileContents = Files.readAllBytes(Paths.get(filePath)); @@ -58,14 +67,12 @@ public MappingRulesDescription getMappingRulesFromJsonFile(String filePath) MappingRulesDescription getMappingRulesFromJson(byte[] contents) throws IOException { - ObjectMapper objectMapper = new ObjectMapper(); - return objectMapper.readValue(contents, MappingRulesDescription.class); + return OBJECT_MAPPER.readValue(contents, MappingRulesDescription.class); } MappingRulesDescription getMappingRulesFromJson(String contents) throws IOException { - ObjectMapper objectMapper = new ObjectMapper(); - return objectMapper.readValue(contents, MappingRulesDescription.class); + return OBJECT_MAPPER.readValue(contents, MappingRulesDescription.class); } public List getMappingRulesFromFile(String jsonPath) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java index 108d52bc40c36..6c963775be770 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath; @@ -52,9 +53,11 @@ public class LegacyMappingRuleToJson { public static final String JSON_NODE_MATCHES = "matches"; /** - * Our internal object mapper, used to create JSON nodes. + * It is more performant to reuse ObjectMapper instances but keeping the instance + * private makes it harder for someone to reconfigure it which might have unwanted + * side effects. */ - private ObjectMapper objectMapper = new ObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); /** * Collection to store the legacy group mapping rule strings. 
@@ -138,8 +141,8 @@ public LegacyMappingRuleToJson setAppNameMappingRules( */ public String convert() { //creating the basic JSON config structure - ObjectNode rootNode = objectMapper.createObjectNode(); - ArrayNode rulesNode = objectMapper.createArrayNode(); + ObjectNode rootNode = OBJECT_MAPPER.createObjectNode(); + ArrayNode rulesNode = OBJECT_MAPPER.createArrayNode(); rootNode.set("rules", rulesNode); //Processing and adding all the user group mapping rules @@ -158,7 +161,7 @@ public String convert() { } try { - return objectMapper + return OBJECT_MAPPER .writerWithDefaultPrettyPrinter() .writeValueAsString(rootNode); } catch (JsonProcessingException e) { @@ -246,7 +249,7 @@ private String[] splitRule(String rule, int expectedParts) { * @return The object node with the preset fields */ private ObjectNode createDefaultRuleNode(String type) { - return objectMapper + return OBJECT_MAPPER .createObjectNode() .put("type", type) //All legacy rule fallback to place to default diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java index d801652377983..a4b9821344829 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java @@ -29,9 +29,11 @@ import java.util.List; import java.util.Map; +import com.fasterxml.jackson.databind.SerializationFeature; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -327,14 +329,14 @@ private void performRuleConversion(FairScheduler fs) placementConverter.convertPlacementPolicy(placementManager, ruleHandler, capacitySchedulerConfig, usePercentages); - ObjectMapper mapper = new ObjectMapper(); + final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); // close output stream if we write to a file, leave it open otherwise if (!consoleMode && rulesToFile) { mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, true); } else { mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); } - ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter()); + ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter(); if (consoleMode && rulesToFile) { System.out.println("======= " + MAPPING_RULES_JSON + " ======="); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java index 1f4a9f42a9f8c..7e49bd19aef73 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java @@ -42,6 +42,7 @@ import org.apache.hadoop.service.ServiceOperations; import org.apache.hadoop.ipc.CallerContext; import org.apache.hadoop.util.ApplicationClassLoader; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Time; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -320,7 +321,7 @@ protected void serviceStart() throws Exception { } } - objMapper = new ObjectMapper(); + objMapper = JacksonUtil.createBasicObjectMapper(); objMapper.setAnnotationIntrospector( new JaxbAnnotationIntrospector(TypeFactory.defaultInstance())); jsonFactory = new MappingJsonFactory(objMapper); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java index f84eeebbf0c8e..6334fe05e1add 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java @@ -25,6 +25,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils; @@ -298,7 +299,7 @@ public void close() throws IOException { } }; } - static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); @SuppressWarnings("unchecked") private V getEntityForKey(byte[] key) throws IOException { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java index cb887fe264fab..d3885c5bc8fb4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java @@ -18,7 +18,6 @@ package org.apache.hadoop.yarn.server.timeline; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.util.MinimalPrettyPrinter; import 
com.fasterxml.jackson.databind.ObjectMapper; @@ -31,6 +30,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; @@ -108,7 +108,7 @@ static FSDataOutputStream createLogFile(Path logPath, FileSystem fs) } static ObjectMapper createObjectMapper() { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); mapper.setAnnotationIntrospector( new JaxbAnnotationIntrospector(TypeFactory.defaultInstance())); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); @@ -230,10 +230,9 @@ static TimelineEvent createEvent(long timestamp, String type, Map Date: Sun, 21 Jul 2024 19:13:05 +0100 Subject: [PATCH 2/6] more changes fix checkstyle --- .../apache/hadoop/ipc/DecayRpcScheduler.java | 3 +- .../hadoop/metrics2/MetricsJsonBuilder.java | 2 +- .../org/apache/hadoop/util/JacksonUtil.java | 43 +++---------------- .../apache/hadoop/util/JsonSerialization.java | 8 ++-- .../server/datanode/DiskBalancerWorkItem.java | 3 +- .../datanode/DiskBalancerWorkStatus.java | 7 ++- .../hdfs/util/CombinedHostsFileReader.java | 3 +- .../blockmanagement/SlowDiskTracker.java | 2 +- .../blockmanagement/SlowPeerTracker.java | 3 +- .../datanode/fsdataset/impl/FsVolumeImpl.java | 6 +-- .../fsdataset/impl/ProvidedVolumeImpl.java | 1 - .../server/diskbalancer/command/Command.java | 1 - .../connectors/JsonNodeConnector.java | 2 +- .../datamodel/DiskBalancerCluster.java | 2 +- .../datamodel/DiskBalancerVolume.java | 2 +- .../server/diskbalancer/planner/NodePlan.java | 2 +- .../fs/azure/NativeAzureFileSystem.java | 1 - .../fs/azure/RemoteSASKeyGeneratorImpl.java | 3 +- .../fs/azure/RemoteWasbAuthorizerImpl.java | 8 ++-- .../hadoop/tools/rumen/state/StatePool.java | 1 - .../org/apache/hadoop/yarn/sls/AMRunner.java | 4 -- .../sls/synthetic/SynthTraceJobProducer.java | 2 +- .../component/instance/ComponentInstance.java | 3 +- .../api/impl/FileSystemTimelineWriter.java | 3 +- .../client/api/impl/TimelineClientImpl.java | 3 +- .../yarn/util/DockerClientConfigHandler.java | 5 ++- .../yarn/util/timeline/TimelineUtils.java | 4 +- .../apache/hadoop/yarn/webapp/Controller.java | 4 +- .../server/timeline/GenericObjectMapper.java | 5 ++- .../placement/MappingRuleCreator.java | 1 - .../FSConfigToCSConfigConverter.java | 2 - .../storage/FileSystemTimelineReaderImpl.java | 6 +-- 32 files changed, 50 insertions(+), 95 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java index 756f31e842f45..0f76ad68d0694 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java @@ -38,7 +38,6 @@ import javax.management.ObjectName; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.security.UserGroupInformation; @@ -147,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler, public static final Logger LOG = LoggerFactory.getLogger(DecayRpcScheduler.class); - private static final 
ObjectWriter WRITER = JacksonUtil.createBasicWriter(); + private static final ObjectWriter WRITER = JacksonUtil.createBasicObjectMapper().writer(); // Track the decayed and raw (no decay) number of calls for each schedulable // identity from all previous decay windows: idx 0 for decayed call cost and diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java index 2a38a9906f2bd..3bcd23cc81ce8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java @@ -47,7 +47,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder { private Map innerMetrics = new LinkedHashMap<>(); private static final ObjectWriter WRITER = - JacksonUtil.createBasicWriter(); + JacksonUtil.createBasicObjectMapper().writer(); /** * Build an instance. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java index f417c35cd35c6..13ee7290d75f4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java @@ -18,10 +18,7 @@ package org.apache.hadoop.util; import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.json.JsonMapper; /** @@ -31,16 +28,14 @@ */ public class JacksonUtil { - private static final JsonFactory DEFAULT_JSON_FACTORY = createBasicJsonFactory(); - private static final ObjectMapper DEFAULT_OBJECT_MAPPER = createBasicObjectMapper(); - /** * Creates a new {@link JsonFactory} instance with basic configuration. * * @return an {@link JsonFactory} with basic configuration */ public static JsonFactory createBasicJsonFactory() { - // do not expose DEFAULT_JSON_FACTORY because we don't want anyone to access it and modify it + // deliberately return a new instance instead of sharing one because we can't trust + // that users won't modify this instance return new JsonFactory(); } @@ -50,8 +45,9 @@ public static JsonFactory createBasicJsonFactory() { * @return an {@link ObjectMapper} with basic configuration */ public static ObjectMapper createBasicObjectMapper() { - // do not expose DEFAULT_OBJECT_MAPPER because we don't want anyone to access it and modify it - return JsonMapper.builder(DEFAULT_JSON_FACTORY).build(); + // deliberately return a new instance instead of sharing one because we can't trust + // that users won't modify this instance + return JsonMapper.builder(createBasicJsonFactory()).build(); } /** @@ -65,32 +61,5 @@ public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) { return JsonMapper.builder(jsonFactory).build(); } - /** - * Creates a new {@link ObjectReader} for the provided type. 
- * - * @param type a class instance - * @return an {@link ObjectReader} with basic configuration - */ - public static ObjectReader createReaderFor(final Class type) { - return DEFAULT_OBJECT_MAPPER.readerFor(type); - } - - /** - * Creates a new {@link ObjectReader} for the provided type. - * - * @param type a {@link JavaType} instance - * @return an {@link ObjectReader} with basic configuration - */ - public static ObjectReader createReaderFor(final JavaType type) { - return DEFAULT_OBJECT_MAPPER.readerFor(type); - } - - /** - * Creates a new {@link ObjectWriter} with basic configuration. - * - * @return an {@link ObjectWriter} with basic configuration - */ - public static ObjectWriter createBasicWriter() { - return DEFAULT_OBJECT_MAPPER.writer(); - } + private JacksonUtil() {} } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java index 0d180860a1291..d6d645eaefe2c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java @@ -76,11 +76,9 @@ public class JsonSerialization { private final Class classType; private final ObjectMapper mapper; - private static final ObjectWriter WRITER = JacksonUtil - .createBasicObjectMapper() - .writerWithDefaultPrettyPrinter(); - - private static final ObjectReader MAP_READER = JacksonUtil.createReaderFor(Map.class); + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + private static final ObjectWriter WRITER = OBJECT_MAPPER.writerWithDefaultPrettyPrinter(); + private static final ObjectReader MAP_READER = OBJECT_MAPPER.readerFor(Map.class); /** * @return an ObjectWriter which pretty-prints its output diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java index 8d4a106b18307..ac7476977bd47 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java @@ -37,8 +37,7 @@ @JsonInclude(JsonInclude.Include.NON_DEFAULT) public class DiskBalancerWorkItem { private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); - private static final ObjectReader READER = - JacksonUtil.createReaderFor(DiskBalancerWorkItem.class); + private static final ObjectReader READER = MAPPER.readerFor(DiskBalancerWorkItem.class); private long startTime; private long secondsElapsed; diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java index 07dc3b8def0b3..7ea6e9d885e9e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java @@ -44,10 +44,9 @@ public class DiskBalancerWorkStatus { private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT = 
JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT); private static final ObjectReader READER_WORKSTATUS = - JacksonUtil.createReaderFor(DiskBalancerWorkStatus.class); - private static final ObjectReader READER_WORKENTRY = JacksonUtil.createReaderFor( - defaultInstance().constructCollectionType(List.class, - DiskBalancerWorkEntry.class)); + MAPPER.readerFor(DiskBalancerWorkStatus.class); + private static final ObjectReader READER_WORKENTRY = MAPPER.readerFor( + defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class)); private final List currentState; private Result result; diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java index 156aaffee0b86..c3ac22b7db38f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.util; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; @@ -111,7 +110,7 @@ private CombinedHostsFileReader() { if (tryOldFormat) { ObjectReader objectReader = - JacksonUtil.createReaderFor(DatanodeAdminProperties.class); + JacksonUtil.createBasicObjectMapper().readerFor(DatanodeAdminProperties.class); List all = new ArrayList<>(); try (Reader input = new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java index a4629d484f009..d3621a6097dee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java @@ -71,7 +71,7 @@ public class SlowDiskTracker { /** * ObjectWriter to convert JSON reports to String. */ - private static final ObjectWriter WRITER = JacksonUtil.createBasicWriter(); + private static final ObjectWriter WRITER = JacksonUtil.createBasicObjectMapper().writer(); /** * Number of disks to include in JSON report per operation. 
We will return diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java index 6ecc9ff326858..719c72afc7206 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.blockmanagement; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap; @@ -76,7 +75,7 @@ public class SlowPeerTracker { /** * ObjectWriter to convert JSON reports to String. */ - private static final ObjectWriter WRITER = JacksonUtil.createBasicWriter(); + private static final ObjectWriter WRITER = JacksonUtil.createBasicObjectMapper().writer(); /** * Number of nodes to include in JSON report. We will return nodes with diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java index 29522bd2bb78d..dc8dcb2d42737 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java @@ -104,9 +104,9 @@ public class FsVolumeImpl implements FsVolumeSpi { public static final Logger LOG = LoggerFactory.getLogger(FsVolumeImpl.class); - private static final ObjectWriter WRITER = - JacksonUtil.createBasicObjectMapper().writerWithDefaultPrettyPrinter(); - private static final ObjectReader READER = JacksonUtil.createReaderFor(BlockIteratorState.class); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); + private static final ObjectWriter WRITER = MAPPER.writerWithDefaultPrettyPrinter(); + private static final ObjectReader READER = MAPPER.readerFor(BlockIteratorState.class); private final FsDatasetImpl dataset; private final String storageID; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java index 1e25c32c40f06..e1d54053cc2ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java @@ -32,7 +32,6 @@ import java.util.concurrent.atomic.AtomicLong; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java index 85e6a3b56a52d..4c75493e9ee69 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.server.diskbalancer.command; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import org.apache.commons.cli.CommandLine; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java index 494a9f3c5ad10..f60a474c2c9b1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java @@ -37,7 +37,7 @@ public class JsonNodeConnector implements ClusterConnector { private static final Logger LOG = LoggerFactory.getLogger(JsonNodeConnector.class); private static final ObjectReader READER = - JacksonUtil.createReaderFor(DiskBalancerCluster.class); + JacksonUtil.createBasicObjectMapper().readerFor(DiskBalancerCluster.class); private final URL clusterURI; /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java index e2a1203f6a357..3eb80fb521be5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java @@ -73,7 +73,7 @@ public class DiskBalancerCluster { private static final Logger LOG = LoggerFactory.getLogger(DiskBalancerCluster.class); private static final ObjectReader READER = - JacksonUtil.createReaderFor(DiskBalancerCluster.class); + JacksonUtil.createBasicObjectMapper().readerFor(DiskBalancerCluster.class); private final Set exclusionList; private final Set inclusionList; private ClusterConnector clusterConnector; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java index 0dd03a7ab49fd..dcfd149bb3ab3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java @@ -34,7 +34,7 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class DiskBalancerVolume { private static final ObjectReader READER = - JacksonUtil.createReaderFor(DiskBalancerVolume.class); + JacksonUtil.createBasicObjectMapper().readerFor(DiskBalancerVolume.class); private static final Logger LOG = LoggerFactory.getLogger(DiskBalancerVolume.class); diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java index f03505bb4c001..59480b190757e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java @@ -41,7 +41,7 @@ public class NodePlan { private long timeStamp; private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); - private static final ObjectReader READER = JacksonUtil.createReaderFor(NodePlan.class); + private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class); private static final ObjectWriter WRITER = MAPPER.writerFor( MAPPER.constructType(NodePlan.class)); /** diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java index e37efd296ef06..2b59452a32d86 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java @@ -97,7 +97,6 @@ import static org.apache.hadoop.fs.azure.NativeAzureFileSystemHelper.*; import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.VisibleForTesting; import com.microsoft.azure.storage.StorageException; diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java index c77971e540f6a..0d5092a82e085 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java @@ -54,7 +54,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl { public static final Logger LOG = LoggerFactory.getLogger(AzureNativeFileSystemStore.class); private static final ObjectReader RESPONSE_READER = JacksonUtil - .createReaderFor(RemoteSASKeyGenerationResponse.class); + .createBasicObjectMapper() + .readerFor(RemoteSASKeyGenerationResponse.class); /** * Configuration parameter name expected in the Configuration diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java index 9abc498966f74..f181d8f645035 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java @@ -20,7 +20,6 @@ import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.commons.lang3.StringUtils; @@ -34,9 +33,9 @@ import org.apache.http.client.utils.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import 
java.util.concurrent.TimeUnit; import java.io.IOException; +import java.util.concurrent.TimeUnit; import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE; @@ -51,7 +50,8 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface { public static final Logger LOG = LoggerFactory .getLogger(RemoteWasbAuthorizerImpl.class); private static final ObjectReader RESPONSE_READER = JacksonUtil - .createReaderFor(RemoteWasbAuthorizerResponse.class); + .createBasicObjectMapper() + .readerFor(RemoteWasbAuthorizerResponse.class); /** * Configuration parameter name expected in the Configuration object to @@ -177,7 +177,7 @@ private boolean authorizeInternal(String wasbAbsolutePath, String accessType, St uriBuilder .addParameter(WASB_ABSOLUTE_PATH_QUERY_PARAM_NAME, wasbAbsolutePath); uriBuilder.addParameter(ACCESS_OPERATION_QUERY_PARAM_NAME, accessType); - if (resourceOwner != null && StringUtils.isNotEmpty(resourceOwner)) { + if (StringUtils.isNotEmpty(resourceOwner)) { uriBuilder.addParameter(WASB_RESOURCE_OWNER_QUERY_PARAM_NAME, resourceOwner); } diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java index 3e2ecb2c4ba92..0c594afc3b72c 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java @@ -30,7 +30,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.Version; diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java index 3fe7be846a68b..0d943471c6f9c 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java @@ -16,7 +16,6 @@ package org.apache.hadoop.yarn.sls; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.conf.Configuration; @@ -45,11 +44,8 @@ import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java index e0767557fcf8d..7c73c83fb17a1 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java @@ -88,7 +88,7 @@ public SynthTraceJobProducer(Configuration conf, Path path) this.rand = new JDKRandomGenerator(); JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); - jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true) + 
jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java index cab4870493561..1e30fbd5ba1ec 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java @@ -26,6 +26,7 @@ import org.apache.hadoop.registry.client.binding.RegistryPathUtils; import org.apache.hadoop.registry.client.types.ServiceRecord; import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; @@ -875,7 +876,7 @@ public void updateContainerStatus(ContainerStatus status) { doRegistryUpdate = false; } } - ObjectMapper mapper = new ObjectMapper(); + final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); try { Map>> ports = null; ports = mapper.readValue(status.getExposedPorts(), diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java index 62d9c48737e64..a7a30c2180f56 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java @@ -61,7 +61,6 @@ import org.apache.hadoop.yarn.exceptions.YarnException; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.util.MinimalPrettyPrinter; import com.fasterxml.jackson.databind.ObjectMapper; @@ -275,7 +274,7 @@ public void flush() throws IOException { } private ObjectMapper createObjectMapper() { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); mapper.setAnnotationIntrospector( new JaxbAnnotationIntrospector(TypeFactory.defaultInstance())); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java index 45da0f444ba0d..0264e40c7be28 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java @@ -30,6 +30,7 @@ import org.apache.commons.cli.Options; import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; @@ -62,7 +63,7 @@ public class TimelineClientImpl extends TimelineClient { private static final Logger LOG = LoggerFactory.getLogger(TimelineClientImpl.class); - private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/"; private static Options opts; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java index 99fccb6b253ae..83b6a09607512 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java @@ -27,6 +27,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier; import com.fasterxml.jackson.core.JsonParser; @@ -95,7 +96,7 @@ public static Credentials readCredentialsFromConfigFile(Path configFile, } // Parse the JSON and create the Tokens/Credentials. 
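A minimal sketch of the read path this hunk converts, assuming contents holds the raw JSON of a Docker client config in the usual {"auths": {...}} layout; only JacksonUtil.createBasicObjectMapper() comes from this patch, everything else is stock Jackson, and the method and variable names below are illustrative:

    import java.io.IOException;
    import java.util.Iterator;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.hadoop.util.JacksonUtil;

    // Illustrative only: parse the config text and walk the registry entries.
    static void walkRegistries(String contents) throws IOException {
      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
      JsonNode auths = mapper.readTree(contents).path("auths");
      Iterator<String> registries = auths.fieldNames();
      while (registries.hasNext()) {
        String registryUrl = registries.next();
        String auth = auths.get(registryUrl).path("auth").asText();
        // the real method wraps registryUrl/auth into a DockerCredentialTokenIdentifier token
      }
    }
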
- ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); JsonParser parser = mapper.createParser(contents); JsonNode rootNode = mapper.readTree(parser); @@ -159,7 +160,7 @@ public static boolean writeDockerCredentialsToPath(File outConfigFile, Credentials credentials) throws IOException { boolean foundDockerCred = false; if (credentials.numberOfTokens() > 0) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ObjectNode rootNode = mapper.createObjectNode(); ObjectNode registryUrlNode = mapper.createObjectNode(); for (Token tk : credentials.getAllTokens()) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java index 14b9b0ceb7d12..9d3504120ea14 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java @@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.VersionInfo; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout; @@ -53,10 +54,9 @@ public class TimelineUtils { "TIMELINE_FLOW_RUN_ID_TAG"; public final static String DEFAULT_FLOW_VERSION = "1"; - private static ObjectMapper mapper; + private static final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); static { - mapper = new ObjectMapper(); YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java index e25e98e8b59d7..44986b11bf1b5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java @@ -43,7 +43,7 @@ @InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"}) public abstract class Controller implements Params { public static final Logger LOG = LoggerFactory.getLogger(Controller.class); - static final ObjectMapper jsonMapper = JacksonUtil.createBasicObjectMapper(); + static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); @RequestScoped public static class RequestContext{ @@ -226,7 +226,7 @@ protected void renderJSON(Object object) { context().rendered = true; context().response.setContentType(MimeType.JSON); try { - jsonMapper.writeValue(writer(), object); + OBJECT_MAPPER.writeValue(writer(), object); } catch (Exception e) { throw new WebAppException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java index 6d782384561bc..d5b6ddfcdbf64 
100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java @@ -39,8 +39,9 @@ public class GenericObjectMapper { private static final byte[] EMPTY_BYTES = new byte[0]; - public static final ObjectReader OBJECT_READER = JacksonUtil.createReaderFor(Object.class); - public static final ObjectWriter OBJECT_WRITER = JacksonUtil.createBasicWriter(); + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + public static final ObjectReader OBJECT_READER = OBJECT_MAPPER.readerFor(Object.class); + public static final ObjectWriter OBJECT_WRITER = OBJECT_MAPPER.writer(); /** * Serializes an Object into a byte array. Along with {@link #read(byte[])}, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java index 03368950692e5..d80a901bd8dce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.List; -import com.fasterxml.jackson.databind.DeserializationFeature; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java index a4b9821344829..6a16aac686d6a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java @@ -29,7 +29,6 @@ import java.util.List; import java.util.Map; -import com.fasterxml.jackson.databind.SerializationFeature; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.authorize.AccessControlList; @@ -57,7 +56,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.databind.ObjectMapper; import 
com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.VisibleForTesting; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java index 9b74c30b9dc14..3fec37d9eb68f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java @@ -105,10 +105,10 @@ String getRootPath() { return rootPath.toString(); } - private static final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); static { - YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER); } /** @@ -127,7 +127,7 @@ String getRootPath() { public static T getTimelineRecordFromJSON( String jsonString, Class clazz) throws JsonGenerationException, JsonMappingException, IOException { - return mapper.readValue(jsonString, clazz); + return OBJECT_MAPPER.readValue(jsonString, clazz); } private static void fillFields(TimelineEntity finalEntity, From 4ed892e6b01cb5170b116e65e2077d916bf8eb67 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Wed, 24 Jul 2024 21:45:27 +0100 Subject: [PATCH 3/6] refactor to try to reuse instances --- .../crypto/key/kms/KMSClientProvider.java | 10 +--- .../apache/hadoop/ipc/DecayRpcScheduler.java | 2 +- .../java/org/apache/hadoop/ipc/Server.java | 2 +- .../hadoop/metrics2/MetricsJsonBuilder.java | 3 +- .../org/apache/hadoop/util/JacksonUtil.java | 55 ++++++++++++++++++- .../apache/hadoop/util/JsonSerialization.java | 5 +- .../crypto/key/kms/server/KMSJSONReader.java | 5 +- .../server/datanode/DiskBalancerWorkItem.java | 7 +-- .../hdfs/util/CombinedHostsFileReader.java | 17 ++---- .../hdfs/util/CombinedHostsFileWriter.java | 5 +- .../hadoop/hdfs/web/JsonUtilClient.java | 2 +- .../blockmanagement/SlowDiskTracker.java | 2 +- .../blockmanagement/SlowPeerTracker.java | 2 +- .../datanode/fsdataset/impl/FsVolumeImpl.java | 8 +-- .../fsdataset/impl/ProvidedVolumeImpl.java | 2 +- .../server/diskbalancer/command/Command.java | 3 +- .../connectors/JsonNodeConnector.java | 2 +- .../datamodel/DiskBalancerCluster.java | 2 +- .../datamodel/DiskBalancerVolume.java | 2 +- .../server/diskbalancer/planner/NodePlan.java | 7 +-- .../org/apache/hadoop/hdfs/web/JsonUtil.java | 23 ++++---- .../fs/azure/RemoteSASKeyGeneratorImpl.java | 3 +- .../fs/azure/RemoteWasbAuthorizerImpl.java | 3 +- .../azurebfs/services/AbfsHttpOperation.java | 5 +- .../yarn/util/timeline/TimelineUtils.java | 8 +-- .../apache/hadoop/yarn/webapp/Controller.java | 4 +- .../server/timeline/GenericObjectMapper.java | 6 +- .../NetworkTagMappingJsonManager.java | 2 +- .../runc/ImageTagToManifestPlugin.java | 12 +--- .../resource/ResourceProfilesManagerImpl.java | 12 +--- .../placement/MappingRuleCreator.java | 12 +--- .../timeline/LevelDBCacheTimelineStore.java | 4 +- 32 files changed, 113 insertions(+), 124 deletions(-) diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java index b4b66cbf6da73..b5a6d882334d9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -79,7 +79,6 @@ import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension; import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.base.Strings; @@ -130,13 +129,6 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, + "authentication.retry-count"; public static final int DEFAULT_AUTH_RETRY = 1; - /** - * It is more performant to reuse ObjectMapper instances but keeping the instance - * private makes it harder for someone to reconfigure it which might have unwanted - * side effects. - */ - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - private final ValueQueue encKeyVersionQueue; private KeyProviderDelegationTokenExtension.DelegationTokenExtension @@ -603,7 +595,7 @@ private T call(HttpURLConnection conn, Object jsonOutput, InputStream is = null; try { is = conn.getInputStream(); - ret = OBJECT_MAPPER.readValue(is, klass); + ret = JacksonUtil.getSharedReader().readValue(is, klass); } finally { IOUtils.closeStream(is); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java index 0f76ad68d0694..4d7cd023b5afa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java @@ -146,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler, public static final Logger LOG = LoggerFactory.getLogger(DecayRpcScheduler.class); - private static final ObjectWriter WRITER = JacksonUtil.createBasicObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter(); // Track the decayed and raw (no decay) number of calls for each schedulable // identity from all previous decay windows: idx 0 for decayed call cost and diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index 16ca3607c1c9f..a808f07b0c0aa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -3844,7 +3844,7 @@ public int getNumOpenConnections() { */ public String getNumOpenConnectionsPerUser() { try { - return JacksonUtil.createBasicObjectMapper() + return JacksonUtil.getSharedWriter() .writeValueAsString(connectionManager.getUserToConnectionsMap()); } catch (IOException ignored) { } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java index 3bcd23cc81ce8..71c497b6f9a39 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java @@ -46,8 +46,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder { private final MetricsCollector parent; private Map innerMetrics = new LinkedHashMap<>(); - private static final ObjectWriter WRITER = - JacksonUtil.createBasicObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter(); /** * Build an instance. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java index 13ee7290d75f4..fc8e672eb5b03 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java @@ -19,6 +19,8 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.json.JsonMapper; /** @@ -26,7 +28,13 @@ * * @since 3.5.0 */ -public class JacksonUtil { +public final class JacksonUtil { + + private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper(); + private static final ObjectReader SHARED_BASIC_OBJECT_READER = SHARED_BASIC_OBJECT_MAPPER.reader(); + private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER = SHARED_BASIC_OBJECT_MAPPER.writer(); + private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER_PRETTY = + SHARED_BASIC_OBJECT_MAPPER.writerWithDefaultPrettyPrinter(); /** * Creates a new {@link JsonFactory} instance with basic configuration. @@ -61,5 +69,50 @@ public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) { return JsonMapper.builder(jsonFactory).build(); } + /** + * Returns a shared {@link ObjectReader} instance with basic configuration. + * + * @return a shared {@link ObjectReader} instance with basic configuration + */ + public static ObjectReader getSharedReader() { + return SHARED_BASIC_OBJECT_READER; + } + + /** + * Returns an {@link ObjectReader} for the given type instance with basic configuration. + * + * @return an {@link ObjectReader} instance with basic configuration + */ + public static ObjectReader createBasicReaderFor(Class type) { + return SHARED_BASIC_OBJECT_MAPPER.readerFor(type); + } + + /** + * Returns a shared {@link ObjectWriter} instance with basic configuration. + * + * @return a shared {@link ObjectWriter} instance with basic configuration + */ + public static ObjectWriter getSharedWriter() { + return SHARED_BASIC_OBJECT_WRITER; + } + + /** + * Returns a shared {@link ObjectWriter} instance with pretty print and basic configuration. + * + * @return a shared {@link ObjectWriter} instance with pretty print and basic configuration + */ + public static ObjectWriter getSharedWriterWithPrettyPrint() { + return SHARED_BASIC_OBJECT_WRITER_PRETTY; + } + + /** + * Returns an {@link ObjectWriter} for the given type instance with basic configuration. 
+ * + * @return an {@link ObjectWriter} instance with basic configuration + */ + public static ObjectWriter createBasicWriterFor(Class type) { + return SHARED_BASIC_OBJECT_MAPPER.writerFor(type); + } + private JacksonUtil() {} } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java index d6d645eaefe2c..05b069c3ad9b8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java @@ -76,9 +76,8 @@ public class JsonSerialization { private final Class classType; private final ObjectMapper mapper; - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - private static final ObjectWriter WRITER = OBJECT_MAPPER.writerWithDefaultPrettyPrinter(); - private static final ObjectReader MAP_READER = OBJECT_MAPPER.readerFor(Map.class); + private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint(); + private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class); /** * @return an ObjectWriter which pretty-prints its output diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java index e7ad7c3c0a79c..2f7a6d8557731 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.crypto.key.kms.server; -import com.fasterxml.jackson.databind.ObjectMapper; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.util.JacksonUtil; @@ -39,7 +37,6 @@ @Consumes(MediaType.APPLICATION_JSON) @InterfaceAudience.Private public class KMSJSONReader implements MessageBodyReader { - private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); @Override public boolean isReadable(Class type, Type genericType, @@ -53,6 +50,6 @@ public Object readFrom(Class type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap httpHeaders, InputStream entityStream) throws IOException, WebApplicationException { - return MAPPER.readValue(entityStream, type); + return JacksonUtil.getSharedReader().readValue(entityStream, type); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java index ac7476977bd47..041eb2912be50 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.server.datanode; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Preconditions; @@ -36,8 +35,8 @@ 
@InterfaceStability.Unstable @JsonInclude(JsonInclude.Include.NON_DEFAULT) public class DiskBalancerWorkItem { - private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); - private static final ObjectReader READER = MAPPER.readerFor(DiskBalancerWorkItem.class); + private static final ObjectReader READER = + JacksonUtil.createBasicReaderFor(DiskBalancerWorkItem.class); private long startTime; private long secondsElapsed; @@ -173,7 +172,7 @@ public void incBlocksCopied() { * @throws IOException */ public String toJson() throws IOException { - return MAPPER.writeValueAsString(this); + return JacksonUtil.getSharedWriter().writeValueAsString(this); } /** diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java index c3ac22b7db38f..972254a62dc9e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.util; import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import java.io.File; @@ -66,13 +65,6 @@ public final class CombinedHostsFileReader { public static final Logger LOG = LoggerFactory.getLogger(CombinedHostsFileReader.class); - /** - * It is more performant to reuse ObjectMapper instances but keeping the instance - * private makes it harder for someone to reconfigure it which might have unwanted - * side effects. - */ - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - private CombinedHostsFileReader() { } @@ -97,7 +89,8 @@ private CombinedHostsFileReader() { try (Reader input = new InputStreamReader( Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) { - allDNs = OBJECT_MAPPER.readValue(input, DatanodeAdminProperties[].class); + allDNs = JacksonUtil.getSharedReader() + .readValue(input, DatanodeAdminProperties[].class); } catch (JsonMappingException jme) { // The old format doesn't have json top-level token to enclose // the array. 
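The helper surface added in this revision splits into shared singletons (getSharedReader, getSharedWriter, getSharedWriterWithPrettyPrint) and per-type factories (createBasicReaderFor, createBasicWriterFor), all backed by one basic ObjectMapper that is never exposed; sharing is safe because ObjectReader and ObjectWriter are immutable. A minimal usage sketch with made-up values, using only calls defined by this patch or core Jackson:

    import java.io.IOException;
    import java.util.Collections;
    import java.util.Map;
    import com.fasterxml.jackson.databind.ObjectReader;
    import org.apache.hadoop.util.JacksonUtil;

    static void roundTrip() throws IOException {
      Map<String, String> sample = Collections.singletonMap("dfs.hosts", "/etc/hadoop/dfs.hosts.json");
      // shared writer instances can be reused freely across threads
      String json = JacksonUtil.getSharedWriterWithPrettyPrint().writeValueAsString(sample);
      // a reader bound to Map, built from the same shared basic mapper
      ObjectReader mapReader = JacksonUtil.createBasicReaderFor(Map.class);
      Map<?, ?> parsed = mapReader.readValue(json);
    }
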
@@ -109,14 +102,12 @@ private CombinedHostsFileReader() { } if (tryOldFormat) { - ObjectReader objectReader = - JacksonUtil.createBasicObjectMapper().readerFor(DatanodeAdminProperties.class); + ObjectReader objectReader = JacksonUtil.createBasicReaderFor(DatanodeAdminProperties.class); List all = new ArrayList<>(); try (Reader input = new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)), StandardCharsets.UTF_8)) { - Iterator iterator = - objectReader.readValues(OBJECT_MAPPER.createParser(input)); + Iterator iterator = objectReader.readValues(input); while (iterator.hasNext()) { DatanodeAdminProperties properties = iterator.next(); all.add(properties); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java index e2ef4591b6b8d..dcd08cfc7010f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java @@ -26,7 +26,6 @@ import java.nio.file.Paths; import java.util.Set; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -60,12 +59,10 @@ private CombinedHostsFileWriter() { */ public static void writeFile(final String hostsFile, final Set allDNs) throws IOException { - final ObjectMapper objectMapper = JacksonUtil.createBasicObjectMapper(); - try (Writer output = new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)), StandardCharsets.UTF_8)) { - objectMapper.writeValue(output, allDNs); + JacksonUtil.getSharedWriter().writeValue(output, allDNs); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java index 8695d6c72a967..54a44b33b17b7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java @@ -654,7 +654,7 @@ static List toXAttrNames(final Map json) } final String namesInJson = (String) json.get("XAttrNames"); - ObjectReader reader = JacksonUtil.createBasicObjectMapper().readerFor(List.class); + ObjectReader reader = JacksonUtil.createBasicReaderFor(List.class); final List xattrs = reader.readValue(namesInJson); final List names = Lists.newArrayListWithCapacity(json.keySet().size()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java index d3621a6097dee..312d63daed4e1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java @@ -71,7 +71,7 @@ public class SlowDiskTracker { /** * ObjectWriter to convert JSON reports to String. 
*/ - private static final ObjectWriter WRITER = JacksonUtil.createBasicObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter(); /** * Number of disks to include in JSON report per operation. We will return diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java index 719c72afc7206..3774a9dbdff21 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java @@ -75,7 +75,7 @@ public class SlowPeerTracker { /** * ObjectWriter to convert JSON reports to String. */ - private static final ObjectWriter WRITER = JacksonUtil.createBasicObjectMapper().writer(); + private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter(); /** * Number of nodes to include in JSON report. We will return nodes with diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java index dc8dcb2d42737..4028702ed2b2c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java @@ -79,6 +79,7 @@ import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException; +import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Timer; @@ -86,12 +87,10 @@ import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.base.Joiner; -import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; /** @@ -104,9 +103,8 @@ public class FsVolumeImpl implements FsVolumeSpi { public static final Logger LOG = LoggerFactory.getLogger(FsVolumeImpl.class); - private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); - private static final ObjectWriter WRITER = MAPPER.writerWithDefaultPrettyPrinter(); - private static final ObjectReader READER = MAPPER.readerFor(BlockIteratorState.class); + private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint(); + private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(BlockIteratorState.class); private final FsDatasetImpl dataset; private final String storageID; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java index e1d54053cc2ee..816a765c52907 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java @@ -369,7 +369,7 @@ public void releaseReservedSpace(long bytesToRelease) { } private static final ObjectWriter WRITER = - JacksonUtil.createBasicObjectMapper().writerWithDefaultPrettyPrinter(); + JacksonUtil.getSharedWriterWithPrettyPrint(); private static class ProvidedBlockIteratorState { ProvidedBlockIteratorState() { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java index 4c75493e9ee69..e9ba658ecdc91 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java @@ -77,8 +77,7 @@ * Common interface for command handling. */ public abstract class Command extends Configured implements Closeable { - private static final ObjectReader READER = - JacksonUtil.createBasicObjectMapper().readerFor(HashMap.class); + private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(HashMap.class); static final Logger LOG = LoggerFactory.getLogger(Command.class); private Map validArgs = new HashMap<>(); private URI clusterURI; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java index f60a474c2c9b1..4e76c7e45e999 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java @@ -37,7 +37,7 @@ public class JsonNodeConnector implements ClusterConnector { private static final Logger LOG = LoggerFactory.getLogger(JsonNodeConnector.class); private static final ObjectReader READER = - JacksonUtil.createBasicObjectMapper().readerFor(DiskBalancerCluster.class); + JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class); private final URL clusterURI; /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java index 3eb80fb521be5..4d5ae15cbc2e5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java @@ -73,7 +73,7 @@ public class DiskBalancerCluster { private static final Logger LOG = LoggerFactory.getLogger(DiskBalancerCluster.class); private static final ObjectReader READER = - JacksonUtil.createBasicObjectMapper().readerFor(DiskBalancerCluster.class); + JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class); private final Set exclusionList; private final Set inclusionList; private ClusterConnector clusterConnector; diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java index dcfd149bb3ab3..e354a23519ff2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java @@ -34,7 +34,7 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class DiskBalancerVolume { private static final ObjectReader READER = - JacksonUtil.createBasicObjectMapper().readerFor(DiskBalancerVolume.class); + JacksonUtil.createBasicReaderFor(DiskBalancerVolume.class); private static final Logger LOG = LoggerFactory.getLogger(DiskBalancerVolume.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java index 59480b190757e..3dfd27dde4d2d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.server.diskbalancer.planner; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.util.JacksonUtil; @@ -40,10 +39,8 @@ public class NodePlan { private int port; private long timeStamp; - private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); - private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class); - private static final ObjectWriter WRITER = MAPPER.writerFor( - MAPPER.constructType(NodePlan.class)); + private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(NodePlan.class); + private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(NodePlan.class); /** * returns timestamp when this plan was created. * diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java index 4ea9a4afc8ad7..1ec6730bb87d2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hdfs.web; +import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.ContentSummary; @@ -44,8 +45,6 @@ import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap; -import com.fasterxml.jackson.databind.ObjectMapper; - import java.io.IOException; import java.util.*; @@ -53,11 +52,11 @@ public class JsonUtil { private static final Object[] EMPTY_OBJECT_ARRAY = {}; - // Reuse ObjectMapper instance for improving performance. - // ObjectMapper is thread safe as long as we always configure instance + // Reuse ObjectWriter instance for improving performance. 
+ // ObjectWriter is thread safe as long as we always configure instance // before use. We don't have a re-entrant call pattern in WebHDFS, // so we just need to worry about thread-safety. - private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); + private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter(); /** Convert a token object to a Json string. */ public static String toJsonString(final Token token @@ -94,7 +93,7 @@ public static String toJsonString(final String key, final Object value) { final Map m = new TreeMap(); m.put(key, value); try { - return MAPPER.writeValueAsString(m); + return SHARED_WRITER.writeValueAsString(m); } catch (IOException ignored) { } return null; @@ -114,7 +113,7 @@ public static String toJsonString(final HdfsFileStatus status, final Map m = toJsonMap(status); try { return includeType ? - toJsonString(FileStatus.class, m) : MAPPER.writeValueAsString(m); + toJsonString(FileStatus.class, m) : SHARED_WRITER.writeValueAsString(m); } catch (IOException ignored) { } return null; @@ -454,7 +453,7 @@ public static String toJsonString(final AclStatus status) { finalMap.put(AclStatus.class.getSimpleName(), m); try { - return MAPPER.writeValueAsString(finalMap); + return SHARED_WRITER.writeValueAsString(finalMap); } catch (IOException ignored) { } return null; @@ -492,7 +491,7 @@ public static String toJsonString(final List xAttrs, final XAttrCodec encoding) throws IOException { final Map finalMap = new TreeMap(); finalMap.put("XAttrs", toJsonArray(xAttrs, encoding)); - return MAPPER.writeValueAsString(finalMap); + return SHARED_WRITER.writeValueAsString(finalMap); } public static String toJsonString(final List xAttrs) @@ -501,14 +500,14 @@ public static String toJsonString(final List xAttrs) for (XAttr xAttr : xAttrs) { names.add(XAttrHelper.getPrefixedName(xAttr)); } - String ret = MAPPER.writeValueAsString(names); + String ret = SHARED_WRITER.writeValueAsString(names); final Map finalMap = new TreeMap(); finalMap.put("XAttrNames", ret); - return MAPPER.writeValueAsString(finalMap); + return SHARED_WRITER.writeValueAsString(finalMap); } public static String toJsonString(Object obj) throws IOException { - return MAPPER.writeValueAsString(obj); + return SHARED_WRITER.writeValueAsString(obj); } public static String toJsonString(BlockStoragePolicy[] storagePolicies) { diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java index 0d5092a82e085..2085532de00fe 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java @@ -54,8 +54,7 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl { public static final Logger LOG = LoggerFactory.getLogger(AzureNativeFileSystemStore.class); private static final ObjectReader RESPONSE_READER = JacksonUtil - .createBasicObjectMapper() - .readerFor(RemoteSASKeyGenerationResponse.class); + .createBasicReaderFor(RemoteSASKeyGenerationResponse.class); /** * Configuration parameter name expected in the Configuration diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java index f181d8f645035..7bcaecdba5b0b 100644 --- 
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java @@ -50,8 +50,7 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface { public static final Logger LOG = LoggerFactory .getLogger(RemoteWasbAuthorizerImpl.class); private static final ObjectReader RESPONSE_READER = JacksonUtil - .createBasicObjectMapper() - .readerFor(RemoteWasbAuthorizerResponse.class); + .createBasicReaderFor(RemoteWasbAuthorizerResponse.class); /** * Configuration parameter name expected in the Configuration object to diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java index e5bb80bf65b9d..658f2cfe65167 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java @@ -30,7 +30,6 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,6 +39,7 @@ import org.apache.hadoop.fs.azurebfs.contracts.services.AbfsPerfLoggable; import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema; import org.apache.hadoop.fs.azurebfs.utils.UriUtils; +import org.apache.hadoop.util.JacksonUtil; /** * Base Http operation class for orchestrating server IO calls. Child classes would @@ -509,7 +509,8 @@ private void parseListFilesResponse(final InputStream stream) } try { - this.listResultSchema = JacksonUtil.createBasicObjectMapper().readValue(stream, ListResultSchema.class); + this.listResultSchema = JacksonUtil.getSharedReader().readValue(stream, + ListResultSchema.class); } catch (IOException ex) { log.error("Unable to deserialize list results", ex); throw ex; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java index 9d3504120ea14..a36b96dca205a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java @@ -54,10 +54,10 @@ public class TimelineUtils { "TIMELINE_FLOW_RUN_ID_TAG"; public final static String DEFAULT_FLOW_VERSION = "1"; - private static final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); static { - YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER); } /** @@ -90,9 +90,9 @@ public static String dumpTimelineRecordtoJSON(Object o) public static String dumpTimelineRecordtoJSON(Object o, boolean pretty) throws JsonGenerationException, JsonMappingException, IOException { if (pretty) { - return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o); + return OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(o); } else { - return 
mapper.writeValueAsString(o); + return OBJECT_MAPPER.writeValueAsString(o); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java index 44986b11bf1b5..bf5500892de14 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java @@ -28,7 +28,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.webapp.view.DefaultPage; @@ -43,7 +42,6 @@ @InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"}) public abstract class Controller implements Params { public static final Logger LOG = LoggerFactory.getLogger(Controller.class); - static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); @RequestScoped public static class RequestContext{ @@ -226,7 +224,7 @@ protected void renderJSON(Object object) { context().rendered = true; context().response.setContentType(MimeType.JSON); try { - OBJECT_MAPPER.writeValue(writer(), object); + JacksonUtil.getSharedWriter().writeValue(writer(), object); } catch (Exception e) { throw new WebAppException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java index d5b6ddfcdbf64..440c5d6f0600c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java @@ -19,7 +19,6 @@ import java.io.IOException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.InterfaceAudience; @@ -39,9 +38,8 @@ public class GenericObjectMapper { private static final byte[] EMPTY_BYTES = new byte[0]; - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - public static final ObjectReader OBJECT_READER = OBJECT_MAPPER.readerFor(Object.class); - public static final ObjectWriter OBJECT_WRITER = OBJECT_MAPPER.writer(); + public static final ObjectReader OBJECT_READER = JacksonUtil.createBasicReaderFor(Object.class); + public static final ObjectWriter OBJECT_WRITER = JacksonUtil.getSharedWriter(); /** * Serializes an Object into a byte array. 
Along with {@link #read(byte[])}, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java index a8fa09248e778..3b4e26eda1ff3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java @@ -59,7 +59,7 @@ public void initialize(Configuration conf) { YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH); } try { - networkTagMapping = JacksonUtil.createBasicObjectMapper().readValue(new File(mappingJsonFile), + networkTagMapping = JacksonUtil.getSharedReader().readValue(new File(mappingJsonFile), NetworkTagMapping.class); } catch (Exception e) { throw new YarnRuntimeException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java index add0cd10c6c49..bb21c45f735a0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java @@ -43,7 +43,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,13 +64,6 @@ public class ImageTagToManifestPlugin extends AbstractService implements RuncImageTagToManifestPlugin { - /** - * It is more performant to reuse ObjectMapper instances but keeping the instance - * private makes it harder for someone to reconfigure it which might have unwanted - * side effects. 
- */ - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - private Map manifestCache; private AtomicReference> localImageToHashCache = new AtomicReference<>(new HashMap<>()); @@ -114,7 +106,7 @@ public ImageManifest getManifestFromImageTag(String imageTag) } byte[] bytes = IOUtils.toByteArray(input); - manifest = OBJECT_MAPPER.readValue(bytes, ImageManifest.class); + manifest = JacksonUtil.getSharedReader().readValue(bytes, ImageManifest.class); manifestCache.put(hash, manifest); return manifest; @@ -321,7 +313,7 @@ protected void serviceStop() throws Exception { } private static class LRUCache extends LinkedHashMap { - private int cacheSize; + private final int cacheSize; LRUCache(int initialCapacity, float loadFactor) { super(initialCapacity, loadFactor, true); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java index dde3026be5528..8910ab48ddaaa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java @@ -20,11 +20,10 @@ import org.apache.hadoop.classification.VisibleForTesting; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceInformation; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -74,13 +73,6 @@ public class ResourceProfilesManagerImpl implements ResourceProfilesManager { + " (by setting " + YarnConfiguration.RM_RESOURCE_PROFILES_ENABLED + " to true)"; - /** - * It is more performant to reuse ObjectMapper instances but keeping the instance - * private makes it harder for someone to reconfigure it which might have unwanted - * side effects. 
- */ - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - public ResourceProfilesManagerImpl() { ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); readLock = lock.readLock(); @@ -113,7 +105,7 @@ private void loadProfiles() throws IOException { resourcesFile = tmp.getPath(); } } - Map data = OBJECT_MAPPER.readValue(new File(resourcesFile), Map.class); + Map data = JacksonUtil.getSharedReader().readValue(new File(resourcesFile), Map.class); Iterator iterator = data.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = (Map.Entry) iterator.next(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java index d80a901bd8dce..0fa10570d030a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java @@ -44,20 +44,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.VisibleForTesting; public class MappingRuleCreator { private static final String ALL_USER = "*"; private static Logger LOG = LoggerFactory.getLogger(MappingRuleCreator.class); - /** - * It is more performant to reuse ObjectMapper instances but keeping the instance - * private makes it harder for someone to reconfigure it which might have unwanted - * side effects. 
- */ - private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); - public MappingRulesDescription getMappingRulesFromJsonFile(String filePath) throws IOException { byte[] fileContents = Files.readAllBytes(Paths.get(filePath)); @@ -66,12 +58,12 @@ public MappingRulesDescription getMappingRulesFromJsonFile(String filePath) MappingRulesDescription getMappingRulesFromJson(byte[] contents) throws IOException { - return OBJECT_MAPPER.readValue(contents, MappingRulesDescription.class); + return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class); } MappingRulesDescription getMappingRulesFromJson(String contents) throws IOException { - return OBJECT_MAPPER.readValue(contents, MappingRulesDescription.class); + return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class); } public List getMappingRulesFromFile(String jsonPath) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java index 6334fe05e1add..8ee6d1864c694 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java @@ -18,7 +18,6 @@ package org.apache.hadoop.yarn.server.timeline; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; @@ -299,7 +298,6 @@ public void close() throws IOException { } }; } - static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); @SuppressWarnings("unchecked") private V getEntityForKey(byte[] key) throws IOException { @@ -307,7 +305,7 @@ private V getEntityForKey(byte[] key) throws IOException { if (resultRaw == null) { return null; } - return (V) OBJECT_MAPPER.readValue(resultRaw, TimelineEntity.class); + return (V) JacksonUtil.getSharedReader().readValue(resultRaw, TimelineEntity.class); } private byte[] getStartTimeKey(K entityId) { From fc18ed4180a31e418d41fd601b27a2e1c70c372a Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Wed, 24 Jul 2024 22:59:14 +0100 Subject: [PATCH 4/6] more changes checkstyle issues --- .../main/java/org/apache/hadoop/conf/Configuration.java | 7 ++----- .../main/java/org/apache/hadoop/jmx/JMXJsonServlet.java | 9 +-------- .../main/java/org/apache/hadoop/util/JacksonUtil.java | 8 ++++++-- .../server/datanode/fsdataset/impl/FsVolumeImpl.java | 3 ++- .../hdfs/server/namenode/NetworkTopologyServlet.java | 4 +--- .../hdfs/server/namenode/StartupProgressServlet.java | 2 +- .../main/java/org/apache/hadoop/mapred/QueueManager.java | 4 +--- .../org/apache/hadoop/fs/s3a/impl/S3AEncryption.java | 6 +++--- .../yarn/client/api/impl/FileSystemTimelineWriter.java | 2 +- 9 files changed, 18 insertions(+), 27 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 6c25af5b099fc..4f372374abe1b 100755 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -22,7 +22,6 @@ import com.ctc.wstx.io.StreamBootstrapper; import com.ctc.wstx.io.SystemId; import com.ctc.wstx.stax.WstxInputFactory; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import java.io.BufferedInputStream; @@ -3793,8 +3792,7 @@ public static void dumpConfiguration(Configuration config, throw new IllegalArgumentException("Property " + propertyName + " not found"); } else { - final JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); - JsonGenerator dumpGenerator = dumpFactory.createGenerator(out); + JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out); dumpGenerator.writeStartObject(); dumpGenerator.writeFieldName("property"); appendJSONProperty(dumpGenerator, config, propertyName, @@ -3832,8 +3830,7 @@ public static void dumpConfiguration(Configuration config, */ public static void dumpConfiguration(Configuration config, Writer out) throws IOException { - final JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); - JsonGenerator dumpGenerator = dumpFactory.createGenerator(out); + JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out); dumpGenerator.writeStartObject(); dumpGenerator.writeFieldName("properties"); dumpGenerator.writeStartArray(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index 64e9554c23562..06a1fe2e1a548 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -43,7 +43,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; @@ -135,11 +134,6 @@ public class JMXJsonServlet extends HttpServlet { */ protected transient MBeanServer mBeanServer; - /** - * Json Factory to create Json generators for write objects in json format - */ - protected transient JsonFactory jsonFactory; - /** * Initialize this servlet. 
*/ @@ -147,7 +141,6 @@ public class JMXJsonServlet extends HttpServlet { public void init() throws ServletException { // Retrieve the MBean server mBeanServer = ManagementFactory.getPlatformMBeanServer(); - jsonFactory = JacksonUtil.createBasicJsonFactory(); } protected boolean isInstrumentationAccessAllowed(HttpServletRequest request, @@ -188,7 +181,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) { response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET"); response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*"); - jg = jsonFactory.createGenerator(writer); + jg = JacksonUtil.getSharedWriter().createGenerator(writer); jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET); jg.useDefaultPrettyPrinter(); jg.writeStartObject(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java index fc8e672eb5b03..56089f277bb60 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java @@ -31,8 +31,10 @@ public final class JacksonUtil { private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper(); - private static final ObjectReader SHARED_BASIC_OBJECT_READER = SHARED_BASIC_OBJECT_MAPPER.reader(); - private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER = SHARED_BASIC_OBJECT_MAPPER.writer(); + private static final ObjectReader SHARED_BASIC_OBJECT_READER = + SHARED_BASIC_OBJECT_MAPPER.reader(); + private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER = + SHARED_BASIC_OBJECT_MAPPER.writer(); private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER_PRETTY = SHARED_BASIC_OBJECT_MAPPER.writerWithDefaultPrettyPrinter(); @@ -81,6 +83,7 @@ public static ObjectReader getSharedReader() { /** * Returns an {@link ObjectReader} for the given type instance with basic configuration. * + * @param type the class that the reader has to support * @return an {@link ObjectReader} instance with basic configuration */ public static ObjectReader createBasicReaderFor(Class type) { @@ -108,6 +111,7 @@ public static ObjectWriter getSharedWriterWithPrettyPrint() { /** * Returns an {@link ObjectWriter} for the given type instance with basic configuration. 
* + * @param type the class that the writer has to support * @return an {@link ObjectWriter} instance with basic configuration */ public static ObjectWriter createBasicWriterFor(Class type) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java index 4028702ed2b2c..080418db08afa 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java @@ -104,7 +104,8 @@ public class FsVolumeImpl implements FsVolumeSpi { public static final Logger LOG = LoggerFactory.getLogger(FsVolumeImpl.class); private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint(); - private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(BlockIteratorState.class); + private static final ObjectReader READER = + JacksonUtil.createBasicReaderFor(BlockIteratorState.class); private final FsDatasetImpl dataset; private final String storageID; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java index e789e507861b9..16d9e203d3143 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hdfs.server.namenode; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; @@ -124,8 +123,7 @@ protected void printTopology(PrintStream stream, List leaves, protected void printJsonFormat(PrintStream stream, Map> tree, ArrayList racks) throws IOException { - JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); - JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream); + JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(stream); dumpGenerator.writeStartArray(); for(String r : racks) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java index d0fee730a89d5..48bb85040da49 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java @@ -61,7 +61,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext( getServletContext()); StartupProgressView view = prog.createView(); - JsonGenerator json = JacksonUtil.createBasicJsonFactory().createGenerator(resp.getWriter()); + JsonGenerator json = JacksonUtil.getSharedWriter().createGenerator(resp.getWriter()); try { json.writeStartObject(); json.writeNumberField(ELAPSED_TIME, view.getElapsedTime()); 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java index ab1821bf5707a..3a44b427928d8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java @@ -18,7 +18,6 @@ package org.apache.hadoop.mapred; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.hadoop.classification.InterfaceAudience; @@ -532,8 +531,7 @@ static void dumpConfiguration(Writer out, String configFile, return; } - JsonFactory dumpFactory = JacksonUtil.createBasicJsonFactory(); - JsonGenerator dumpGenerator = dumpFactory.createGenerator(out); + JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out); QueueConfigurationParser parser; boolean aclsEnabled = false; if (conf != null) { diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java index a720d2ca10000..4229075f5a057 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets; import java.util.Map; -import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -91,8 +91,8 @@ public static String getS3EncryptionContextBase64Encoded( if (encryptionContextMap.isEmpty()) { return ""; } - final String encryptionContextJson = new ObjectMapper().writeValueAsString( - encryptionContextMap); + final String encryptionContextJson = JacksonUtil.getSharedWriter() + .writeValueAsString(encryptionContextMap); return Base64.encodeBase64String(encryptionContextJson.getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { if (propagateExceptions) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java index a7a30c2180f56..bead293896af7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java @@ -365,7 +365,7 @@ public long getLastModifiedTime() { protected void prepareForWrite() throws IOException{ this.stream = createLogFileStream(fs, logPath); - this.jsonGenerator = JacksonUtil.createBasicJsonFactory() + this.jsonGenerator = JacksonUtil.getSharedWriter() .createGenerator((OutputStream)stream); this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n")); this.lastModifiedTime = Time.monotonicNow(); From da58fafdc0a3dedd084b3cb50272ad6bb0e03954 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Tue, 13 Aug 2024 22:06:16 
+0100 Subject: [PATCH 5/6] reorder imports --- .../src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java | 2 +- .../java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java | 2 +- .../src/main/java/org/apache/hadoop/util/JacksonUtil.java | 4 ++-- .../org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java | 2 +- .../server/diskbalancer/datamodel/DiskBalancerCluster.java | 4 ++-- .../hadoop/hdfs/server/namenode/StartupProgressServlet.java | 2 +- .../java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java | 2 +- .../apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java | 2 +- .../hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java | 5 ++--- .../hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java | 2 +- .../yarn/client/api/impl/FileSystemTimelineWriter.java | 2 +- 11 files changed, 14 insertions(+), 15 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index 06a1fe2e1a548..6f54364fff4e0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -44,12 +44,12 @@ import javax.servlet.http.HttpServletResponse; import com.fasterxml.jackson.core.JsonGenerator; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.lang3.NotImplementedException; import org.apache.hadoop.http.HttpServer2; +import org.apache.hadoop.util.JacksonUtil; /* * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java index 71c497b6f9a39..3534adfd6903e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java @@ -21,9 +21,9 @@ import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.util.JacksonUtil; import com.fasterxml.jackson.databind.ObjectWriter; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java index 56089f277bb60..ea54e834c328e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java @@ -22,12 +22,12 @@ import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.json.JsonMapper; +import org.apache.hadoop.classification.InterfaceAudience.Private; /** * Utility for sharing code related to Jackson usage in Hadoop. 
- * - * @since 3.5.0 */ +@Private public final class JacksonUtil { private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper(); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java index 972254a62dc9e..a41b727ab2d20 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java @@ -40,8 +40,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties; - import org.apache.hadoop.util.JacksonUtil; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java index 4d5ae15cbc2e5..f24f92ff1392d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java @@ -20,8 +20,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.databind.ObjectReader; -import org.apache.hadoop.util.JacksonUtil; -import org.apache.hadoop.util.Preconditions; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; @@ -31,6 +29,8 @@ import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner; import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory; import org.apache.hadoop.hdfs.web.JsonUtil; +import org.apache.hadoop.util.JacksonUtil; +import org.apache.hadoop.util.Preconditions; import java.io.File; import java.io.IOException; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java index 48bb85040da49..17cd49c2d5708 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java @@ -28,9 +28,9 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step; import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.util.JacksonUtil; /** * Servlet that provides a JSON representation of the namenode's current startup diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java index 4229075f5a057..00692abcf182f 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java +++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java @@ -22,7 +22,6 @@ import java.nio.charset.StandardCharsets; import java.util.Map; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,6 +29,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.S3AUtils; +import org.apache.hadoop.util.JacksonUtil; import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT; diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java index 2085532de00fe..2771f90888d25 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java @@ -29,9 +29,9 @@ import org.apache.hadoop.fs.azure.security.Constants; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryUtils; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.JacksonUtil; import org.apache.http.NameValuePair; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.utils.URIBuilder; diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java index 9945715714aea..ab0282e19fc1d 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java @@ -29,9 +29,6 @@ import java.util.Hashtable; import java.util.Map; -import org.apache.hadoop.util.JacksonUtil; -import org.apache.hadoop.util.Preconditions; - import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import org.slf4j.Logger; @@ -42,6 +39,8 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.azurebfs.services.AbfsIoUtils; import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy; +import org.apache.hadoop.util.JacksonUtil; +import org.apache.hadoop.util.Preconditions; /** * This class provides convenience methods to obtain AAD tokens. 
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java index 7c73c83fb17a1..58f8b59ba65e9 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.math3.distribution.AbstractRealDistribution; @@ -35,6 +34,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.tools.rumen.JobStory; import org.apache.hadoop.tools.rumen.JobStoryProducer; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.ExecutionType; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java index bead293896af7..dc60f9b274ede 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java @@ -38,7 +38,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; -import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; @@ -50,6 +49,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Time; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; From ada910cd0d86fa74be3c96d0d562c194629f43a6 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Wed, 14 Aug 2024 18:13:25 +0100 Subject: [PATCH 6/6] more changes to import order --- .../src/main/java/org/apache/hadoop/util/JacksonUtil.java | 1 + .../org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java index ea54e834c328e..7d90555c8780b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java @@ -22,6 +22,7 @@ import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.json.JsonMapper; + import org.apache.hadoop.classification.InterfaceAudience.Private; /** diff --git 
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java index 2771f90888d25..3f8862e6d1def 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.concurrent.TimeUnit; -import com.fasterxml.jackson.databind.ObjectReader; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.azure.security.Constants; import org.apache.hadoop.io.retry.RetryPolicy; @@ -41,6 +40,7 @@ import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectReader; import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
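A minimal usage sketch of the new utility, assuming only the JacksonUtil signatures visible in the hunks above (getSharedReader(), getSharedWriter(), getSharedWriterWithPrettyPrint(), createBasicReaderFor(Class)); the class and method names in the sketch itself are illustrative and are not part of the patch series:

import java.io.IOException;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

import org.apache.hadoop.util.JacksonUtil;

// Illustrative caller only, not a class touched by these patches.
public class JacksonUtilUsageSketch {

  // A reader bound to one target type is cached per class, mirroring what the
  // patch does for DiskBalancerWorkItem, NodePlan and BlockIteratorState.
  private static final ObjectReader MAP_READER =
      JacksonUtil.createBasicReaderFor(Map.class);

  // The shared writer replaces ad-hoc per-class ObjectMapper fields.
  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();

  static String toJson(Map<String, Object> value) throws IOException {
    return WRITER.writeValueAsString(value);
  }

  static String toPrettyJson(Map<String, Object> value) throws IOException {
    // Pretty-printing variant, as used in FsVolumeImpl and ProvidedVolumeImpl.
    return JacksonUtil.getSharedWriterWithPrettyPrint().writeValueAsString(value);
  }

  static Map<String, Object> fromJson(String json) throws IOException {
    return MAP_READER.readValue(json);
  }
}

Caching a type-bound reader once per class keeps ObjectMapper construction off the hot path while the shared mapper itself stays private to JacksonUtil, which is the trade-off described by the "it is more performant to reuse ObjectMapper instances" comments that the patch removes from the individual classes.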