Configuration.java
@@ -79,8 +79,6 @@

 import com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -98,6 +96,8 @@
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.stax2.XMLInputFactory2;
 import org.codehaus.stax2.XMLStreamReader2;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;

@@ -192,11 +192,12 @@
 @InterfaceStability.Stable
 public class Configuration implements Iterable<Map.Entry<String,String>>,
     Writable {
-  private static final Log LOG =
-      LogFactory.getLog(Configuration.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Configuration.class);

-  private static final Log LOG_DEPRECATION =
-      LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation");
+  private static final Logger LOG_DEPRECATION =
+      LoggerFactory.getLogger(
+          "org.apache.hadoop.conf.Configuration.deprecation");

   private boolean quietmode = true;

@@ -2877,10 +2878,10 @@ private Resource loadResource(Properties properties,
       }
       return null;
     } catch (IOException e) {
-      LOG.fatal("error parsing conf " + name, e);
+      LOG.error("error parsing conf " + name, e);
       throw new RuntimeException(e);
     } catch (XMLStreamException e) {
-      LOG.fatal("error parsing conf " + name, e);
+      LOG.error("error parsing conf " + name, e);
       throw new RuntimeException(e);
     }
   }
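The same pattern repeats in every file touched by this patch: the commons-logging Log/LogFactory pair is swapped for SLF4J's Logger/LoggerFactory, and LOG.fatal(...) calls become LOG.error(...) because SLF4J defines no FATAL level. A minimal before/after sketch of that shape (the Example class, report method, and name parameter are illustrative only, not part of the patch):

  // Before: commons-logging, which exposes a FATAL level.
  // import org.apache.commons.logging.Log;
  // import org.apache.commons.logging.LogFactory;
  // private static final Log LOG = LogFactory.getLog(Example.class);
  // LOG.fatal("error parsing conf " + name, e);

  // After: SLF4J. There is no FATAL level, so fatal() maps to error().
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  class Example {
    private static final Logger LOG = LoggerFactory.getLogger(Example.class);

    void report(String name, Exception e) {
      // String concatenation still works, as kept in this patch; SLF4J also
      // accepts the parameterized form below, where the trailing Throwable
      // is logged with its full stack trace and formatting is skipped when
      // the level is disabled.
      LOG.error("error parsing conf {}", name, e);
    }
  }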
ReconfigurableBase.java
@@ -22,9 +22,10 @@
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import java.io.IOException;
 import java.util.Collection;
@@ -41,8 +42,8 @@
 public abstract class ReconfigurableBase
     extends Configured implements Reconfigurable {

-  private static final Log LOG =
-      LogFactory.getLog(ReconfigurableBase.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReconfigurableBase.class);
   // Use for testing purpose.
   private ReconfigurationUtil reconfigurationUtil = new ReconfigurationUtil();

ReconfigurationServlet.java
@@ -18,8 +18,6 @@

 package org.apache.hadoop.conf;

-import org.apache.commons.logging.*;
-
 import org.apache.commons.lang.StringEscapeUtils;

 import java.util.Collection;
@@ -33,6 +31,8 @@
 import javax.servlet.http.HttpServletResponse;

 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * A servlet for changing a node's configuration.
@@ -45,8 +45,8 @@ public class ReconfigurationServlet extends HttpServlet {

   private static final long serialVersionUID = 1L;

-  private static final Log LOG =
-      LogFactory.getLog(ReconfigurationServlet.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReconfigurationServlet.class);

   // the prefix used to fing the attribute holding the reconfigurable
   // for a given request
JceAesCtrCryptoCodec.java
@@ -26,12 +26,12 @@
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;

 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY;
@@ -42,8 +42,8 @@
  */
 @InterfaceAudience.Private
 public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec {
-  private static final Log LOG =
-      LogFactory.getLog(JceAesCtrCryptoCodec.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JceAesCtrCryptoCodec.class.getName());

   private Configuration conf;
   private String provider;
OpensslAesCtrCryptoCodec.java
@@ -26,22 +26,22 @@
 import java.security.SecureRandom;
 import java.util.Random;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;

 import com.google.common.base.Preconditions;
 import org.apache.hadoop.crypto.random.OsSecureRandom;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Implement the AES-CTR crypto codec using JNI into OpenSSL.
  */
 @InterfaceAudience.Private
 public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec {
-  private static final Log LOG =
-      LogFactory.getLog(OpensslAesCtrCryptoCodec.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslAesCtrCryptoCodec.class.getName());

   private Configuration conf;
   private Random random;
OpensslCipher.java
@@ -26,13 +26,13 @@
 import javax.crypto.NoSuchPaddingException;
 import javax.crypto.ShortBufferException;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;

 import com.google.common.base.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * OpenSSL cipher using JNI.
@@ -41,8 +41,8 @@
  */
 @InterfaceAudience.Private
 public final class OpensslCipher {
-  private static final Log LOG =
-      LogFactory.getLog(OpensslCipher.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslCipher.class.getName());
   public static final int ENCRYPT_MODE = 1;
   public static final int DECRYPT_MODE = 0;
OpensslSecureRandom.java
@@ -19,13 +19,13 @@

 import java.util.Random;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;

 import com.google.common.base.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * OpenSSL secure random using JNI.
@@ -44,8 +44,8 @@
 @InterfaceAudience.Private
 public class OpensslSecureRandom extends Random {
   private static final long serialVersionUID = -7828193502768789584L;
-  private static final Log LOG =
-      LogFactory.getLog(OpensslSecureRandom.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpensslSecureRandom.class.getName());

   /** If native SecureRandom unavailable, use java SecureRandom */
   private java.security.SecureRandom fallback = null;
OsSecureRandom.java
@@ -23,12 +23,12 @@
 import java.io.IOException;
 import java.util.Random;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT;
@@ -39,7 +39,8 @@
  */
 @InterfaceAudience.Private
 public class OsSecureRandom extends Random implements Closeable, Configurable {
-  public static final Log LOG = LogFactory.getLog(OsSecureRandom.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(OsSecureRandom.class);

   private static final long serialVersionUID = 6391500337172057900L;

@@ -112,7 +113,7 @@ synchronized protected int next(int nbits) {
   @Override
   synchronized public void close() {
     if (stream != null) {
-      IOUtils.cleanup(LOG, stream);
+      IOUtils.cleanupWithLogger(LOG, stream);
       stream = null;
     }
   }
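Besides the logger declaration, the OsSecureRandom hunk switches IOUtils.cleanup, whose first parameter is a commons-logging Log, to IOUtils.cleanupWithLogger, which takes an SLF4J Logger; both close the given streams and log (rather than propagate) any IOException. A sketch of the new call-site shape, assuming only what the diff shows (the StreamHolder class and its field are illustrative):

  import java.io.Closeable;
  import org.apache.hadoop.io.IOUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  class StreamHolder {
    private static final Logger LOG =
        LoggerFactory.getLogger(StreamHolder.class);
    private Closeable stream;   // e.g. an InputStream on the random device file

    synchronized void close() {
      if (stream != null) {
        // Closes quietly and reports any IOException through the SLF4J logger;
        // the older IOUtils.cleanup(Log, Closeable...) expects a commons-logging Log.
        IOUtils.cleanupWithLogger(LOG, stream);
        stream = null;
      }
    }
  }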
AbstractFileSystem.java
@@ -32,8 +32,6 @@
 import java.util.StringTokenizer;
 import java.util.concurrent.ConcurrentHashMap;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -52,6 +50,8 @@
 import org.apache.hadoop.util.Progressable;

 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * This class provides an interface for implementors of a Hadoop file system
@@ -66,7 +66,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class AbstractFileSystem {
-  static final Log LOG = LogFactory.getLog(AbstractFileSystem.class);
+  static final Logger LOG = LoggerFactory.getLogger(AbstractFileSystem.class);

   /** Recording statistics per a file system class. */
   private static final Map<URI, Statistics>
ChecksumFs.java
@@ -27,14 +27,14 @@
 import java.util.Arrays;
 import java.util.EnumSet;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Abstract Checksumed Fs.
@@ -110,8 +110,8 @@ private int getSumBufferSize(int bytesPerSum, int bufferSize, Path file)
   * It verifies that data matches checksums.
   *******************************************************/
  private static class ChecksumFSInputChecker extends FSInputChecker {
-    public static final Log LOG
-        = LogFactory.getLog(FSInputChecker.class);
+    public static final Logger LOG =
+        LoggerFactory.getLogger(FSInputChecker.class);
     private static final int HEADER_LENGTH = 8;

     private ChecksumFs fs;
DelegationTokenRenewer.java
@@ -26,21 +26,21 @@
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * A daemon thread that waits for the next file system to renew.
  */
 @InterfaceAudience.Private
 public class DelegationTokenRenewer
     extends Thread {
-  private static final Log LOG = LogFactory
-      .getLog(DelegationTokenRenewer.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(DelegationTokenRenewer.class);

   /** The renewable interface used by the renewer. */
   public interface Renewable {
@@ -243,7 +243,7 @@ public <T extends FileSystem & Renewable> void removeRenewAction(
         LOG.error("Interrupted while canceling token for " + fs.getUri()
             + "filesystem");
         if (LOG.isDebugEnabled()) {
-          LOG.debug(ie.getStackTrace());
+          LOG.debug("Exception in removeRenewAction: ", ie);
         }
       }
     }
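The second DelegationTokenRenewer hunk is more than a mechanical rename: logging ie.getStackTrace() only records the StackTraceElement[] array's default toString, whereas passing the exception itself lets the logger print the whole trace. A small sketch of the corrected idiom (the RenewActionLogging class and logCancelFailure method are illustrative wrappers, not code from the patch):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  class RenewActionLogging {
    private static final Logger LOG =
        LoggerFactory.getLogger(RenewActionLogging.class);

    void logCancelFailure(InterruptedException ie) {
      // The old commons-logging call LOG.debug(ie.getStackTrace()) printed only
      // something like [Ljava.lang.StackTraceElement;@1a2b3c. SLF4J's Logger has
      // no debug(Object) overload at all; passing the Throwable as the last
      // argument logs the message plus the full stack trace at DEBUG level.
      if (LOG.isDebugEnabled()) {
        LOG.debug("Exception in removeRenewAction: ", ie);
      }
    }
  }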
FSInputChecker.java
@@ -22,11 +22,12 @@
 import java.io.InputStream;
 import java.util.zip.Checksum;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.nio.ByteBuffer;
 import java.nio.IntBuffer;

@@ -37,8 +38,8 @@
 @InterfaceAudience.LimitedPrivate({"HDFS"})
 @InterfaceStability.Unstable
 abstract public class FSInputChecker extends FSInputStream {
-  public static final Log LOG
-      = LogFactory.getLog(FSInputChecker.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(FSInputChecker.class);

   /** The file name from which data is read from */
   protected Path file;