diff --git a/bin/hbase b/bin/hbase
index d2307c50c33a..601e2c141d96 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -301,10 +301,13 @@ else
# make it easier to check for shaded/not later on.
shaded_jar=""
fi
+# Here we add slf4j-api, commons-logging, jul-to-slf4j and jcl-over-slf4j to the
+# classpath, as they are all logging bridges. We exclude only the log4j* jars so
+# that nothing is actually logged out; they are added back later when necessary.
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
- [ "${f}" != "htrace-core.jar$" ] && \
- [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+     [[ ! "${f}" =~ ^.*/htrace-core\.jar$ ]] && \
+ [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
CLASSPATH="${CLASSPATH}:${f}"
fi
done
@@ -658,7 +661,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
[ "${f}" != "htrace-core.jar$" ] && \
- [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+ [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
echo -n ":${f}"
fi
done
@@ -775,7 +778,11 @@ HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
+# log4j2 does not support setting log level and appender at once, so we need to split HBASE_ROOT_LOGGER
+HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-INFO,console}
+IFS=, read -r -a array <<< "${HBASE_ROOT_LOGGER}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.level=${HBASE_ROOT_LOGGER_LEVEL:-${array[0]}}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.appender=${HBASE_ROOT_LOGGER_APPENDER:-${array[1]}}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
@@ -783,17 +790,19 @@ fi
# Enable security logging on the master and regionserver only
if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
- HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,RFAS}"
+ HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,RFAS}
else
- HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}"
+ HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}
fi
+IFS=, read -r -a array <<< "${HBASE_SECURITY_LOGGER}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.level=${HBASE_SECURITY_LOGGER_LEVEL:-${array[0]}}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.appender=${HBASE_SECURITY_LOGGER_APPENDER:-${array[1]}}"
HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
# by now if we're running a command it means we need logging
-for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
+for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/log4j*.jar; do
if [ -f "${f}" ]; then
CLASSPATH="${CLASSPATH}:${f}"
- break
fi
done
diff --git a/bin/hbase-daemon.sh b/bin/hbase-daemon.sh
index 11c13eb52300..6fafab0ccec0 100755
--- a/bin/hbase-daemon.sh
+++ b/bin/hbase-daemon.sh
@@ -155,12 +155,20 @@ JAVA=$JAVA_HOME/bin/java
export HBASE_LOG_PREFIX=hbase-$HBASE_IDENT_STRING-$command-$HOSTNAME
export HBASE_LOGFILE=$HBASE_LOG_PREFIX.log
-if [ -z "${HBASE_ROOT_LOGGER}" ]; then
-export HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-"INFO,RFA"}
+if [ -z "${HBASE_ROOT_LOGGER_LEVEL}" ]; then
+export HBASE_ROOT_LOGGER_LEVEL=${HBASE_ROOT_LOGGER_LEVEL:-"INFO"}
fi
-if [ -z "${HBASE_SECURITY_LOGGER}" ]; then
-export HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-"INFO,RFAS"}
+if [ -z "${HBASE_ROOT_LOGGER_APPENDER}" ]; then
+export HBASE_ROOT_LOGGER_APPENDER=${HBASE_ROOT_LOGGER_APPENDER:-"RFA"}
+fi
+
+if [ -z "${HBASE_SECURITY_LOGGER_LEVEL}" ]; then
+export HBASE_SECURITY_LOGGER_LEVEL=${HBASE_SECURITY_LOGGER_LEVEL:-"INFO"}
+fi
+
+if [ -z "${HBASE_SECURITY_LOGGER_APPENDER}" ]; then
+export HBASE_SECURITY_LOGGER_APPENDER=${HBASE_SECURITY_LOGGER_APPENDER:-"RFAS"}
fi
HBASE_LOGOUT=${HBASE_LOGOUT:-"$HBASE_LOG_DIR/$HBASE_LOG_PREFIX.out"}
diff --git a/bin/hbase.cmd b/bin/hbase.cmd
index a927227aa9fb..1fd39d2247a0 100644
--- a/bin/hbase.cmd
+++ b/bin/hbase.cmd
@@ -329,7 +329,13 @@ set HBASE_OPTS=%HBASE_OPTS% -XX:OnOutOfMemoryError="taskkill /F /PID %p"
if not defined HBASE_ROOT_LOGGER (
set HBASE_ROOT_LOGGER=INFO,console
)
-set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%"
+
+for /F "tokens=1,2 delims=," %%a in ("%HBASE_ROOT_LOGGER%") do (
+ set HBASE_ROOT_LOGGER_LEVEL=%%a
+ set HBASE_ROOT_LOGGER_APPENDER=%%b
+)
+
+set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger.level="%HBASE_ROOT_LOGGER_LEVEL%" -Dhbase.root.logger.appender="%HBASE_ROOT_LOGGER_APPENDER%"
if defined JAVA_LIBRARY_PATH (
set HBASE_OPTS=%HBASE_OPTS% -Djava.library.path="%JAVA_LIBRARY_PATH%"
@@ -345,7 +351,13 @@ if not defined HBASE_SECURITY_LOGGER (
set HBASE_SECURITY_LOGGER=INFO,DRFAS
)
)
-set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%"
+
+for /F "tokens=1,2 delims=," %%a in ("%HBASE_SECURITY_LOGGER%") do (
+ set HBASE_SECURITY_LOGGER_LEVEL=%%a
+ set HBASE_SECURITY_LOGGER_APPENDER=%%b
+)
+
+set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger.level="%HBASE_SECURITY_LOGGER_LEVEL%" -Dhbase.security.logger.appender="%HBASE_SECURITY_LOGGER_APPENDER%"
set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX%
set java_arguments=%HEAP_SETTINGS% %HBASE_OPTS% -classpath "%CLASSPATH%" %CLASS% %hbase-command-arguments%
diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties
deleted file mode 100644
index 4d68d79db70d..000000000000
--- a/conf/log4j-hbtop.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=WARN,console
-log4j.threshold=WARN
-
-# console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# ZooKeeper will still put stuff at WARN
-log4j.logger.org.apache.zookeeper=ERROR
diff --git a/conf/log4j.properties b/conf/log4j.properties
deleted file mode 100644
index 2282fa5d4a35..000000000000
--- a/conf/log4j.properties
+++ /dev/null
@@ -1,139 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-hbase.log.level=INFO
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
-log4j.appender.asyncconsole.target=System.err
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=${hbase.log.level}
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=${hbase.log.level}
-log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level}
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level}
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level}
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-
-# Disable request log by default, you can enable this by changing the appender
-log4j.category.http.requests=INFO,NullAppender
-log4j.additivity.http.requests=false
-# Replace the above with this configuration if you want an http access.log
-#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.accessRFA.File=/var/log/hbase/access.log
-#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.accessRFA.layout.ConversionPattern=%m%n
-#log4j.appender.accessRFA.MaxFileSize=200MB
-#log4j.appender.accessRFA.MaxBackupIndex=10
-# route http.requests to the accessRFA appender
-#log4j.logger.http.requests=INFO,accessRFA
-# disable http.requests.* entries going up to the root logger
-#log4j.additivity.http.requests=false
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/conf/log4j2-hbtop.xml
similarity index 51%
rename from hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
rename to conf/log4j2-hbtop.xml
index 939b453c8d4b..de0fb5769c50 100644
--- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
+++ b/conf/log4j2-hbtop.xml
@@ -1,3 +1,5 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/conf/log4j2.xml b/conf/log4j2.xml
new file mode 100644
index 000000000000..63dbeba3cc7a
--- /dev/null
+++ b/conf/log4j2.xml
@@ -0,0 +1,103 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hbase-archetypes/hbase-client-project/pom.xml b/hbase-archetypes/hbase-client-project/pom.xml
index e8192ed68b34..86217f63975c 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -54,13 +54,23 @@
hbase-client
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-apiruntime
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-core
+ runtime
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ runtime
+
+
+ org.apache.logging.log4j
+ log4j-1.2-apiruntime
diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
deleted file mode 100644
index 0b01e57e6ea6..000000000000
--- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=INFO
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=INFO
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
-# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.xml b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.xml
new file mode 100644
index 000000000000..63dbeba3cc7a
--- /dev/null
+++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.xml
@@ -0,0 +1,103 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml b/hbase-archetypes/hbase-shaded-client-project/pom.xml
index cd8dfaa05948..16276d20e61d 100644
--- a/hbase-archetypes/hbase-shaded-client-project/pom.xml
+++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml
@@ -60,13 +60,23 @@
hbase-shaded-client
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-apiruntime
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-core
+ runtime
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ runtime
+
+
+ org.apache.logging.log4j
+ log4j-1.2-apiruntime
diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
deleted file mode 100644
index 0b01e57e6ea6..000000000000
--- a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=INFO
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=INFO
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
-# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.xml b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.xml
new file mode 100644
index 000000000000..63dbeba3cc7a
--- /dev/null
+++ b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.xml
@@ -0,0 +1,103 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 805351767e37..160293ceb3ea 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -332,12 +332,20 @@
jul-to-slf4j
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+
+
+ org.apache.logging.log4j
+ log4j-core
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-api
diff --git a/hbase-assembly/src/main/assembly/client.xml b/hbase-assembly/src/main/assembly/client.xml
index bd65cb43f8e9..62828fa5afc9 100644
--- a/hbase-assembly/src/main/assembly/client.xml
+++ b/hbase-assembly/src/main/assembly/client.xml
@@ -61,10 +61,8 @@
org.apache.htrace:htrace-core4org.apache.htrace:htrace-coreorg.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:jcl-over-slf4j
- org.slf4j:jul-to-slf4j
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
@@ -149,10 +147,8 @@
org.apache.htrace:htrace-core4org.apache.htrace:htrace-coreorg.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:jcl-over-slf4j
- org.slf4j:jul-to-slf4j
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
index 1c172e9c31ff..ab2a4c5ede8a 100644
--- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
+++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
@@ -50,11 +50,9 @@
org.apache.hbase:hbase-metricsorg.apache.hbase:hbase-metrics-apiorg.apache.hbase:hbase-procedure
- org.apache.hbase:hbase-protocolorg.apache.hbase:hbase-protocol-shadedorg.apache.hbase:hbase-replicationorg.apache.hbase:hbase-rest
- org.apache.hbase:hbase-rsgrouporg.apache.hbase:hbase-serverorg.apache.hbase:hbase-shellorg.apache.hbase:hbase-testing-util
@@ -111,8 +109,8 @@
org.apache.htrace:htrace-core4org.apache.htrace:htrace-coreorg.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
@@ -209,10 +207,8 @@
org.apache.htrace:htrace-core4org.apache.htrace:htrace-coreorg.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:jcl-over-slf4j
- org.slf4j:jul-to-slf4j
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml
index 3d45e4c7d13e..1efc98082a82 100644
--- a/hbase-asyncfs/pom.xml
+++ b/hbase-asyncfs/pom.xml
@@ -149,13 +149,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-apitest
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
index 9b276aca0785..12ba93fb50b9 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
@@ -98,11 +98,6 @@ protected static void startMiniDFSCluster(int servers) throws IOException {
createDirsAndSetProperties();
Configuration conf = UTIL.getConfiguration();
- // Error level to skip some warnings specific to the minicluster. See HBASE-4709
- org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class)
- .setLevel(org.apache.log4j.Level.ERROR);
- org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class)
- .setLevel(org.apache.log4j.Level.ERROR);
TraceUtil.initTracer(conf);
CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build();
diff --git a/hbase-backup/pom.xml b/hbase-backup/pom.xml
index 972ea8d02c99..a1fedc3b267d 100644
--- a/hbase-backup/pom.xml
+++ b/hbase-backup/pom.xml
@@ -148,13 +148,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-apitest
diff --git a/hbase-balancer/pom.xml b/hbase-balancer/pom.xml
index 2fcbc2bc0c8c..c321af556b16 100644
--- a/hbase-balancer/pom.xml
+++ b/hbase-balancer/pom.xml
@@ -108,13 +108,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-coretest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index 63e81d95c64f..f9982d6c6aa9 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -155,13 +155,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-coretest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
index fa44022f8d09..dc94e91f4fde 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
@@ -17,73 +17,82 @@
*/
package org.apache.hadoop.hbase.ipc;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Captor;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
-@RunWith(MockitoJUnitRunner.class)
@Category({ ClientTests.class, SmallTests.class })
public class TestFailedServersLog {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestFailedServersLog.class);
+ HBaseClassTestRule.forClass(TestFailedServersLog.class);
static final int TEST_PORT = 9999;
- private Address addr;
- @Mock
- private Appender mockAppender;
+ private Address addr;
- @Captor
- private ArgumentCaptor captorLoggingEvent;
+ private org.apache.logging.log4j.core.Appender mockAppender;
@Before
public void setup() {
- LogManager.getRootLogger().addAppender(mockAppender);
+ mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+ when(mockAppender.getName()).thenReturn("mockAppender");
+ when(mockAppender.isStarted()).thenReturn(true);
+ ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+ .getLogger(FailedServers.class)).addAppender(mockAppender);
+
}
@After
public void teardown() {
- LogManager.getRootLogger().removeAppender(mockAppender);
+ ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+ .getLogger(FailedServers.class)).removeAppender(mockAppender);
}
@Test
public void testAddToFailedServersLogging() {
- Throwable nullException = new NullPointerException();
+ AtomicReference level = new AtomicReference<>();
+ AtomicReference msg = new AtomicReference();
+ doAnswer(new Answer() {
+ @Override
+ public Void answer(InvocationOnMock invocation) throws Throwable {
+ org.apache.logging.log4j.core.LogEvent logEvent =
+ invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+ level.set(logEvent.getLevel());
+ msg.set(logEvent.getMessage().getFormattedMessage());
+ return null;
+ }
+ }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+
+ Throwable nullException = new NullPointerException();
FailedServers fs = new FailedServers(new Configuration());
addr = Address.fromParts("localhost", TEST_PORT);
fs.addToFailedServers(addr, nullException);
- Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture());
- LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue();
- assertThat(loggingEvent.getLevel(), is(Level.DEBUG));
- assertEquals("Added failed server with address " + addr.toString() + " to list caused by "
- + nullException.toString(),
- loggingEvent.getRenderedMessage());
+ verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class));
+ assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get());
+ assertEquals("Added failed server with address " + addr.toString() + " to list caused by " +
+ nullException.toString(), msg.get());
}
-
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 2252c215fa68..538a9b91c3c5 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -30,7 +30,6 @@
import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
-
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
@@ -39,7 +38,6 @@
import javax.security.sasl.RealmCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -55,16 +53,15 @@
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
import org.junit.Assert;
-import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Strings;
@@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient {
static final String DEFAULT_USER_NAME = "principal";
static final String DEFAULT_USER_PASSWORD = "password";
- private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);
@Rule
public ExpectedException exception = ExpectedException.none();
- @BeforeClass
- public static void before() {
- Logger.getRootLogger().setLevel(Level.DEBUG);
- }
-
@Test
public void testSaslClientUsesGivenRpcProtection() throws Exception {
Token extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 8b9154156ba5..fca3c5bbd53d 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -232,13 +232,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-coretest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
index 89931de7128f..806107b55c66 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
@@ -24,9 +24,6 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -44,23 +41,29 @@ public class TestLog4jUtils {
@Test
public void test() {
- Logger zk = LogManager.getLogger("org.apache.zookeeper");
- Level zkLevel = zk.getEffectiveLevel();
- Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
- Level hbaseZkLevel = hbaseZk.getEffectiveLevel();
- Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client");
- Level clientLevel = client.getEffectiveLevel();
+ org.apache.logging.log4j.Logger zk =
+ org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper");
+ org.apache.logging.log4j.Level zkLevel = zk.getLevel();
+ org.apache.logging.log4j.Logger hbaseZk =
+ org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
+ org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel();
+ org.apache.logging.log4j.Logger client =
+ org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client");
+ org.apache.logging.log4j.Level clientLevel = client.getLevel();
Log4jUtils.disableZkAndClientLoggers();
- assertEquals(Level.OFF, zk.getLevel());
- assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName()));
- assertEquals(Level.OFF, hbaseZk.getLevel());
- assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
- assertEquals(Level.OFF, client.getLevel());
- assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName()));
+ assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel());
+ assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+ Log4jUtils.getEffectiveLevel(zk.getName()));
+ assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel());
+ assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+ Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
+ assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel());
+ assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+ Log4jUtils.getEffectiveLevel(client.getName()));
// restore the level
- zk.setLevel(zkLevel);
- hbaseZk.setLevel(hbaseZkLevel);
- client.setLevel(clientLevel);
+ org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel);
+ org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel);
+ org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel);
}
@Test
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 09558d47508c..e5ca7421550e 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -194,13 +194,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-apitest
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index 0328eee1d503..91d5894287b3 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -272,13 +272,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-apitest
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index a687de8214ca..dec129d45048 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -1,5 +1,7 @@
-
+
- 4.0.0
-
- hbase-build-configuration
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ../hbase-build-configuration
-
+ 4.0.0
+
+ hbase-build-configuration
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ../hbase-build-configuration
+
- hbase-hadoop-compat
- Apache HBase - Hadoop Compatibility
-
+ hbase-hadoop-compat
+ Apache HBase - Hadoop Compatibility
+
Interfaces to be implemented in order to smooth
over hadoop version differences
-
-
+
+ maven-assembly-plugin
@@ -44,156 +46,166 @@
-
- org.apache.maven.plugins
- maven-source-plugin
-
-
- org.apache.maven.plugins
- maven-checkstyle-plugin
-
- true
-
-
-
- net.revelc.code
- warbucks-maven-plugin
-
-
-
+
+ org.apache.maven.plugins
+ maven-source-plugin
+
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+ true
+
+
+
+ net.revelc.code
+ warbucks-maven-plugin
+
+
+
-
-
- org.apache.hbase
- hbase-annotations
- test-jar
- test
-
-
- org.apache.hbase
- hbase-logging
- test-jar
- test
-
-
- org.apache.hbase
- hbase-common
-
-
- org.apache.hbase
- hbase-common
- test-jar
- test
-
-
- org.apache.hbase
- hbase-metrics
-
-
- org.apache.hbase
- hbase-metrics-api
-
-
- org.apache.hbase.thirdparty
- hbase-shaded-miscellaneous
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
-
-
- com.google.guava
- guava
-
-
-
-
- org.apache.hadoop
- hadoop-common
-
-
- org.slf4j
- slf4j-api
-
-
+
+
+ org.apache.hbase
+ hbase-annotations
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-logging
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-common
+
+
+ org.apache.hbase
+ hbase-common
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-metrics
+
+
+ org.apache.hbase
+ hbase-metrics-api
+
+
+ org.apache.hbase.thirdparty
+ hbase-shaded-miscellaneous
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+
+
+ com.google.guava
+ guava
+
+
+
+
+ org.apache.hadoop
+ hadoop-common
+
+
+ org.slf4j
+ slf4j-api
+
+
- javax.activation
- javax.activation-api
- runtime
-
-
- org.apache.commons
- commons-lang3
-
-
- junit
- junit
- test
-
-
- org.slf4j
- jcl-over-slf4j
- test
-
-
- org.slf4j
- jul-to-slf4j
- test
-
-
- org.slf4j
- slf4j-log4j12
- test
-
-
- log4j
- log4j
- test
-
-
+ javax.activation
+ javax.activation-api
+ runtime
+
+
+ org.apache.commons
+ commons-lang3
+
+
+ junit
+ junit
+ test
+
+
+ org.slf4j
+ jcl-over-slf4j
+ test
+
+
+ org.slf4j
+ jul-to-slf4j
+ test
+
+
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
+
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
+
+
-
+
-
- skipHadoopCompatTests
-
-
- skipHadoopCompatTests
-
-
-
- true
- true
-
-
-
- eclipse-specific
-
-
- m2e.version
-
-
-
-
-
+
+ skipHadoopCompatTests
+
+
+ skipHadoopCompatTests
+
+
+
+ true
+ true
+
+
+
+ eclipse-specific
+
+
+ m2e.version
+
+
+
+
+
-
- org.eclipse.m2e
- lifecycle-mapping
-
-
-
-
-
-
-
-
-
-
-
-
+
+ org.eclipse.m2e
+ lifecycle-mapping
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml
index 2a1fd38a7b43..07542e024d70 100644
--- a/hbase-hbtop/pom.xml
+++ b/hbase-hbtop/pom.xml
@@ -92,13 +92,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-coretest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index 51ce06c3d459..8e3251d51532 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -241,13 +241,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-coretest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
index 1fcfa1390c2c..91b2615b81b9 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -44,7 +44,6 @@
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.Tool;
import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -302,8 +301,7 @@ private void process(String urlString) throws Exception {
/**
* A servlet implementation
*/
- @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
- @InterfaceStability.Unstable
+ @InterfaceAudience.Private
public static class Servlet extends HttpServlet {
private static final long serialVersionUID = 1L;
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
index 70ce5ec0b4c4..92dc20d35b59 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
@@ -31,8 +31,7 @@
import org.apache.yetus.audience.InterfaceAudience;
/**
- * Utility functions for reading the log4j logs that are
- * being written by HBase.
+ * Utility functions for reading the log4j logs that are being written by HBase.
*/
@InterfaceAudience.Private
public abstract class LogMonitoring {
@@ -54,13 +53,12 @@ public static void dumpTailOfLogs(
}
}
- private static void dumpTailOfLog(File f, PrintWriter out, long tailKb)
- throws IOException {
+ private static void dumpTailOfLog(File f, PrintWriter out, long tailKb) throws IOException {
FileInputStream fis = new FileInputStream(f);
BufferedReader r = null;
try {
FileChannel channel = fis.getChannel();
- channel.position(Math.max(0, channel.size() - tailKb*1024));
+ channel.position(Math.max(0, channel.size() - tailKb * 1024));
r = new BufferedReader(new InputStreamReader(fis));
r.readLine(); // skip the first partial line
String line;
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
index 2c5d0c42b6da..4b8bb6b11b57 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
@@ -22,6 +22,7 @@
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+
import java.io.File;
import java.net.BindException;
import java.net.SocketException;
@@ -51,9 +52,6 @@
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -63,11 +61,11 @@
/**
* Test LogLevel.
*/
-@Category({MiscTests.class, SmallTests.class})
+@Category({ MiscTests.class, SmallTests.class })
public class TestLogLevel {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestLogLevel.class);
+ HBaseClassTestRule.forClass(TestLogLevel.class);
private static String keystoresDir;
private static String sslConfDir;
@@ -75,9 +73,10 @@ public class TestLogLevel {
private static Configuration clientConf;
private static Configuration sslConf;
private static final String logName = TestLogLevel.class.getName();
- private static final Logger log = LogManager.getLogger(logName);
+ private static final org.apache.logging.log4j.Logger log =
+ org.apache.logging.log4j.LogManager.getLogger(logName);
private final static String PRINCIPAL = "loglevel.principal";
- private final static String KEYTAB = "loglevel.keytab";
+ private final static String KEYTAB = "loglevel.keytab";
private static MiniKdc kdc;
@@ -106,8 +105,7 @@ public static void setUp() throws Exception {
}
/**
- * Sets up {@link MiniKdc} for testing security.
- * Copied from HBaseTestingUtility#setupMiniKdc().
+ * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc().
*/
static private MiniKdc setupMiniKdc() throws Exception {
Properties conf = MiniKdc.createConf();
@@ -125,7 +123,7 @@ static private MiniKdc setupMiniKdc() throws Exception {
kdc = new MiniKdc(conf, dir);
kdc.start();
} catch (BindException e) {
- FileUtils.deleteDirectory(dir); // clean directory
+ FileUtils.deleteDirectory(dir); // clean directory
numTries++;
if (numTries == 3) {
log.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
@@ -151,15 +149,15 @@ static private void setupSSL(File base) throws Exception {
}
/**
- * Get the SSL configuration.
- * This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop.
+ * Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in
+ * Hadoop.
* @return {@link Configuration} instance with ssl configs loaded.
* @param conf to pull client/server SSL settings filename from
*/
- private static Configuration getSslConfig(Configuration conf){
+ private static Configuration getSslConfig(Configuration conf) {
Configuration sslConf = new Configuration(false);
String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY);
- String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY);
+ String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY);
sslConf.addResource(sslServerConfFile);
sslConf.addResource(sslClientConfFile);
sslConf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile);
@@ -184,36 +182,29 @@ public static void tearDown() {
public void testCommandOptions() throws Exception {
final String className = this.getClass().getName();
- assertFalse(validateCommand(new String[] {"-foo" }));
+ assertFalse(validateCommand(new String[] { "-foo" }));
// fail due to insufficient number of arguments
assertFalse(validateCommand(new String[] {}));
- assertFalse(validateCommand(new String[] {"-getlevel" }));
- assertFalse(validateCommand(new String[] {"-setlevel" }));
- assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" }));
+ assertFalse(validateCommand(new String[] { "-getlevel" }));
+ assertFalse(validateCommand(new String[] { "-setlevel" }));
+ assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" }));
// valid command arguments
- assertTrue(validateCommand(
- new String[] {"-getlevel", "foo.bar:8080", className }));
- assertTrue(validateCommand(
- new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
- assertTrue(validateCommand(
- new String[] {"-getlevel", "foo.bar:8080", className }));
- assertTrue(validateCommand(
- new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
+ assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
+ assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
+ assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
+ assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
// fail due to the extra argument
- assertFalse(validateCommand(
- new String[] {"-getlevel", "foo.bar:8080", className, "blah" }));
- assertFalse(validateCommand(
- new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
- assertFalse(validateCommand(
- new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080",
- className }));
+ assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" }));
+ assertFalse(
+ validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
+ assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel",
+ "foo.bar:8080", className }));
}
/**
* Check to see if a command can be accepted.
- *
* @param args a String array of arguments
* @return true if the command can be accepted, false if not.
*/
@@ -232,40 +223,32 @@ private boolean validateCommand(String[] args) {
}
/**
- * Creates and starts a Jetty server binding at an ephemeral port to run
- * LogLevel servlet.
+ * Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet.
* @param protocol "http" or "https"
* @param isSpnego true if SPNEGO is enabled
* @return a created HttpServer object
* @throws Exception if unable to create or start a Jetty server
*/
- private HttpServer createServer(String protocol, boolean isSpnego)
- throws Exception {
- HttpServer.Builder builder = new HttpServer.Builder()
- .setName("..")
- .addEndpoint(new URI(protocol + "://localhost:0"))
- .setFindPort(true)
- .setConf(serverConf);
+ private HttpServer createServer(String protocol, boolean isSpnego) throws Exception {
+ HttpServer.Builder builder = new HttpServer.Builder().setName("..")
+ .addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf);
if (isSpnego) {
// Set up server Kerberos credentials.
// Since the server may fall back to simple authentication,
// use ACL to make sure the connection is Kerberos/SPNEGO authenticated.
- builder.setSecurityEnabled(true)
- .setUsernameConfKey(PRINCIPAL)
- .setKeytabConfKey(KEYTAB)
- .setACL(new AccessControlList("client"));
+ builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB)
+ .setACL(new AccessControlList("client"));
}
// if using HTTPS, configure keystore/truststore properties.
if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) {
- builder = builder.
- keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
- .keyStore(sslConf.get("ssl.server.keystore.location"),
- sslConf.get("ssl.server.keystore.password"),
- sslConf.get("ssl.server.keystore.type", "jks"))
- .trustStore(sslConf.get("ssl.server.truststore.location"),
- sslConf.get("ssl.server.truststore.password"),
- sslConf.get("ssl.server.truststore.type", "jks"));
+ builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+ .keyStore(sslConf.get("ssl.server.keystore.location"),
+ sslConf.get("ssl.server.keystore.password"),
+ sslConf.get("ssl.server.keystore.type", "jks"))
+ .trustStore(sslConf.get("ssl.server.truststore.location"),
+ sslConf.get("ssl.server.truststore.password"),
+ sslConf.get("ssl.server.truststore.type", "jks"));
}
HttpServer server = builder.build();
@@ -274,31 +257,29 @@ private HttpServer createServer(String protocol, boolean isSpnego)
}
private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
- final boolean isSpnego)
- throws Exception {
- testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, Level.DEBUG.toString());
+ final boolean isSpnego) throws Exception {
+ testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego,
+ org.apache.logging.log4j.Level.DEBUG.toString());
}
/**
* Run both client and server using the given protocol.
- *
* @param bindProtocol specify either http or https for server
* @param connectProtocol specify either http or https for client
* @param isSpnego true if SPNEGO is enabled
* @throws Exception if client can't accesss server.
*/
private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
- final boolean isSpnego, final String newLevel)
- throws Exception {
+ final boolean isSpnego, final String newLevel) throws Exception {
if (!LogLevel.isValidProtocol(bindProtocol)) {
throw new Exception("Invalid server protocol " + bindProtocol);
}
if (!LogLevel.isValidProtocol(connectProtocol)) {
throw new Exception("Invalid client protocol " + connectProtocol);
}
- Level oldLevel = log.getEffectiveLevel();
+ org.apache.logging.log4j.Level oldLevel = log.getLevel();
assertNotEquals("Get default Log Level which shouldn't be ERROR.",
- Level.ERROR, oldLevel);
+ org.apache.logging.log4j.Level.ERROR, oldLevel);
// configs needed for SPNEGO at server side
if (isSpnego) {
@@ -319,8 +300,8 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect
String keytabFilePath = keyTabFile.getAbsolutePath();
- UserGroupInformation clientUGI = UserGroupInformation.
- loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
+ UserGroupInformation clientUGI =
+ UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
try {
clientUGI.doAs((PrivilegedExceptionAction) () -> {
// client command line
@@ -334,44 +315,38 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect
}
// restore log level
- GenericTestUtils.setLogLevel(log, oldLevel);
+ org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel);
}
/**
- * Run LogLevel command line to start a client to get log level of this test
- * class.
- *
+ * Run LogLevel command line to start a client to get log level of this test class.
* @param protocol specify either http or https
* @param authority daemon's web UI address
* @throws Exception if unable to connect
*/
private void getLevel(String protocol, String authority) throws Exception {
- String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol};
+ String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol };
CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
cli.run(getLevelArgs);
}
/**
- * Run LogLevel command line to start a client to set log level of this test
- * class to debug.
- *
+ * Run LogLevel command line to start a client to set log level of this test class to debug.
* @param protocol specify either http or https
* @param authority daemon's web UI address
* @throws Exception if unable to run or log level does not change as expected
*/
- private void setLevel(String protocol, String authority, String newLevel)
- throws Exception {
- String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol};
+ private void setLevel(String protocol, String authority, String newLevel) throws Exception {
+ String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol };
CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
cli.run(setLevelArgs);
assertEquals("new level not equal to expected: ", newLevel.toUpperCase(),
- log.getEffectiveLevel().toString());
+ log.getLevel().toString());
}
/**
* Test setting log level to "Info".
- *
* @throws Exception if client can't set log level to INFO.
*/
@Test
@@ -381,7 +356,6 @@ public void testInfoLogLevel() throws Exception {
/**
* Test setting log level to "Error".
- *
* @throws Exception if client can't set log level to ERROR.
*/
@Test
@@ -391,18 +365,15 @@ public void testErrorLogLevel() throws Exception {
/**
* Server runs HTTP, no SPNEGO.
- *
- * @throws Exception if http client can't access http server,
- * or http client can access https server.
+ * @throws Exception if http client can't access http server, or http client can access https
+ * server.
*/
@Test
public void testLogLevelByHttp() throws Exception {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false);
try {
- testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
- false);
- fail("An HTTPS Client should not have succeeded in connecting to a " +
- "HTTP server");
+ testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false);
+ fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
} catch (SSLException e) {
exceptionShouldContains("Unrecognized SSL message", e);
}
@@ -410,18 +381,15 @@ public void testLogLevelByHttp() throws Exception {
/**
* Server runs HTTP + SPNEGO.
- *
- * @throws Exception if http client can't access http server,
- * or http client can access https server.
+ * @throws Exception if http client can't access http server, or http client can access https
+ * server.
*/
@Test
public void testLogLevelByHttpWithSpnego() throws Exception {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true);
try {
- testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
- true);
- fail("An HTTPS Client should not have succeeded in connecting to a " +
- "HTTP server");
+ testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true);
+ fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
} catch (SSLException e) {
exceptionShouldContains("Unrecognized SSL message", e);
}
@@ -429,19 +397,15 @@ public void testLogLevelByHttpWithSpnego() throws Exception {
/**
* Server runs HTTPS, no SPNEGO.
- *
- * @throws Exception if https client can't access https server,
- * or https client can access http server.
+ * @throws Exception if https client can't access https server, or https client can access http
+ * server.
*/
@Test
public void testLogLevelByHttps() throws Exception {
- testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
- false);
+ testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false);
try {
- testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
- false);
- fail("An HTTP Client should not have succeeded in connecting to a " +
- "HTTPS server");
+ testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false);
+ fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
} catch (SocketException e) {
exceptionShouldContains("Unexpected end of file from server", e);
}
@@ -449,32 +413,27 @@ public void testLogLevelByHttps() throws Exception {
/**
* Server runs HTTPS + SPNEGO.
- *
- * @throws Exception if https client can't access https server,
- * or https client can access http server.
+ * @throws Exception if https client can't access https server, or https client can access http
+ * server.
*/
@Test
public void testLogLevelByHttpsWithSpnego() throws Exception {
- testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
- true);
+ testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true);
try {
- testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
- true);
- fail("An HTTP Client should not have succeeded in connecting to a " +
- "HTTPS server");
- } catch (SocketException e) {
+ testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true);
+ fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
+ } catch (SocketException e) {
exceptionShouldContains("Unexpected end of file from server", e);
}
}
/**
- * Assert that a throwable or one of its causes should contain the substr in its message.
- *
- * Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util
- * method which asserts t.toString() contains the substr. As the original throwable may have been
- * wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes.
- * After stop supporting Hadoop2, this method can be removed and assertion in tests can use
- * t.getCause() directly, similar to HADOOP-15280.
+ * Assert that a throwable or one of its causes should contain the substr in its message. Ideally
+ * we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method
+ * which asserts t.toString() contains the substr. As the original throwable may have been wrapped
+ * in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After stop
+ * supporting Hadoop2, this method can be removed and assertion in tests can use t.getCause()
+ * directly, similar to HADOOP-15280.
*/
private static void exceptionShouldContains(String substr, Throwable throwable) {
Throwable t = throwable;
@@ -486,6 +445,6 @@ private static void exceptionShouldContains(String substr, Throwable throwable)
t = t.getCause();
}
throw new AssertionError("Expected to find '" + substr + "' but got unexpected exception:" +
- StringUtils.stringifyException(throwable), throwable);
+ StringUtils.stringifyException(throwable), throwable);
}
}
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index f508ffd08b36..83c5874923be 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -249,13 +249,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impltest
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
diff --git a/hbase-logging/pom.xml b/hbase-logging/pom.xml
index d48ffca32248..c1409b35071e 100644
--- a/hbase-logging/pom.xml
+++ b/hbase-logging/pom.xml
@@ -38,7 +38,7 @@
src/test/resources
- log4j.properties
+ log4j2.xml
@@ -80,13 +80,33 @@
org.slf4j
- slf4j-log4j12
+ jcl-over-slf4j
+ test
- log4j
- log4j
+ org.slf4j
+ jul-to-slf4j
+ test
+
+
+ org.apache.logging.log4j
+ log4j-api
+ provided
+
+
+ org.apache.logging.log4j
+ log4j-core
+ provided
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
+
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
+
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
index 28d29bf30131..b0711d7e8f1a 100644
--- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
+++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
@@ -19,16 +19,15 @@
import java.io.File;
import java.io.IOException;
-import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import org.apache.yetus.audience.InterfaceAudience;
/**
- * The actual class for operating on log4j.
+ * The actual class for operating on log4j2.
*
* This class will depend on log4j directly, so callers should not use this class directly to avoid
- * introducing log4j dependencies to downstream users. Please call the methods in
+ * introducing log4j2 dependencies to downstream users. Please call the methods in
* {@link Log4jUtils}, as they will call the methods here through reflection.
*/
@InterfaceAudience.Private
@@ -38,32 +37,53 @@ private InternalLog4jUtils() {
}
static void setLogLevel(String loggerName, String levelName) {
- org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
- org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase());
+ org.apache.logging.log4j.Level level =
+ org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase());
if (!level.toString().equalsIgnoreCase(levelName)) {
throw new IllegalArgumentException("Unsupported log level " + levelName);
}
- logger.setLevel(level);
+ org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level);
}
static String getEffectiveLevel(String loggerName) {
- org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
- return logger.getEffectiveLevel().toString();
+ org.apache.logging.log4j.Logger logger =
+ org.apache.logging.log4j.LogManager.getLogger(loggerName);
+ return logger.getLevel().name();
}
static Set getActiveLogFiles() throws IOException {
Set ret = new HashSet<>();
- org.apache.log4j.Appender a;
- @SuppressWarnings("unchecked")
- Enumeration e =
- org.apache.log4j.Logger.getRootLogger().getAllAppenders();
- while (e.hasMoreElements()) {
- a = e.nextElement();
- if (a instanceof org.apache.log4j.FileAppender) {
- org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a;
- String filename = fa.getFile();
- ret.add(new File(filename));
- }
+ org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
+ if (!(logger instanceof org.apache.logging.log4j.core.Logger)) {
+ return ret;
+ }
+ org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
+ for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) {
+ if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName();
+ ret.add(new File(fileName));
+ } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.AbstractFileAppender<?>) appender).getFileName();
+ ret.add(new File(fileName));
+ } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName();
+ ret.add(new File(fileName));
+ } else
+ if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender)
+ .getFileName();
+ ret.add(new File(fileName));
+ } else
+ if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender)
+ .getFileName();
+ ret.add(new File(fileName));
+ }
}
return ret;
}
diff --git a/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java
new file mode 100644
index 000000000000..7b3876ce0833
--- /dev/null
+++ b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.log4j;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.Writer;
+
+/**
+ * Just a copy of the old log4j12 FileAppender. The ContainerLogAppender for YARN NodeManager needs
+ * this class but the log4j-1.2-api bridge does not provide it which causes the UTs in
+ * hbase-mapreduce module to fail if we start a separated MR cluster.
+ */
+public class FileAppender extends WriterAppender {
+
+ /**
+ * Controls file truncatation. The default value for this variable is true, meaning
+ * that by default a FileAppender will append to an existing file and not truncate
+ * it.
+ *
+ * This option is meaningful only if the FileAppender opens the file.
+ */
+ protected boolean fileAppend = true;
+
+ /**
+ * The name of the log file.
+ */
+ protected String fileName = null;
+
+ /**
+ * Do we do bufferedIO?
+ */
+ protected boolean bufferedIO = false;
+
+ /**
+ * Determines the size of IO buffer be. Default is 8K.
+ */
+ protected int bufferSize = 8 * 1024;
+
+ /**
+ * The default constructor does not do anything.
+ */
+ public FileAppender() {
+ }
+
+ /**
+ * Instantiate a FileAppender and open the file designated by fileName.
+ * The opened filename will become the output destination for this appender.
+ *
+ * If the append parameter is true, the file will be appended to. Otherwise, the file
+ * designated by fileName will be truncated before being opened.
+ *
+ * If the bufferedIO parameter is true, then buffered IO will be used to
+ * write to the output file.
+ */
+ public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO,
+ int bufferSize) throws IOException {
+ this.layout = layout;
+ this.setFile(fileName, append, bufferedIO, bufferSize);
+ }
+
+ /**
+ * Instantiate a FileAppender and open the file designated by fileName. The opened
+ * filename will become the output destination for this appender.
+ *
+ * If the append parameter is true, the file will be appended to. Otherwise, the file
+ * designated by fileName will be truncated before being opened.
+ */
+ public FileAppender(Layout layout, String fileName, boolean append) throws IOException {
+ this.layout = layout;
+ this.setFile(fileName, append, false, bufferSize);
+ }
+
+ /**
+ * Instantiate a FileAppender and open the file designated by filename. The opened
+ * filename will become the output destination for this appender.
+ *
+ * The file will be appended to.
+ */
+ public FileAppender(Layout layout, String fileName) throws IOException {
+ this(layout, fileName, true);
+ }
+
+ /**
+ * The File property takes a string value which should be the name of the file to append
+ * to.
+ *
+ * Note that the special values "System.out" or "System.err" are no
+ * longer honored.
+ *
+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
+ * options are set.
+ */
+ public void setFile(String file) {
+ // Trim spaces from both ends. The users probably does not want
+ // trailing spaces in file names.
+ String val = file.trim();
+ fileName = val;
+ }
+
+ /**
+ * Returns the value of the Append option.
+ */
+ public boolean getAppend() {
+ return fileAppend;
+ }
+
+ /** Returns the value of the File option. */
+ public String getFile() {
+ return fileName;
+ }
+
+ /**
+ * If the value of File is not null, then {@link #setFile} is called with the
+ * values of File and Append properties.
+ * @since 0.8.1
+ */
+ @Override
+ public void activateOptions() {
+ if (fileName != null) {
+ try {
+ setFile(fileName, fileAppend, bufferedIO, bufferSize);
+ } catch (java.io.IOException e) {
+ errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e,
+ org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE);
+ }
+ }
+ }
+
+ /**
+ * Closes the previously opened file.
+ */
+ protected void closeFile() {
+ if (this.qw != null) {
+ try {
+ this.qw.close();
+ } catch (java.io.IOException e) {
+ if (e instanceof InterruptedIOException) {
+ Thread.currentThread().interrupt();
+ }
+ // Exceptionally, it does not make sense to delegate to an
+ // ErrorHandler. Since a closed appender is basically dead.
+ }
+ }
+ }
+
+ /**
+ * Get the value of the BufferedIO option.
+ *
+ * BufferedIO will significatnly increase performance on heavily loaded systems.
+ */
+ public boolean getBufferedIO() {
+ return this.bufferedIO;
+ }
+
+ /**
+ * Get the size of the IO buffer.
+ */
+ public int getBufferSize() {
+ return this.bufferSize;
+ }
+
+ /**
+ * The Append option takes a boolean value. It is set to true by default. If
+ * true, then File will be opened in append mode by {@link #setFile setFile} (see
+ * above). Otherwise, {@link #setFile setFile} will open File in truncate mode.
+ *
+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
+ * options are set.
+ */
+ public void setAppend(boolean flag) {
+ fileAppend = flag;
+ }
+
+ /**
+ * The BufferedIO option takes a boolean value. It is set to false by default.
+ * If true, then File will be opened and the resulting {@link java.io.Writer} wrapped
+ * around a {@link BufferedWriter}. BufferedIO will significatnly increase performance on heavily
+ * loaded systems.
+ */
+ public void setBufferedIO(boolean bufferedIO) {
+ this.bufferedIO = bufferedIO;
+ if (bufferedIO) {
+ immediateFlush = false;
+ }
+ }
+
+ /**
+ * Set the size of the IO buffer.
+ */
+ public void setBufferSize(int bufferSize) {
+ this.bufferSize = bufferSize;
+ }
+
+ /**
+ *
+ * Sets and opens the file where the log output will go. The specified file must be
+ * writable.
+ *
+ * If there was already an opened file, then the previous file is closed first.
+ *
+ * Do not use this method directly. To configure a FileAppender or one of its subclasses, set
+ * its properties one by one and then call activateOptions.
+ * @param fileName The path to the log file.
+ * @param append If true will append to fileName. Otherwise will truncate fileName.
+ */
+ public synchronized void setFile(String fileName, boolean append, boolean bufferedIO,
+ int bufferSize) throws IOException {
+
+ // It does not make sense to have immediate flush and bufferedIO.
+ if (bufferedIO) {
+ setImmediateFlush(false);
+ }
+
+ reset();
+ FileOutputStream ostream = null;
+ try {
+ //
+ // attempt to create file
+ //
+ ostream = new FileOutputStream(fileName, append);
+ } catch (FileNotFoundException ex) {
+ //
+ // if parent directory does not exist then
+ // attempt to create it and try to create file
+ // see bug 9150
+ //
+ String parentName = new File(fileName).getParent();
+ if (parentName != null) {
+ File parentDir = new File(parentName);
+ if (!parentDir.exists() && parentDir.mkdirs()) {
+ ostream = new FileOutputStream(fileName, append);
+ } else {
+ throw ex;
+ }
+ } else {
+ throw ex;
+ }
+ }
+ Writer fw = createWriter(ostream);
+ if (bufferedIO) {
+ fw = new BufferedWriter(fw, bufferSize);
+ }
+ this.setQWForFiles(fw);
+ this.fileName = fileName;
+ this.fileAppend = append;
+ this.bufferedIO = bufferedIO;
+ this.bufferSize = bufferSize;
+ writeHeader();
+ }
+
+ /**
+ * Sets the quiet writer being used. This method is overriden by {@code RollingFileAppender}.
+ */
+ protected void setQWForFiles(Writer writer) {
+ this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler);
+ }
+
+ /**
+ * Close any previously opened file and call the parent's reset.
+ */
+ @Override
+ protected void reset() {
+ closeFile();
+ this.fileName = null;
+ super.reset();
+ }
+}
diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties
deleted file mode 100644
index c322699ced24..000000000000
--- a/hbase-logging/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
diff --git a/hbase-logging/src/test/resources/log4j2.xml b/hbase-logging/src/test/resources/log4j2.xml
new file mode 100644
index 000000000000..643fae61c7c8
--- /dev/null
+++ b/hbase-logging/src/test/resources/log4j2.xml
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 5bd03f4e394c..65042aad50b5 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -260,13 +260,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 63c1760626f0..7614b8376d07 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -25,24 +25,16 @@
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
-
import javax.crypto.spec.SecretKeySpec;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.zookeeper.ZooKeeper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability;
@@ -52,6 +44,7 @@
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.log.HBaseMarkers;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@@ -61,6 +54,10 @@
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.ZooKeeper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@@ -583,7 +580,7 @@ public void initTestTable() throws IOException {
@Override
protected int doWork() throws IOException {
if (!isVerbose) {
- LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN);
+ Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
}
if (numTables > 1) {
return parallelLoadTables();
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index ec8bb4d19582..0db0458b26ed 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -133,13 +133,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 9c2bcc2de7d9..0496cc3db793 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -141,13 +141,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index 13cfc739d86c..5cf027096bd5 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -130,13 +130,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index f56ca628f4b3..7575fd176144 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -138,13 +138,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index d583d7aad1f5..2cf40cfa40a3 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -354,13 +354,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 04df64e44432..858835749b9a 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -494,13 +494,23 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-1.2-api
+ test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 29e888372908..a036f93d2d07 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -639,7 +639,6 @@ public MiniDFSCluster startMiniDFSClusterForTestWAL(int namenodePort) throws IOE
* This is used before starting HDFS and map-reduce mini-clusters Run something like the below to
* check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in
* the conf.
- *
*