80 changes: 77 additions & 3 deletions x-pack/plugin/core/src/main/config/log4j2.properties
@@ -1,19 +1,93 @@
appender.audit_rolling.type = RollingFile
appender.audit_rolling.name = audit_rolling
appender.audit_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_access.log
appender.audit_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_audit.log
appender.audit_rolling.layout.type = PatternLayout
appender.audit_rolling.layout.pattern = [%d{ISO8601}] %m%n
appender.audit_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_access-%d{yyyy-MM-dd}.log
appender.audit_rolling.layout.pattern = {\
"@timestamp":"%d{ISO8601}"\
%varsNotEmpty{, "node.name":"%enc{%map{node.name}}{JSON}"}\
%varsNotEmpty{, "node.id":"%enc{%map{node.id}}{JSON}"}\
%varsNotEmpty{, "host.name":"%enc{%map{host.name}}{JSON}"}\
%varsNotEmpty{, "host.ip":"%enc{%map{host.ip}}{JSON}"}\
%varsNotEmpty{, "event.type":"%enc{%map{event.type}}{JSON}"}\
%varsNotEmpty{, "event.action":"%enc{%map{event.action}}{JSON}"}\
%varsNotEmpty{, "user.name":"%enc{%map{user.name}}{JSON}"}\
%varsNotEmpty{, "user.run_by.name":"%enc{%map{user.run_by.name}}{JSON}"}\
%varsNotEmpty{, "user.run_as.name":"%enc{%map{user.run_as.name}}{JSON}"}\
%varsNotEmpty{, "user.realm":"%enc{%map{user.realm}}{JSON}"}\
%varsNotEmpty{, "user.run_by.realm":"%enc{%map{user.run_by.realm}}{JSON}"}\
%varsNotEmpty{, "user.run_as.realm":"%enc{%map{user.run_as.realm}}{JSON}"}\
%varsNotEmpty{, "user.roles":%map{user.roles}}\
%varsNotEmpty{, "origin.type":"%enc{%map{origin.type}}{JSON}"}\
%varsNotEmpty{, "origin.address":"%enc{%map{origin.address}}{JSON}"}\
%varsNotEmpty{, "realm":"%enc{%map{realm}}{JSON}"}\
%varsNotEmpty{, "url.path":"%enc{%map{url.path}}{JSON}"}\
%varsNotEmpty{, "url.query":"%enc{%map{url.query}}{JSON}"}\
%varsNotEmpty{, "request.body":"%enc{%map{request.body}}{JSON}"}\
%varsNotEmpty{, "action":"%enc{%map{action}}{JSON}"}\
%varsNotEmpty{, "request.name":"%enc{%map{request.name}}{JSON}"}\
%varsNotEmpty{, "indices":%map{indices}}\
%varsNotEmpty{, "opaque_id":"%enc{%map{opaque_id}}{JSON}"}\
%varsNotEmpty{, "transport.profile":"%enc{%map{transport.profile}}{JSON}"}\
%varsNotEmpty{, "rule":"%enc{%map{rule}}{JSON}"}\
%varsNotEmpty{, "event.category":"%enc{%map{event.category}}{JSON}"}\
}%n
# "node.name" node name from the `elasticsearch.yml` settings
# "node.id" node id which should not change between cluster restarts
# "host.name" unresolved hostname of the local node
# "host.ip" the local bound ip (i.e. the ip listening for connections)
# "event.type" a received REST request is translated into one or more transport requests. This indicates which processing layer generated the event "rest" or "transport" (internal)
# "event.action" the name of the audited event, eg. "authentication_failed", "access_granted", "run_as_granted", etc.
# "user.name" the subject name as authenticated by a realm
# "user.run_by.name" the original authenticated subject name that is impersonating another one.
# "user.run_as.name" if this "event.action" is of a run_as type, this is the subject name to be impersonated as.
# "user.realm" the name of the realm that authenticated "user.name"
# "user.run_by.realm" the realm name of the impersonating subject ("user.run_by.name")
# "user.run_as.realm" if this "event.action" is of a run_as type, this is the realm name the impersonated user is looked up from
# "user.roles" the roles array of the user; these are the roles that are granting privileges
# "origin.type" it is "rest" if the event is originating (is in relation to) a REST request; possible other values are "transport" and "ip_filter"
# "origin.address" the remote address and port of the first network hop, i.e. a REST proxy or another cluster node
# "realm" name of a realm that has generated an "authentication_failed" or an "authentication_successful"; the subject is not yet authenticated
# "url.path" the URI component between the port and the query string; it is percent (URL) encoded
# "url.query" the URI component after the path and before the fragment; it is percent (URL) encoded
# "request.body" the content of the request body entity, JSON escaped
# "action" an action is the most granular operation that is authorized and this identifies it in a namespaced way (internal)
# "request.name" if the event is in connection to a transport message this is the name of the request class, similar to how rest requests are identified by the url path (internal)
# "indices" the array of indices that the "action" is acting upon
# "opaque_id" opaque value conveyed by the "X-Opaque-Id" request header
# "transport.profile" name of the transport profile in case this is a "connection_granted" or "connection_denied" event
# "rule" name of the applied rulee if the "origin.type" is "ip_filter"
# "event.category" fixed value "elasticsearch-audit"

appender.audit_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_audit-%d{yyyy-MM-dd}.log
appender.audit_rolling.policies.type = Policies
appender.audit_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.audit_rolling.policies.time.interval = 1
appender.audit_rolling.policies.time.modulate = true

appender.deprecated_audit_rolling.type = RollingFile
appender.deprecated_audit_rolling.name = deprecated_audit_rolling
appender.deprecated_audit_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_access.log
appender.deprecated_audit_rolling.layout.type = PatternLayout
appender.deprecated_audit_rolling.layout.pattern = [%d{ISO8601}] %m%n
appender.deprecated_audit_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_access-%d{yyyy-MM-dd}.log
appender.deprecated_audit_rolling.policies.type = Policies
appender.deprecated_audit_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.deprecated_audit_rolling.policies.time.interval = 1
appender.deprecated_audit_rolling.policies.time.modulate = true

logger.xpack_security_audit_logfile.name = org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail
logger.xpack_security_audit_logfile.level = info
logger.xpack_security_audit_logfile.appenderRef.audit_rolling.ref = audit_rolling
logger.xpack_security_audit_logfile.additivity = false

logger.xpack_security_audit_deprecated_logfile.name = org.elasticsearch.xpack.security.audit.logfile.DeprecatedLoggingAuditTrail
# set this to "off" instead of "info" to disable the deprecated appender
# in the 6.x releases both the new and the previous appenders are enabled
# for the logfile auditing
logger.xpack_security_audit_deprecated_logfile.level = info
logger.xpack_security_audit_deprecated_logfile.appenderRef.deprecated_audit_rolling.ref = deprecated_audit_rolling
logger.xpack_security_audit_deprecated_logfile.additivity = false
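
# for example, to keep only the new JSON format, the deprecated appender can be turned off
# with the following override (illustrative, not part of the shipped default):
# logger.xpack_security_audit_deprecated_logfile.level = off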

logger.xmlsig.name = org.apache.xml.security.signature.XMLSignature
logger.xmlsig.level = error
logger.samlxml_decrypt.name = org.opensaml.xmlsec.encryption.support.Decrypter
@@ -8,25 +8,50 @@
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.StringLayout;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;

import java.util.ArrayList;
import java.util.List;

/**
* Logger that captures events and appends them to in-memory lists, with one
* list for each log level. This works with the global log manager context,
* meaning that there can only be a single logger with a given name.
*/
public class CapturingLogger {

public static Logger newCapturingLogger(final Level level) throws IllegalAccessException {
/**
* Constructs a new {@link CapturingLogger} named after the fully qualified name of
* the invoking method. A name can be assigned to only one logger globally, so
* don't call this method more than once from within the same method.
*
* @param level
* The minimum priority level of events that will be captured.
* @param layout
* Optional parameter that sets the layout used to format captured events.
* This is useful because captured events are meant to be inspected (and
* parsed) later, and controlling the printed format makes that parsing
* reliable. If not specified,
* {@code event.getMessage().getFormattedMessage()} is used to format the
* event.
* @return The new logger.
*/
public static Logger newCapturingLogger(final Level level, @Nullable StringLayout layout) throws IllegalAccessException {
// careful, don't "bury" this on the call stack, unless you know what you're doing
final StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
final String name = caller.getClassName() + "." + caller.getMethodName() + "." + level.toString();
final Logger logger = LogManager.getLogger(name);
Loggers.setLevel(logger, level);
final MockAppender appender = new MockAppender(name);
final MockAppender appender = new MockAppender(name, layout);
appender.start();
Loggers.addAppender(logger, appender);
return logger;
@@ -39,11 +64,27 @@ private static MockAppender getMockAppender(final String name) {
return (MockAppender) loggerConfig.getAppenders().get(name);
}

/**
* Checks if the logger's appender has captured any events.
*
* @param name
* The unique global name of the logger.
* @return {@code true} if no event has been captured, {@code false} otherwise.
*/
public static boolean isEmpty(final String name) {
final MockAppender appender = getMockAppender(name);
return appender.isEmpty();
}

/**
* Gets the captured events for a logger by its name.
*
* @param name
* The unique global name of the logger.
* @param level
* The priority level of the captured events to be returned.
* @return A list of captured events formatted as {@code String}s.
*/
public static List<String> output(final String name, final Level level) {
final MockAppender appender = getMockAppender(name);
return appender.output(level);
@@ -57,8 +98,8 @@ private static class MockAppender extends AbstractAppender {
public final List<String> debug = new ArrayList<>();
public final List<String> trace = new ArrayList<>();

private MockAppender(final String name) throws IllegalAccessException {
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null);
private MockAppender(final String name, StringLayout layout) throws IllegalAccessException {
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), layout);
}

@Override
@@ -67,25 +108,34 @@ public void append(LogEvent event) {
// we cannot keep a reference to the event here because Log4j uses a thread-local
// instance under the hood
case "ERROR":
error.add(event.getMessage().getFormattedMessage());
error.add(formatMessage(event));
break;
case "WARN":
warn.add(event.getMessage().getFormattedMessage());
warn.add(formatMessage(event));
break;
case "INFO":
info.add(event.getMessage().getFormattedMessage());
info.add(formatMessage(event));
break;
case "DEBUG":
debug.add(event.getMessage().getFormattedMessage());
debug.add(formatMessage(event));
break;
case "TRACE":
trace.add(event.getMessage().getFormattedMessage());
trace.add(formatMessage(event));
break;
default:
throw invalidLevelException(event.getLevel());
}
}

private String formatMessage(LogEvent event) {
final Layout<?> layout = getLayout();
if (layout instanceof StringLayout) {
return ((StringLayout) layout).toSerializable(event);
} else {
return event.getMessage().getFormattedMessage();
}
}

private IllegalArgumentException invalidLevelException(Level level) {
return new IllegalArgumentException("invalid level, expected [ERROR|WARN|INFO|DEBUG|TRACE] but was [" + level + "]");
}
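For context, a minimal sketch of how a test might drive the new layout-aware factory; the wrapper class, pattern, message and assertion below are illustrative and not part of this change:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.layout.PatternLayout;

import java.util.List;

public class CapturingLoggerUsageExample {

    public static void example() throws IllegalAccessException {
        // format captured events with the bare message only (illustrative pattern)
        PatternLayout layout = PatternLayout.newBuilder().withPattern("%m").build();
        // the logger name mirrors the calling class, method and level
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, layout);
        logger.info("hello");
        // captured events are retrieved per level, formatted by the supplied layout
        List<String> lines = CapturingLogger.output(logger.getName(), Level.INFO);
        assert lines.size() == 1 && "hello".equals(lines.get(0));
    }
}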
4 changes: 4 additions & 0 deletions x-pack/plugin/security/build.gradle
@@ -146,6 +146,10 @@ processResources {

sourceSets.test.resources {
srcDir '../core/src/test/resources'
// this is required because audit logging tests (LoggingAuditTrailTests)
// parse the actual log4j2.properties config file to extract and test
// the log line pattern. These tests need access to the file as a resource.
srcDir '../core/src/main/config'
}
dependencyLicenses {
mapping from: /java-support|opensaml-.*/, to: 'shibboleth'
@@ -172,6 +172,7 @@
import org.elasticsearch.xpack.security.audit.AuditTrailService;
import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail;
import org.elasticsearch.xpack.security.audit.index.IndexNameResolver;
import org.elasticsearch.xpack.security.audit.logfile.DeprecatedLoggingAuditTrail;
import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.InternalRealms;
@@ -422,6 +423,9 @@ Collection<Object> createComponents(Client client, ThreadPool threadPool, Cluste
switch (output) {
case LoggingAuditTrail.NAME:
auditTrails.add(new LoggingAuditTrail(settings, clusterService, threadPool));
// also enable the deprecated format. To disable it, remove its associated
// appender in the log4j2.properties file
auditTrails.add(new DeprecatedLoggingAuditTrail(settings, clusterService, threadPool));
break;
case IndexAuditTrail.NAME:
indexAuditTrail.set(new IndexAuditTrail(settings, client, threadPool, clusterService));
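For reference, this branch (and with it the deprecated trail) is taken when the logfile audit output is enabled; a minimal elasticsearch.yml sketch, with setting names as assumed for the 6.x line rather than taken from this diff:

xpack.security.audit.enabled: true
xpack.security.audit.outputs: [ logfile ]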