26 changes: 16 additions & 10 deletions notebook/log.py
@@ -10,7 +10,7 @@
 from .prometheus.log_functions import prometheus_log_method


-def log_request(handler):
+def log_request(handler, log=access_log, log_json=False):
     """log a bit more information about each request than tornado's default

     - move static file get success to debug-level (reduces noise)
@@ -22,29 +22,35 @@ def log_request(handler):
     request = handler.request
     if status < 300 or status == 304:
         # Successes (or 304 FOUND) are debug-level
-        log_method = access_log.debug
+        log_method = log.debug
     elif status < 400:
-        log_method = access_log.info
+        log_method = log.info
     elif status < 500:
-        log_method = access_log.warning
+        log_method = log.warning
     else:
-        log_method = access_log.error
+        log_method = log.error

-    request_time = 1000.0 * handler.request.request_time()
+    request_time = 1000.0 * request.request_time()
     ns = dict(
         status=status,
         method=request.method,
         ip=request.remote_ip,
         uri=request.uri,
-        request_time=request_time,
+        request_time=float('%.2f' % request_time),
     )
-    msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms"
+    msg = "{status} {method} {uri} ({ip}) {request_time:f}ms"
     if status >= 400:
         # log bad referers
         ns['referer'] = request.headers.get('Referer', 'None')
         msg = msg + ' referer={referer}'
     if status >= 500 and status != 502:
         # log all headers if it caused an error
-        log_method(json.dumps(dict(request.headers), indent=2))
-    log_method(msg.format(**ns))
+        if log_json:
+            log_method("", extra=dict(props=dict(request.headers)))
+        else:
+            log_method(json.dumps(dict(request.headers), indent=2))
+    if log_json:
+        log_method("", extra=dict(props=ns))
+    else:
+        log_method(msg.format(**ns))
     prometheus_log_method(handler)
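
For orientation, here is a minimal standalone sketch (not part of the diff) of the two output paths the reworked log_request now selects between, using only the stdlib logging module; the logger name and field values are illustrative:

import logging

logging.basicConfig(level=logging.DEBUG)
access_log = logging.getLogger("access")

# Illustrative request fields, shaped like the ns dict built in log_request.
ns = dict(status=500, method="GET", ip="1.2.3.4",
          uri="/notebooks/foo/bar", request_time=1.23)
msg = "{status} {method} {uri} ({ip}) {request_time:f}ms"

log_json = True
if log_json:
    # Structured path: the fields ride along in extra for a JSON formatter
    # (such as json_logging) to serialize; the message itself stays empty.
    access_log.error("", extra=dict(props=ns))
else:
    # Plain-text path: the same fields are interpolated into one line.
    access_log.error(msg.format(**ns))
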
43 changes: 42 additions & 1 deletion notebook/notebookapp.py
@@ -8,6 +8,7 @@
 import binascii
 import datetime
 import errno
+import functools
 import gettext
 import hashlib
 import hmac
@@ -247,9 +248,12 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager,
             # collapse $HOME to ~
             root_dir = '~' + root_dir[len(home):]

+        # Use the NotebookApp logger and its formatting for tornado request logging.
+        log_function = functools.partial(
+            log_request, log=log, log_json=jupyter_app.log_json)
         settings = dict(
             # basics
-            log_function=log_request,
+            log_function=log_function,
             base_url=base_url,
             default_url=default_url,
             template_path=template_path,
@@ -701,6 +705,43 @@ class NotebookApp(JupyterApp):

     _log_formatter_cls = LogFormatter

+    _json_logging_import_error_logged = False
+
+    log_json = Bool(False, config=True,
+        help=_('Set to True to enable JSON formatted logs. '
+               'Run "pip install notebook[json-logging]" to install the '
+               'required dependent packages. Can also be set using the '
+               'environment variable JUPYTER_ENABLE_JSON_LOGGING=true.')
+    )
+
+    @default('log_json')
+    def _default_log_json(self):
+        """Get the log_json value from the environment."""
+        return os.getenv('JUPYTER_ENABLE_JSON_LOGGING', 'false').lower() == 'true'
+
+    @validate('log_json')
+    def _validate_log_json(self, proposal):
+        # If log_json=True, see if the json_logging package can be imported and
+        # override _log_formatter_cls if so.
+        value = proposal['value']
+        if value:
+            try:
+                import json_logging
+                self.log.debug('initializing json logging')
+                json_logging.init_non_web(enable_json=True)
+                self._log_formatter_cls = json_logging.JSONLogFormatter
+            except ImportError:
+                # If configured for json logs and we can't do it, log a hint.
+                # Only log the error once though.
+                if not self._json_logging_import_error_logged:
+                    self.log.warning(
+                        'Unable to use json logging due to missing packages. '
+                        'Run "pip install notebook[json-logging]" to fix.'
+                    )
+                    self._json_logging_import_error_logged = True
+                value = False
+        return value
+
     @default('log_level')
     def _default_log_level(self):
         return logging.INFO
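
For completeness, a sketch of how the new option might be turned on from a config file; the trait name and environment variable come from this diff, while the jupyter_notebook_config.py mechanism is the standard traitlets one:

# jupyter_notebook_config.py (illustrative snippet, not part of the diff)
c = get_config()

# Equivalent to exporting JUPYTER_ENABLE_JSON_LOGGING=true before launch,
# or passing --NotebookApp.log_json=True on the command line.
c.NotebookApp.log_json = True
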
40 changes: 40 additions & 0 deletions notebook/tests/test_log.py
@@ -0,0 +1,40 @@
+import unittest
+from unittest import mock
+
+from notebook import log
+
+
+class TestLogRequest(unittest.TestCase):
+
+    @mock.patch('notebook.log.prometheus_log_method')
+    def test_log_request_json(self, mock_prometheus):
+        headers = {'Referer': 'test'}
+        request = mock.Mock(
+            request_time=mock.Mock(return_value=1),
+            headers=headers,
+            method='GET',
+            remote_ip='1.2.3.4',
+            uri='/notebooks/foo/bar'
+        )
+        handler = mock.MagicMock(
+            request=request,
+            get_status=mock.Mock(return_value=500)
+        )
+        logger = mock.MagicMock()
+        log.log_request(handler, log=logger, log_json=True)
+        # Since the status was 500 there should be two calls to log.error,
+        # one with the request headers and another with the other request
+        # parameters.
+        self.assertEqual(2, logger.error.call_count)
+        logger.error.assert_has_calls([
+            mock.call("", extra=dict(props=dict(headers))),
+            mock.call("", extra=dict(props={
+                'status': handler.get_status(),
+                'method': request.method,
+                'ip': request.remote_ip,
+                'uri': request.uri,
+                'request_time': 1000.0,
+                'referer': headers['Referer']
+            }))
+        ])
+        mock_prometheus.assert_called_once_with(handler)
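
Because the handler and the prometheus hook are both mocked, the new test can be run in isolation, for example with python -m pytest notebook/tests/test_log.py or python -m unittest notebook.tests.test_log.
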
32 changes: 32 additions & 0 deletions notebook/tests/test_notebookapp.py
@@ -190,6 +190,12 @@ def test_list_running_servers(self):
         assert len(servers) >= 1
         assert self.port in {info['port'] for info in servers}

+    def test_log_json_default(self):
+        self.assertFalse(self.notebook.log_json)
+
+    def test_validate_log_json(self):
+        self.assertFalse(self.notebook._validate_log_json(dict(value=False)))
+

 # UNIX sockets aren't available on Windows.
 if not sys.platform.startswith('win'):
@@ -201,3 +207,29 @@ def test_list_running_sock_servers(self):
             servers = list(notebookapp.list_running_servers())
             assert len(servers) >= 1
             assert self.sock in {info['sock'] for info in servers}
+
+
+class NotebookAppJSONLoggingTests(NotebookTestBase):
+    """Tests for when json logging is enabled."""
+    @classmethod
+    def setup_class(cls):
+        super(NotebookAppJSONLoggingTests, cls).setup_class()
+        try:
+            import json_logging
+            cls.json_logging_available = True
+        except ImportError:
+            cls.json_logging_available = False
+
+    @classmethod
+    def get_patch_env(cls):
+        test_env = super(NotebookAppJSONLoggingTests, cls).get_patch_env()
+        test_env.update({'JUPYTER_ENABLE_JSON_LOGGING': 'true'})
+        return test_env
+
+    def test_log_json_enabled(self):
+        self.assertTrue(self.notebook._default_log_json())
+
+    def test_validate_log_json(self):
+        self.assertEqual(
+            self.notebook._validate_log_json(dict(value=True)),
+            self.json_logging_available)
1 change: 1 addition & 0 deletions setup.py
@@ -119,6 +119,7 @@
                  'nbval', 'nose-exclude', 'selenium', 'pytest', 'pytest-cov'],
         'docs': ['sphinx', 'nbsphinx', 'sphinxcontrib_github_alt', 'sphinx_rtd_theme'],
         'test:sys_platform != "win32"': ['requests-unixsocket'],
+        'json-logging': ['json-logging']
     },
     python_requires = '>=3.5',
     entry_points = {
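
With this extra declared, the optional dependency can be installed via "pip install notebook[json-logging]", the same command referenced by the new log_json help text and by the ImportError warning in notebookapp.py.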