# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import logging.config
import os
import signal
import sys
from string import Template

import yaml

from twisted.logger import STDLibLogObserver, globalLogBeginner

import synapse
from synapse.util.logcontext import LoggingContextFilter
from synapse.util.versionstring import get_version_string

from ._base import Config

DEFAULT_LOG_CONFIG = Template("""
version: 1

formatters:
    precise:
        format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - \
%(request)s - %(message)s'

filters:
    context:
        (): synapse.util.logcontext.LoggingContextFilter
        request: ""

handlers:
    file:
        class: logging.handlers.RotatingFileHandler
        formatter: precise
        filename: ${log_file}
        maxBytes: 104857600
        backupCount: 10
        filters: [context]
        encoding: utf8
    console:
        class: logging.StreamHandler
        formatter: precise
        filters: [context]

loggers:
    synapse:
        level: INFO

    synapse.storage.SQL:
        # beware: increasing this to DEBUG will make synapse log sensitive
        # information such as access tokens.
        level: INFO

root:
    level: INFO
    handlers: [file, console]
""")


class LoggingConfig(Config):

    def read_config(self, config):
        self.verbosity = config.get("verbose", 0)
        self.no_redirect_stdio = config.get("no_redirect_stdio", False)
        self.log_config = self.abspath(config.get("log_config"))
        self.log_file = self.abspath(config.get("log_file"))

    def default_config(self, config_dir_path, server_name, **kwargs):
        log_config = os.path.join(config_dir_path, server_name + ".log.config")
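        # "%(log_config)s" in the YAML below is filled from this local variable
        # by the "% locals()" interpolation at the end of the string.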
        return """
        # A yaml python logging config file
        log_config: "%(log_config)s"
        """ % locals()

    def read_arguments(self, args):
        if args.verbose is not None:
            self.verbosity = args.verbose
        if args.no_redirect_stdio is not None:
            self.no_redirect_stdio = args.no_redirect_stdio
        if args.log_config is not None:
            self.log_config = args.log_config
        if args.log_file is not None:
            self.log_file = args.log_file

    def add_arguments(cls, parser):
        logging_group = parser.add_argument_group("logging")
        logging_group.add_argument(
            '-v', '--verbose', dest="verbose", action='count',
            help="The verbosity level. Specify multiple times to increase "
            "verbosity. (Ignored if --log-config is specified.)"
        )
        logging_group.add_argument(
            '-f', '--log-file', dest="log_file",
            help="File to log to. (Ignored if --log-config is specified.)"
        )
        logging_group.add_argument(
            '--log-config', dest="log_config", default=None,
            help="Python logging config file"
        )
        logging_group.add_argument(
            '-n', '--no-redirect-stdio',
            action='store_true', default=None,
            help="Do not redirect stdout/stderr to the log"
        )

    def generate_files(self, config):
        log_config = config.get("log_config")
        if log_config and not os.path.exists(log_config):
            log_file = self.abspath("homeserver.log")
            with open(log_config, "w") as log_config_file:
                log_config_file.write(
                    DEFAULT_LOG_CONFIG.substitute(log_file=log_file)
                )


def setup_logging(config, use_worker_options=False):
    """ Set up python logging

    Args:
        config (LoggingConfig | synapse.config.workers.WorkerConfig):
            configuration data

        use_worker_options (bool): True to use 'worker_log_config' and
            'worker_log_file' options instead of 'log_config' and 'log_file'.
    """
    log_config = (config.worker_log_config if use_worker_options
                  else config.log_config)
    log_file = (config.worker_log_file if use_worker_options
                else config.log_file)

    log_format = (
        "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
        " - %(message)s"
    )

    if log_config is None:
        # We don't have a log config, so fall back to the 'verbosity' param from
        # the config or cmdline. (Note that we generate a log config for new
        # installs, so this will be an unusual case)
        level = logging.INFO
        level_for_storage = logging.INFO
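        # -v raises the root logger to DEBUG; -vv additionally raises
        # synapse.storage.SQL, which can log sensitive data such as access
        # tokens (see the warning in DEFAULT_LOG_CONFIG above).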
        if config.verbosity:
            level = logging.DEBUG
            if config.verbosity > 1:
                level_for_storage = logging.DEBUG

        logger = logging.getLogger('')
        logger.setLevel(level)

        logging.getLogger('synapse.storage.SQL').setLevel(level_for_storage)

        formatter = logging.Formatter(log_format)
        if log_file:
            # TODO: Customisable file size / backup count
            handler = logging.handlers.RotatingFileHandler(
                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3,
                encoding='utf8'
            )

            def sighup(signum, stack):
                logger.info("Closing log file due to SIGHUP")
                handler.doRollover()
                logger.info("Opened new log file due to SIGHUP")
        else:
            handler = logging.StreamHandler()

            def sighup(signum, stack):
                pass

        handler.setFormatter(formatter)

        handler.addFilter(LoggingContextFilter(request=""))

        logger.addHandler(handler)
    else:
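        # A log config file was given: hand the whole thing to
        # logging.config.dictConfig() and re-read it on SIGHUP.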
        def load_log_config():
            with open(log_config, 'r') as f:
                logging.config.dictConfig(yaml.load(f))

        def sighup(signum, stack):
            # it might be better to use a file watcher or something for this.
            load_log_config()
            logging.info("Reloaded log config from %s due to SIGHUP", log_config)

        load_log_config()

    # TODO(paul): obviously this is a terrible mechanism for
    #   stealing SIGHUP, because it means no other part of synapse
    #   can use it instead. If we want to catch SIGHUP anywhere
    #   else as well, I'd suggest we find a nicer way to broadcast
    #   it around.
    if getattr(signal, "SIGHUP", None):
        signal.signal(signal.SIGHUP, sighup)

    # make sure that the first thing we log is a thing we can grep backwards
    # for
    logging.warn("***** STARTING SERVER *****")
    logging.warn(
        "Server %s version %s",
        sys.argv[0], get_version_string(synapse),
    )
    logging.info("Server hostname: %s", config.server_name)

    # It's critical to point twisted's internal logging somewhere, otherwise it
    # stacks up and leaks up to 64K objects;
    # see: https://twistedmatrix.com/trac/ticket/8164
    #
    # Routing to the python logging framework could be a performance problem if
    # the handlers blocked for a long time, as python.logging is a blocking API
    # see https://twistedmatrix.com/documents/current/core/howto/logger.html
    # filed as https://github.com/matrix-org/synapse/issues/1727
    #
    # However this may not be too much of a problem if we are just writing to a file.
    observer = STDLibLogObserver()
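
    # Wrap the observer so that a few known-noisy twisted events (DNS/UDP port
    # announcements and client timeouts) are dropped before they reach the
    # stdlib logging machinery.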
    def _log(event):

        if "log_text" in event:
            if event["log_text"].startswith("DNSDatagramProtocol starting on "):
                return

            if event["log_text"].startswith("(UDP Port "):
                return

            if event["log_text"].startswith("Timing out client"):
                return

        return observer(event)
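
    # beginLoggingTo() takes over twisted's log publishing; stdout and stderr
    # are redirected into the log unless --no-redirect-stdio was given.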
    globalLogBeginner.beginLoggingTo(
        [_log],
        redirectStandardIO=not config.no_redirect_stdio,
    )