2017-08-15 08:57:46 -06:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright 2017 New Vector Ltd
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2017-10-02 11:03:59 -06:00
|
|
|
|
2017-08-15 08:57:46 -06:00
|
|
|
import gc
|
|
|
|
import logging
|
2019-02-08 10:25:57 -07:00
|
|
|
import signal
|
2017-10-02 11:03:59 -06:00
|
|
|
import sys
|
2019-02-08 10:25:57 -07:00
|
|
|
import traceback
|
2017-10-02 11:03:59 -06:00
|
|
|
|
2018-10-12 07:14:08 -06:00
|
|
|
import psutil
|
2018-07-09 00:09:20 -06:00
|
|
|
from daemonize import Daemonize
|
|
|
|
|
|
|
|
from twisted.internet import error, reactor
|
2019-02-11 03:36:26 -07:00
|
|
|
from twisted.protocols.tls import TLSMemoryBIOFactory
|
2018-07-09 00:09:20 -06:00
|
|
|
|
2019-02-12 06:55:58 -07:00
|
|
|
import synapse
|
2019-01-30 07:17:55 -07:00
|
|
|
from synapse.app import check_bind_error
|
2019-02-08 10:25:57 -07:00
|
|
|
from synapse.crypto import context_factory
|
2018-07-09 00:09:20 -06:00
|
|
|
from synapse.util import PreserveLoggingContext
|
|
|
|
from synapse.util.rlimit import change_resource_limit
|
2019-02-12 06:55:58 -07:00
|
|
|
from synapse.util.versionstring import get_version_string
|
2018-07-09 00:09:20 -06:00
|
|
|
|
2017-12-17 05:04:05 -07:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2019-02-08 10:25:57 -07:00
|
|
|
_sighup_callbacks = []


def register_sighup(func):
    """Add *func* to the set of callbacks invoked on SIGHUP.

    Args:
        func (function): callback to run when the process receives SIGHUP.
            It is invoked with a single argument, the homeserver.
    """
    _sighup_callbacks.append(func)
|
|
|
|
|
2017-08-15 08:57:46 -06:00
|
|
|
|
|
|
|
def start_worker_reactor(appname, config):
    """ Run the reactor in the main process

    Daemonizes if necessary, and then configures some resources, before starting
    the reactor. Pulls configuration from the 'worker' settings in 'config'.

    Args:
        appname (str): application name which will be sent to syslog
        config (synapse.config.Config): config object
    """
    # Use a logger named after the worker app (avoids shadowing the module
    # logger with a local of the same name).
    worker_logger = logging.getLogger(config.worker_app)

    start_reactor(
        appname,
        config.soft_file_limit,
        config.gc_thresholds,
        config.worker_pid_file,
        config.worker_daemonize,
        config.worker_cpu_affinity,
        worker_logger,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def start_reactor(
    appname,
    soft_file_limit,
    gc_thresholds,
    pid_file,
    daemonize,
    cpu_affinity,
    logger,
):
    """ Run the reactor in the main process

    Daemonizes if necessary, and then configures some resources, before starting
    the reactor

    Args:
        appname (str): application name which will be sent to syslog
        soft_file_limit (int): soft file-descriptor limit to apply
        gc_thresholds: gc threshold tuple, or a falsy value to leave defaults
        pid_file (str): name of pid file to write to if daemonize is True
        daemonize (bool): true to run the reactor in a background process
        cpu_affinity (int|None): cpu affinity mask
        logger (logging.Logger): logger instance to pass to Daemonize
    """

    def run():
        # make sure that we run the reactor with the sentinel log context,
        # otherwise other PreserveLoggingContext instances will get confused
        # and complain when they see the logcontext arbitrarily swapping
        # between the sentinel and `run` logcontexts.
        with PreserveLoggingContext():
            logger.info("Running")

            if cpu_affinity is not None:
                # Decode the affinity bitmask into a list of CPU numbers:
                # bit N set means CPU N may be used. Reversing the binary
                # string lets us enumerate from bit 0 upwards.
                bits = bin(cpu_affinity)[2:][::-1]
                allowed_cpus = [n for n, bit in enumerate(bits) if bit == "1"]
                psutil.Process().cpu_affinity(allowed_cpus)

            change_resource_limit(soft_file_limit)
            if gc_thresholds:
                gc.set_threshold(*gc_thresholds)
            reactor.run()

    if not daemonize:
        run()
        return

    Daemonize(
        app=appname,
        pid=pid_file,
        action=run,
        auto_close_fds=False,
        verbose=True,
        logger=logger,
    ).start()
|
2017-10-02 10:59:34 -06:00
|
|
|
|
|
|
|
|
|
|
|
def quit_with_error(error_string):
    """Print a boxed error message to stderr and terminate the process.

    Args:
        error_string (str): (possibly multi-line) message to display

    Raises:
        SystemExit: always, with exit code 1.
    """
    message_lines = error_string.split("\n")
    # Size the banner to the longest line we will print, ignoring very long
    # lines which would make the banner unwieldy. `default=0` guards against
    # the case where *every* line is >= 80 chars, which previously crashed
    # max() with "arg is an empty sequence".
    line_length = (
        max((len(line) for line in message_lines if len(line) < 80), default=0) + 2
    )
    sys.stderr.write("*" * line_length + '\n')
    for line in message_lines:
        sys.stderr.write(" %s\n" % (line.rstrip(),))
    sys.stderr.write("*" * line_length + '\n')
    sys.exit(1)
|
2017-09-06 09:48:49 -06:00
|
|
|
|
|
|
|
|
2018-05-31 03:04:50 -06:00
|
|
|
def listen_metrics(bind_addresses, port):
    """
    Start a Prometheus metrics HTTP server on each bind address.
    """
    # Imported lazily so the metrics dependencies are only needed when
    # metrics listeners are actually configured.
    from prometheus_client import start_http_server

    from synapse.metrics import RegistryProxy

    for host in bind_addresses:
        logger.info("Starting metrics listener on %s:%d", host, port)
        start_http_server(port, addr=host, registry=RegistryProxy)
|
2018-05-31 03:04:50 -06:00
|
|
|
|
|
|
|
|
2018-08-17 09:08:45 -06:00
|
|
|
def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
    """
    Create a TCP socket for a port and several addresses

    Returns:
        list[twisted.internet.tcp.Port]: listening for TCP connections
    """
    ports = []
    for address in bind_addresses:
        try:
            listener = reactor.listenTCP(port, factory, backlog, address)
        except error.CannotListenError as e:
            # May re-raise; if it returns, we simply skip this address.
            check_bind_error(e, address, bind_addresses)
        else:
            ports.append(listener)

    return ports
|
2019-01-30 04:00:02 -07:00
|
|
|
|
2017-09-06 09:48:49 -06:00
|
|
|
|
2018-08-17 09:08:45 -06:00
|
|
|
def listen_ssl(
    bind_addresses, port, factory, context_factory, reactor=reactor, backlog=50
):
    """
    Create an TLS-over-TCP socket for a port and several addresses

    Returns:
        list of twisted.internet.tcp.Port listening for TLS connections
    """
    ports = []
    for address in bind_addresses:
        try:
            listener = reactor.listenSSL(
                port, factory, context_factory, backlog, address
            )
        except error.CannotListenError as e:
            # May re-raise; if it returns, we simply skip this address.
            check_bind_error(e, address, bind_addresses)
        else:
            ports.append(listener)

    return ports
|
2019-02-08 10:25:57 -07:00
|
|
|
|
|
|
|
|
|
|
|
def refresh_certificate(hs):
    """
    Refresh the TLS certificates that Synapse is using by re-reading them from
    disk and updating the TLS context factories to use them.
    """
    if not hs.config.has_tls_listener():
        # attempt to reload the certs for the good of the tls_fingerprints
        hs.config.read_certificate_from_disk(require_cert_and_key=False)
        return

    hs.config.read_certificate_from_disk(require_cert_and_key=True)
    hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config)

    if not hs._listening_services:
        return

    logger.info("Updating context factories...")
    for listener in hs._listening_services:
        # When you listenSSL, it doesn't make an SSL port but a TCP one with
        # a TLS wrapping factory around the factory you actually want to get
        # requests. This factory attribute is public but missing from
        # Twisted's documentation.
        if not isinstance(listener.factory, TLSMemoryBIOFactory):
            continue

        addr = listener.getHost()
        logger.info(
            "Replacing TLS context factory on [%s]:%i", addr.host, addr.port,
        )
        # Replace the TLS factory with a freshly configured one by reaching
        # in for the wrappedFactory and re-wrapping it.
        listener.factory = TLSMemoryBIOFactory(
            hs.tls_server_context_factory,
            False,
            listener.factory.wrappedFactory,
        )
    logger.info("Context factories updated.")
|
2019-02-11 03:36:26 -07:00
|
|
|
|
2019-02-08 10:25:57 -07:00
|
|
|
|
|
|
|
def start(hs, listeners=None):
    """
    Start a Synapse server or worker.

    Args:
        hs (synapse.server.HomeServer)
        listeners (list[dict]): Listener configuration ('listeners' in homeserver.yaml)
    """
    try:
        # Set up the SIGHUP machinery (skipped on platforms without SIGHUP,
        # e.g. Windows).
        if hasattr(signal, "SIGHUP"):

            def handle_sighup(*args, **kwargs):
                for sighup_callback in _sighup_callbacks:
                    sighup_callback(hs)

            signal.signal(signal.SIGHUP, handle_sighup)

        register_sighup(refresh_certificate)

        # Load the certificate from disk.
        refresh_certificate(hs)

        # It is now safe to start your Synapse.
        hs.start_listening(listeners)
        hs.get_datastore().start_profiling()

        setup_sentry(hs)
    except Exception:
        # Print the error, stop the reactor if it is running, and bail out.
        traceback.print_exc(file=sys.stderr)
        running_reactor = hs.get_reactor()
        if running_reactor.running:
            running_reactor.stop()
        sys.exit(1)
|
2019-02-12 06:55:58 -07:00
|
|
|
|
|
|
|
|
2019-02-13 09:14:37 -07:00
|
|
|
def setup_sentry(hs):
    """Enable sentry integration, if enabled in configuration

    Args:
        hs (synapse.server.HomeServer)
    """
    if not hs.config.sentry_enabled:
        return

    # Imported lazily so sentry_sdk is only required when the integration
    # is switched on.
    import sentry_sdk

    sentry_sdk.init(
        dsn=hs.config.sentry_dsn,
        release=get_version_string(synapse),
    )

    # We set some default tags that give some context to this instance
    with sentry_sdk.configure_scope() as scope:
        scope.set_tag("matrix_server_name", hs.config.server_name)

        # Record which app/worker this process is running as ("master" for
        # the main homeserver process).
        app = hs.config.worker_app or "synapse.app.homeserver"
        name = hs.config.worker_name or "master"
        scope.set_tag("worker_app", app)
        scope.set_tag("worker_name", name)
|