#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import synapse

import contextlib
import logging
import os
import re
import resource
import subprocess
import sys
import time

from synapse.config._base import ConfigError

from synapse.python_dependencies import (
    check_requirements, DEPENDENCY_LINKS
)

from synapse.rest import ClientRestResource
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
from synapse.storage import are_all_users_on_domain
from synapse.storage.prepare_database import UpgradeDatabaseException

from synapse.server import HomeServer

from twisted.conch.manhole import ColoredManhole
from twisted.conch.insults import insults
from twisted.conch import manhole_ssh
from twisted.cred import checkers, portal

from twisted.internet import reactor, task, defer
from twisted.application import service
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.static import File
from twisted.web.server import Site, GzipEncoderFactory, Request

from synapse.http.server import RootRedirect
from synapse.rest.media.v0.content_repository import ContentRepoResource
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
from synapse.rest.key.v1.server_key_resource import LocalKey
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.api.urls import (
    FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
    SERVER_KEY_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX, STATIC_PREFIX,
    SERVER_KEY_V2_PREFIX,
)
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.util.logcontext import LoggingContext
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
from synapse.replication.resource import ReplicationResource, REPLICATION_PREFIX
from synapse.federation.transport.server import TransportLayerServer

from synapse import events

from daemonize import Daemonize

logger = logging.getLogger("synapse.app.homeserver")


ACCESS_TOKEN_RE = re.compile(r'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')
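# Added note (not in the original source): this pattern redacts the value of an
# access token passed as a query parameter ("access_token", or "access%5Ftoken"
# with a percent-encoded underscore). For example,
#   /_matrix/client/r0/sync?access_token=SECRET&since=s1
# is logged by get_redacted_uri() below as
#   /_matrix/client/r0/sync?access_token=<redacted>&since=s1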


def gz_wrap(r):
    return EncodingResourceWrapper(r, [GzipEncoderFactory()])


def build_resource_for_web_client(hs):
    webclient_path = hs.get_config().web_client_location
    if not webclient_path:
        try:
            import syweb
        except ImportError:
            quit_with_error(
                "Could not find a webclient.\n\n"
                "Please either install the matrix-angular-sdk or configure\n"
                "the location of the source to serve via the configuration\n"
                "option `web_client_location`\n\n"
                "To install the `matrix-angular-sdk` via pip, run:\n\n"
                "    pip install '%(dep)s'\n"
                "\n"
                "You can also disable hosting of the webclient via the\n"
                "configuration option `web_client`\n"
                % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
            )
        syweb_path = os.path.dirname(syweb.__file__)
        webclient_path = os.path.join(syweb_path, "webclient")
    # GZip is disabled here due to
    # https://twistedmatrix.com/trac/ticket/7678
    # (It can stay enabled for the API resources: they call
    # write() with the whole body and then finish() straight
    # after and so do not trigger the bug.)
    #
    # GzipFile was removed in commit 184ba09
    # return GzipFile(webclient_path)  # TODO configurable?
    return File(webclient_path)  # TODO configurable?


class SynapseHomeServer(HomeServer):
    def _listener_http(self, config, listener_config):
        port = listener_config["port"]
        bind_address = listener_config.get("bind_address", "")
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)
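
        # Illustrative example (added comment, not in the original): a typical
        # entry under "listeners" in homeserver.yaml that this method consumes
        # might look like:
        #
        #   - port: 8448
        #     bind_address: ''
        #     tls: true
        #     resources:
        #       - names: [client, federation]
        #         compress: false
        #
        # Each name under "names" selects which resource trees get mounted on
        # this port by the loop below.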

        if tls and config.no_tls:
            return

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "client":
                    client_resource = ClientRestResource(self)
                    if res["compress"]:
                        client_resource = gz_wrap(client_resource)

                    resources.update({
                        "/_matrix/client/api/v1": client_resource,
                        "/_matrix/client/r0": client_resource,
                        "/_matrix/client/unstable": client_resource,
                        "/_matrix/client/v2_alpha": client_resource,
                        "/_matrix/client/versions": client_resource,
                    })

                if name == "federation":
                    resources.update({
                        FEDERATION_PREFIX: TransportLayerServer(self),
                    })

                if name in ["static", "client"]:
                    resources.update({
                        STATIC_PREFIX: File(
                            os.path.join(os.path.dirname(synapse.__file__), "static")
                        ),
                    })

                if name in ["media", "federation", "client"]:
                    media_repo = MediaRepositoryResource(self)
                    resources.update({
                        MEDIA_PREFIX: media_repo,
                        LEGACY_MEDIA_PREFIX: media_repo,
                        CONTENT_REPO_PREFIX: ContentRepoResource(
                            self, self.config.uploads_path, self.auth, self.content_addr
                        ),
                    })

                if name in ["keys", "federation"]:
                    resources.update({
                        SERVER_KEY_PREFIX: LocalKey(self),
                        SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self),
                    })

                if name == "webclient":
                    resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)

                if name == "metrics" and self.get_config().enable_metrics:
                    resources[METRICS_PREFIX] = MetricsResource(self)

                if name == "replication":
                    resources[REPLICATION_PREFIX] = ReplicationResource(self)

        root_resource = create_resource_tree(resources)
        if tls:
            reactor.listenSSL(
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                ),
                self.tls_server_context_factory,
                interface=bind_address
            )
        else:
            reactor.listenTCP(
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                ),
                interface=bind_address
            )
        logger.info("Synapse now listening on port %d", port)

    def start_listening(self):
        config = self.get_config()

        for listener in config.listeners:
            if listener["type"] == "http":
                self._listener_http(config, listener)
            elif listener["type"] == "manhole":
                checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(
                    matrix="rabbithole"
                )

                rlm = manhole_ssh.TerminalRealm()
                rlm.chainedProtocolFactory = lambda: insults.ServerProtocol(
                    ColoredManhole,
                    {
                        "__name__": "__console__",
                        "hs": self,
                    }
                )

                f = manhole_ssh.ConchFactory(portal.Portal(rlm, [checker]))
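                # Added note (not in the original): this listener gives a REPL
                # inside the running process, reachable with e.g.
                #   ssh -p <manhole port> matrix@127.0.0.1   (password: rabbithole)
                # with the homeserver exposed to the console as `hs`.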

                reactor.listenTCP(
                    listener["port"],
                    f,
                    interface=listener.get("bind_address", '127.0.0.1')
                )
            else:
                logger.warn("Unrecognized listener type: %s", listener["type"])

    def run_startup_checks(self, db_conn, database_engine):
        all_users_native = are_all_users_on_domain(
            db_conn.cursor(), database_engine, self.hostname
        )
        if not all_users_native:
            quit_with_error(
                "Found users in database not native to %s!\n"
                "You cannot change a synapse server_name after it's been configured"
                % (self.hostname,)
            )

        try:
            database_engine.check_database(db_conn.cursor())
        except IncorrectDatabaseSetup as e:
            quit_with_error(e.message)

    def get_db_conn(self):
        # Any param beginning with cp_ is a parameter for adbapi, and should
        # not be passed to the database engine.
        db_params = {
            k: v for k, v in self.db_config.get("args", {}).items()
            if not k.startswith("cp_")
        }
        db_conn = self.database_engine.module.connect(**db_params)

        self.database_engine.on_new_connection(db_conn)
        return db_conn
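
    # Added note (not in the original): the "cp_" prefix refers to twisted
    # adbapi ConnectionPool options; database "args" such as cp_min and cp_max
    # size the connection pool, so they are filtered out above before the raw
    # DB-API connect() call.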


def quit_with_error(error_string):
    message_lines = error_string.split("\n")
    line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
    sys.stderr.write("*" * line_length + '\n')
    for line in message_lines:
        sys.stderr.write(" %s\n" % (line.rstrip(),))
    sys.stderr.write("*" * line_length + '\n')
    sys.exit(1)


def get_version_string():
    try:
        null = open(os.devnull, 'w')
        cwd = os.path.dirname(os.path.abspath(__file__))
        try:
            git_branch = subprocess.check_output(
                ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                stderr=null,
                cwd=cwd,
            ).strip()
            git_branch = "b=" + git_branch
        except subprocess.CalledProcessError:
            git_branch = ""

        try:
            git_tag = subprocess.check_output(
                ['git', 'describe', '--exact-match'],
                stderr=null,
                cwd=cwd,
            ).strip()
            git_tag = "t=" + git_tag
        except subprocess.CalledProcessError:
            git_tag = ""

        try:
            git_commit = subprocess.check_output(
                ['git', 'rev-parse', '--short', 'HEAD'],
                stderr=null,
                cwd=cwd,
            ).strip()
        except subprocess.CalledProcessError:
            git_commit = ""

        try:
            dirty_string = "-this_is_a_dirty_checkout"
            is_dirty = subprocess.check_output(
                ['git', 'describe', '--dirty=' + dirty_string],
                stderr=null,
                cwd=cwd,
            ).strip().endswith(dirty_string)

            git_dirty = "dirty" if is_dirty else ""
        except subprocess.CalledProcessError:
            git_dirty = ""

        if git_branch or git_tag or git_commit or git_dirty:
            git_version = ",".join(
                s for s in
                (git_branch, git_tag, git_commit, git_dirty,)
                if s
            )

            return (
                "Synapse/%s (%s)" % (
                    synapse.__version__, git_version,
                )
            ).encode("ascii")
    except Exception as e:
        logger.info("Failed to check for git repository: %s", e)

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
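
# Added note (not in the original): inside a git checkout the result looks
# something like "Synapse/0.13.0 (b=develop,3c0f480)" (version and hash here
# are purely illustrative); outside a checkout it falls back to a plain
# "Synapse/<version>".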


def change_resource_limit(soft_file_no):
    try:
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

        if not soft_file_no:
            soft_file_no = hard

        resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
        logger.info("Set file limit to: %d", soft_file_no)

        resource.setrlimit(
            resource.RLIMIT_CORE, (resource.RLIM_INFINITY, resource.RLIM_INFINITY)
        )
    except (ValueError, resource.error) as e:
        logger.warn("Failed to set file or core limit: %s", e)
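
# Added note (not in the original): RLIMIT_NOFILE is a (soft, hard) pair and a
# process may raise its soft limit up to the hard limit without privileges, so
# passing a falsy soft_file_no above bumps the soft limit all the way to the
# hard limit. The core-dump limit is raised to RLIM_INFINITY to make post-crash
# debugging possible.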


def setup(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_config(
            "Synapse Homeserver",
            config_options,
            generate_section="Homeserver"
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    config.setup_logging()

    # check any extra requirements we have now that we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config)
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )
logger.info("Preparing database: %s...", config.database_config['name'])
|
2014-09-10 09:23:58 -06:00
|
|
|
|
2014-12-16 07:20:32 -07:00
|
|
|
try:
|
2016-01-26 08:51:06 -07:00
|
|
|
db_conn = hs.get_db_conn()
|
2015-04-02 03:06:22 -06:00
|
|
|
database_engine.prepare_database(db_conn)
|
2015-04-28 06:39:42 -06:00
|
|
|
hs.run_startup_checks(db_conn, database_engine)
|
2015-04-01 07:12:33 -06:00
|
|
|
|
|
|
|
db_conn.commit()
|
2014-12-16 07:20:32 -07:00
|
|
|
except UpgradeDatabaseException:
|
|
|
|
sys.stderr.write(
|
|
|
|
"\nFailed to upgrade database.\n"
|
2015-01-19 08:30:48 -07:00
|
|
|
"Have you checked for version specific instructions in"
|
|
|
|
" UPGRADES.rst?\n"
|
2014-12-16 07:20:32 -07:00
|
|
|
)
|
|
|
|
sys.exit(1)
|
2014-09-10 09:23:58 -06:00
|
|
|
|
2015-08-29 15:23:21 -06:00
|
|
|
logger.info("Database prepared in %s.", config.database_config['name'])
|
2014-09-10 08:42:15 -06:00
|
|
|
|
2016-01-26 08:51:06 -07:00
|
|
|
hs.setup()
|
2015-03-12 09:51:33 -06:00
|
|
|
hs.start_listening()
|
2014-09-10 09:16:24 -06:00
|
|
|
|
2016-01-26 08:51:06 -07:00
|
|
|
def start():
|
|
|
|
hs.get_pusherpool().start()
|
|
|
|
hs.get_state_handler().start_caching()
|
|
|
|
hs.get_datastore().start_profiling()
|
|
|
|
hs.get_datastore().start_doing_background_updates()
|
|
|
|
hs.get_replication_layer().start_get_pdu_cache()
|
|
|
|
|
|
|
|
reactor.callWhenRunning(start)
|
2015-02-06 09:52:22 -07:00
|
|
|
|
2015-03-10 03:39:42 -06:00
|
|
|
return hs
|
|
|
|
|
2014-11-20 10:26:36 -07:00
|
|
|
|
2015-01-07 06:46:37 -07:00
|
|
|
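
# Added note (not in the original): reactor.callWhenRunning() in setup() above
# defers start() until the reactor is actually running, so pushers, state-cache
# warming and background database updates only kick off once the event loop is
# live.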


class SynapseService(service.Service):
    """A twisted Service class that will start synapse. Used to run synapse
    via twistd and a .tac.
    """
    def __init__(self, config):
        self.config = config

    def startService(self):
        hs = setup(self.config)
        change_resource_limit(hs.config.soft_file_limit)

    def stopService(self):
        return self._port.stopListening()


class SynapseRequest(Request):
    def __init__(self, site, *args, **kw):
        Request.__init__(self, *args, **kw)
        self.site = site
        self.authenticated_entity = None
        self.start_time = 0

    def __repr__(self):
        # We overwrite this so that we don't log ``access_token``
        return '<%s at 0x%x method=%s uri=%s clientproto=%s site=%s>' % (
            self.__class__.__name__,
            id(self),
            self.method,
            self.get_redacted_uri(),
            self.clientproto,
            self.site.site_tag,
        )

    def get_redacted_uri(self):
        return ACCESS_TOKEN_RE.sub(
            r'\1<redacted>\3',
            self.uri
        )

    def get_user_agent(self):
        return self.requestHeaders.getRawHeaders("User-Agent", [None])[-1]

    def started_processing(self):
        self.site.access_logger.info(
            "%s - %s - Received request: %s %s",
            self.getClientIP(),
            self.site.site_tag,
            self.method,
            self.get_redacted_uri()
        )
        self.start_time = int(time.time() * 1000)

    def finished_processing(self):
        try:
            context = LoggingContext.current_context()
            ru_utime, ru_stime = context.get_resource_usage()
            db_txn_count = context.db_txn_count
            db_txn_duration = context.db_txn_duration
        except:
            ru_utime, ru_stime = (0, 0)
            db_txn_count, db_txn_duration = (0, 0)

        self.site.access_logger.info(
            "%s - %s - {%s}"
            " Processed request: %dms (%dms, %dms) (%dms/%d)"
            " %sB %s \"%s %s %s\" \"%s\"",
            self.getClientIP(),
            self.site.site_tag,
            self.authenticated_entity,
            int(time.time() * 1000) - self.start_time,
            int(ru_utime * 1000),
            int(ru_stime * 1000),
            int(db_txn_duration * 1000),
            int(db_txn_count),
            self.sentLength,
            self.code,
            self.method,
            self.get_redacted_uri(),
            self.clientproto,
            self.get_user_agent(),
        )

    @contextlib.contextmanager
    def processing(self):
        self.started_processing()
        yield
        self.finished_processing()
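
    # Added note (not in the original): the access-log line above reads roughly
    # as:
    #   <client ip> - <site tag> - {<authenticated entity>} Processed request:
    #   <total ms> (<user cpu ms>, <sys cpu ms>) (<db txn ms>/<db txn count>)
    #   <bytes sent>B <status> "<method> <redacted uri> <proto>" "<user agent>"
    # with the CPU and DB figures taken from the request's LoggingContext.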


class XForwardedForRequest(SynapseRequest):
    """
    Add a layer on top of another request that only uses the value of an
    X-Forwarded-For header as the result of C{getClientIP}.
    """
    def __init__(self, *args, **kw):
        SynapseRequest.__init__(self, *args, **kw)

    def getClientIP(self):
        """
        @return: The client address (the first address) in the value of the
            I{X-Forwarded-For header}. If the header is not present, return
            C{b"-"}.
        """
        return self.requestHeaders.getRawHeaders(
            b"x-forwarded-for", [b"-"])[0].split(b",")[0].strip()
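
    # Added note (not in the original): X-Forwarded-For may carry a
    # comma-separated chain such as "203.0.113.7, 10.0.0.1"; split(b",")[0]
    # above keeps only the first (client) address. The header is trusted as-is,
    # so this class should only be used behind a reverse proxy that sets it.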


class SynapseRequestFactory(object):
    def __init__(self, site, x_forwarded_for):
        self.site = site
        self.x_forwarded_for = x_forwarded_for

    def __call__(self, *args, **kwargs):
        if self.x_forwarded_for:
            return XForwardedForRequest(self.site, *args, **kwargs)
        else:
            return SynapseRequest(self.site, *args, **kwargs)


class SynapseSite(Site):
    """
    Subclass of a twisted http Site that does access logging with python's
    standard logging
    """
    def __init__(self, logger_name, site_tag, config, resource, *args, **kwargs):
        Site.__init__(self, resource, *args, **kwargs)

        self.site_tag = site_tag

        proxied = config.get("x_forwarded", False)
        self.requestFactory = SynapseRequestFactory(self, proxied)
        self.access_logger = logging.getLogger(logger_name)

    def log(self, request):
        pass
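
    # Added note (not in the original): log() is deliberately a no-op. Twisted's
    # Site would otherwise emit its own access-log line per request, and
    # SynapseRequest.finished_processing() above already logs a richer one.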


def create_resource_tree(desired_tree, redirect_root_to_web_client=True):
    """Create the resource tree for this Home Server.

    This is unduly complicated because Twisted does not support putting
    child resources more than 1 level deep at a time.

    Args:
        desired_tree (dict): mapping from path to the Resource to attach at
            that path.
        redirect_root_to_web_client (bool): True to redirect '/' to the
            location of the web client. This does nothing if no web client
            resource is present in desired_tree.
    """
    if redirect_root_to_web_client and WEB_CLIENT_PREFIX in desired_tree:
        root_resource = RootRedirect(WEB_CLIENT_PREFIX)
    else:
        root_resource = Resource()

    # ideally we'd just use getChild and putChild but getChild doesn't work
    # unless you give it a Request object IN ADDITION to the name :/ So
    # instead, we'll store a copy of this mapping so we can actually add
    # extra resources to existing nodes. See _resource_id for the key.
    resource_mappings = {}
    for full_path, res in desired_tree.items():
        logger.info("Attaching %s to path %s", res, full_path)
        last_resource = root_resource
        for path_seg in full_path.split('/')[1:-1]:
            if path_seg not in last_resource.listNames():
                # resource doesn't exist, so make a "dummy resource"
                child_resource = Resource()
                last_resource.putChild(path_seg, child_resource)
                res_id = _resource_id(last_resource, path_seg)
                resource_mappings[res_id] = child_resource
                last_resource = child_resource
            else:
                # we have an existing Resource, use that instead.
                res_id = _resource_id(last_resource, path_seg)
                last_resource = resource_mappings[res_id]

        # ===========================
        # now attach the actual desired resource
        last_path_seg = full_path.split('/')[-1]

        # if there is already a resource here, thieve its children and
        # replace it
        res_id = _resource_id(last_resource, last_path_seg)
        if res_id in resource_mappings:
            # there is a dummy resource at this path already, which needs
            # to be replaced with the desired resource.
            existing_dummy_resource = resource_mappings[res_id]
            for child_name in existing_dummy_resource.listNames():
                child_res_id = _resource_id(
                    existing_dummy_resource, child_name
                )
                child_resource = resource_mappings[child_res_id]
                # steal the children
                res.putChild(child_name, child_resource)

        # finally, insert the desired resource in the right place
        last_resource.putChild(last_path_seg, res)
        res_id = _resource_id(last_resource, last_path_seg)
        resource_mappings[res_id] = res

    return root_resource
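
# Added example (not in the original): attaching "/_matrix/client/api/v1" walks
# the segments ["_matrix", "client", "api"], creating an empty "dummy" Resource
# for each segment not yet present, then putChild()s the real resource as "v1"
# on the last of them. A later path sharing a prefix reuses the dummies via
# resource_mappings instead of recreating them.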


def _resource_id(resource, path_seg):
    """Construct an arbitrary resource ID so you can retrieve the mapping
    later.

    If you want to represent resource A putChild resource B with path C,
    the mapping should look like _resource_id(A,C) = B.

    Args:
        resource (Resource): The *parent* Resource.
        path_seg (str): The name of the child Resource to be attached.

    Returns:
        str: A unique string which can be a key to the child Resource.
    """
    return "%s-%s" % (resource, path_seg)


def run(hs):
    PROFILE_SYNAPSE = False
    if PROFILE_SYNAPSE:
        def profile(func):
            from cProfile import Profile
            from threading import current_thread

            def profiled(*args, **kargs):
                profile = Profile()
                profile.enable()
                func(*args, **kargs)
                profile.disable()
                ident = current_thread().ident
                profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
                    hs.hostname, func.__name__, ident
                ))

            return profiled

        from twisted.python.threadpool import ThreadPool
        ThreadPool._worker = profile(ThreadPool._worker)
        reactor.run = profile(reactor.run)
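
    # Added note (not in the original): with PROFILE_SYNAPSE flipped to True,
    # each profiled call dumps a .pstat file under /tmp, which can be inspected
    # with the standard pstats module, e.g.:
    #   import pstats
    #   pstats.Stats("/tmp/<host>.run.<ident>.pstat").sort_stats("cumulative").print_stats(20)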

    start_time = hs.get_clock().time()

    @defer.inlineCallbacks
    def phone_stats_home():
        logger.info("Gathering stats for reporting")
        now = int(hs.get_clock().time())
        uptime = int(now - start_time)
        if uptime < 0:
            uptime = 0

        stats = {}
        stats["homeserver"] = hs.config.server_name
        stats["timestamp"] = now
        stats["uptime_seconds"] = uptime
        stats["total_users"] = yield hs.get_datastore().count_all_users()

        room_count = yield hs.get_datastore().get_room_count()
        stats["total_room_count"] = room_count

        stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
        daily_messages = yield hs.get_datastore().count_daily_messages()
        if daily_messages is not None:
            stats["daily_messages"] = daily_messages

        logger.info("Reporting stats to matrix.org: %s", stats)
        try:
            yield hs.get_simple_http_client().put_json(
                "https://matrix.org/report-usage-stats/push",
                stats
            )
        except Exception as e:
            logger.warn("Error reporting stats: %s", e)

    if hs.config.report_stats:
        phone_home_task = task.LoopingCall(phone_stats_home)
        logger.info("Scheduling stats reporting for 24 hour intervals")
        phone_home_task.start(60 * 60 * 24, now=False)

    def in_thread():
        # Uncomment to enable tracing of log context changes.
        # sys.settrace(logcontext_tracer)
        with LoggingContext("run"):
            change_resource_limit(hs.config.soft_file_limit)
            reactor.run()

    if hs.config.daemonize:

        if hs.config.print_pidfile:
            print(hs.config.pid_file)

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=hs.config.pid_file,
            action=lambda: in_thread(),
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        in_thread()


def main():
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        hs = setup(sys.argv[1:])
        run(hs)


if __name__ == '__main__':
    main()