# Copyright 2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
from typing import Dict, List, Optional, Tuple

from matrix_common.versionstring import get_distribution_version_string

from twisted.internet import address
from twisted.web.resource import Resource

import synapse
import synapse.events
from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
from synapse.api.urls import (
    CLIENT_API_PREFIX,
    FEDERATION_PREFIX,
    LEGACY_MEDIA_PREFIX,
    MEDIA_R0_PREFIX,
    MEDIA_V3_PREFIX,
    SERVER_KEY_V2_PREFIX,
)
from synapse.app import _base
from synapse.app._base import (
    handle_startup_exception,
    max_request_body_size,
    redirect_stdio_to_logs,
    register_start,
)
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.config.server import ListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource, OptionsResource
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.logging.context import LoggingContext
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.replication.slave.storage._base import BaseSlavedStore
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
from synapse.replication.slave.storage.devices import SlavedDeviceStore
from synapse.replication.slave.storage.directory import DirectoryStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.filtering import SlavedFilteringStore
from synapse.replication.slave.storage.groups import SlavedGroupServerStore
from synapse.replication.slave.storage.keys import SlavedKeyStore
from synapse.replication.slave.storage.profile import SlavedProfileStore
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
from synapse.replication.slave.storage.pushers import SlavedPusherStore
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.rest.admin import register_servlets_for_media_repo
from synapse.rest.client import (
    account_data,
    events,
    groups,
    initial_sync,
    login,
    presence,
    profile,
    push_rule,
    read_marker,
    receipts,
    room,
    room_keys,
    sendtodevice,
    sync,
    tags,
    user_directory,
    versions,
    voip,
)
from synapse.rest.client._base import client_patterns
from synapse.rest.client.account import ThreepidRestServlet
from synapse.rest.client.devices import DevicesRestServlet
from synapse.rest.client.keys import (
    KeyChangesServlet,
    KeyQueryServlet,
    OneTimeKeyServlet,
)
from synapse.rest.client.register import (
    RegisterRestServlet,
    RegistrationTokenValidityRestServlet,
)
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.well_known import well_known_resource
from synapse.server import HomeServer
from synapse.storage.databases.main.censor_events import CensorEventsStore
from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyStore
from synapse.storage.databases.main.lock import LockStore
from synapse.storage.databases.main.media_repository import MediaRepositoryStore
from synapse.storage.databases.main.metrics import ServerMetricsStore
from synapse.storage.databases.main.monthly_active_users import (
    MonthlyActiveUsersWorkerStore,
)
from synapse.storage.databases.main.presence import PresenceStore
from synapse.storage.databases.main.room import RoomWorkerStore
from synapse.storage.databases.main.room_batch import RoomBatchStore
from synapse.storage.databases.main.search import SearchStore
from synapse.storage.databases.main.session import SessionStore
from synapse.storage.databases.main.stats import StatsStore
from synapse.storage.databases.main.transactions import TransactionWorkerStore
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
from synapse.storage.databases.main.user_directory import UserDirectoryStore
from synapse.types import JsonDict
from synapse.util.httpresourcetree import create_resource_tree

logger = logging.getLogger("synapse.app.generic_worker")


class KeyUploadServlet(RestServlet):
    """An implementation of the `KeyUploadServlet` that responds to read only
    requests, but otherwise proxies through to the master instance.
    """

    PATTERNS = client_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")

    def __init__(self, hs: HomeServer):
        """
        Args:
            hs: server
        """
        super().__init__()
        self.auth = hs.get_auth()
        self.store = hs.get_datastores().main
        self.http_client = hs.get_simple_http_client()
        self.main_uri = hs.config.worker.worker_main_http_uri

    async def on_POST(
        self, request: SynapseRequest, device_id: Optional[str]
    ) -> Tuple[int, JsonDict]:
        requester = await self.auth.get_user_by_req(request, allow_guest=True)
        user_id = requester.user.to_string()
        body = parse_json_object_from_request(request)

        if device_id is not None:
            # passing the device_id here is deprecated; however, we allow it
            # for now for compatibility with older clients.
            if requester.device_id is not None and device_id != requester.device_id:
                logger.warning(
                    "Client uploading keys for a different device "
                    "(logged in as %s, uploading for %s)",
                    requester.device_id,
                    device_id,
                )
        else:
            device_id = requester.device_id

        if device_id is None:
            raise SynapseError(
                400, "To upload keys, you must pass device_id when authenticating"
            )

        if body:
            # They're actually trying to upload something, proxy to main synapse.

            # Proxy headers from the original request, such as the auth headers
            # (in case the access token is there) and the original IP /
            # User-Agent of the request.
            headers = {
                header: request.requestHeaders.getRawHeaders(header, [])
                for header in (b"Authorization", b"User-Agent")
            }
            # Add the previous hop to the X-Forwarded-For header.
            x_forwarded_for = request.requestHeaders.getRawHeaders(
                b"X-Forwarded-For", []
            )
            # we use request.client here, since we want the previous hop, not the
            # original client (as returned by request.getClientAddress()).
            if isinstance(request.client, (address.IPv4Address, address.IPv6Address)):
                previous_host = request.client.host.encode("ascii")
                # If the header exists, add to the comma-separated list of the first
                # instance of the header. Otherwise, generate a new header.
                if x_forwarded_for:
                    new_x_forwarded_for = [x_forwarded_for[0] + b", " + previous_host]
                    # Preserve any additional instances of the header.
                    new_x_forwarded_for.extend(x_forwarded_for[1:])
                    x_forwarded_for = new_x_forwarded_for
                else:
                    x_forwarded_for = [previous_host]
headers[b"X-Forwarded-For"] = x_forwarded_for
|
|
|
|
|
2021-03-03 11:49:08 -07:00
|
|
|
# Replicate the original X-Forwarded-Proto header. Note that
|
|
|
|
# XForwardedForRequest overrides isSecure() to give us the original protocol
|
|
|
|
# used by the client, as opposed to the protocol used by our upstream proxy
|
|
|
|
# - which is what we want here.
|
|
|
|
headers[b"X-Forwarded-Proto"] = [
|
|
|
|
b"https" if request.isSecure() else b"http"
|
|
|
|
]
|
|
|
|
|
2020-05-22 04:39:20 -06:00
|
|
|
try:
|
|
|
|
result = await self.http_client.post_json_get_json(
|
|
|
|
self.main_uri + request.uri.decode("ascii"), body, headers=headers
|
|
|
|
)
|
|
|
|
except HttpResponseException as e:
|
2020-05-26 04:42:22 -06:00
|
|
|
raise e.to_synapse_error() from e
|
2020-05-22 04:39:20 -06:00
|
|
|
except RequestSendFailed as e:
|
|
|
|
raise SynapseError(502, "Failed to talk to master") from e
|
2020-02-25 09:56:55 -07:00
|
|
|
|
|
|
|
return 200, result
|
|
|
|
else:
|
|
|
|
# Just interested in counts.
|
|
|
|
result = await self.store.count_e2e_one_time_keys(user_id, device_id)
|
|
|
|
return 200, {"one_time_key_counts": result}
|
|
|
|
|
|
|
|
|
|
|
|


class GenericWorkerSlavedStore(
    # FIXME(#3714): We need to add UserDirectoryStore as we write directly
    # rather than going via the correct worker.
    UserDirectoryStore,
    StatsStore,
    UIAuthWorkerStore,
    EndToEndRoomKeyStore,
    PresenceStore,
    SlavedDeviceInboxStore,
    SlavedDeviceStore,
    SlavedReceiptsStore,
    SlavedPushRuleStore,
    SlavedGroupServerStore,
    SlavedAccountDataStore,
    SlavedPusherStore,
    CensorEventsStore,
    ClientIpWorkerStore,
    SlavedEventStore,
    SlavedKeyStore,
    RoomWorkerStore,
    RoomBatchStore,
    DirectoryStore,
    SlavedApplicationServiceStore,
    SlavedRegistrationStore,
    SlavedProfileStore,
    SlavedClientIpStore,
    SlavedFilteringStore,
    MonthlyActiveUsersWorkerStore,
    MediaRepositoryStore,
    ServerMetricsStore,
    SearchStore,
    TransactionWorkerStore,
    LockStore,
    SessionStore,
    BaseSlavedStore,
):
    # Properties that multiple storage classes define. Tell mypy what the
    # expected type is.
    server_name: str
    config: HomeServerConfig


class GenericWorkerServer(HomeServer):
    DATASTORE_CLASS = GenericWorkerSlavedStore  # type: ignore

    def _listen_http(self, listener_config: ListenerConfig) -> None:
        port = listener_config.port
        bind_addresses = listener_config.bind_addresses

        assert listener_config.http_options is not None

        site_tag = listener_config.http_options.tag
        if site_tag is None:
            site_tag = str(port)

        # We always include a health resource.
        resources: Dict[str, Resource] = {"/health": HealthResource()}

        for res in listener_config.http_options.resources:
            for name in res.names:
                if name == "metrics":
                    resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
                elif name == "client":
                    resource = JsonResource(self, canonical_json=False)

                    RegisterRestServlet(self).register(resource)
                    RegistrationTokenValidityRestServlet(self).register(resource)
                    login.register_servlets(self, resource)
                    ThreepidRestServlet(self).register(resource)
                    DevicesRestServlet(self).register(resource)

                    # Read-only
                    KeyUploadServlet(self).register(resource)
                    KeyQueryServlet(self).register(resource)
                    KeyChangesServlet(self).register(resource)
                    OneTimeKeyServlet(self).register(resource)

                    voip.register_servlets(self, resource)
                    push_rule.register_servlets(self, resource)
                    versions.register_servlets(self, resource)

                    profile.register_servlets(self, resource)

                    sync.register_servlets(self, resource)
                    events.register_servlets(self, resource)
                    room.register_servlets(self, resource, is_worker=True)
                    room.register_deprecated_servlets(self, resource)
                    initial_sync.register_servlets(self, resource)
                    room_keys.register_servlets(self, resource)
                    tags.register_servlets(self, resource)
                    account_data.register_servlets(self, resource)
                    receipts.register_servlets(self, resource)
                    read_marker.register_servlets(self, resource)

                    sendtodevice.register_servlets(self, resource)

                    user_directory.register_servlets(self, resource)

                    presence.register_servlets(self, resource)

                    groups.register_servlets(self, resource)

                    resources.update({CLIENT_API_PREFIX: resource})

                    resources.update(build_synapse_client_resource_tree(self))
                    resources.update({"/.well-known": well_known_resource(self)})
elif name == "federation":
|
|
|
|
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
|
|
|
|
elif name == "media":
|
2021-09-24 05:25:21 -06:00
|
|
|
if self.config.media.can_load_media_repo:
|
2020-02-27 09:33:21 -07:00
|
|
|
media_repo = self.get_media_repository_resource()
|
|
|
|
|
|
|
|
# We need to serve the admin servlets for media on the
|
|
|
|
# worker.
|
|
|
|
admin_resource = JsonResource(self, canonical_json=False)
|
|
|
|
register_servlets_for_media_repo(self, admin_resource)
|
|
|
|
|
|
|
|
resources.update(
|
|
|
|
{
|
2021-11-17 08:30:24 -07:00
|
|
|
MEDIA_R0_PREFIX: media_repo,
|
|
|
|
MEDIA_V3_PREFIX: media_repo,
|
2020-02-27 09:33:21 -07:00
|
|
|
LEGACY_MEDIA_PREFIX: media_repo,
|
|
|
|
"/_synapse/admin": admin_resource,
|
|
|
|
}
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
logger.warning(
|
|
|
|
"A 'media' listener is configured but the media"
|
|
|
|
" repository is disabled. Ignoring."
|
|
|
|
)
|
2020-02-25 09:56:55 -07:00
|
|
|
|
2020-06-16 05:44:07 -06:00
|
|
|
if name == "openid" and "federation" not in res.names:
|
2020-02-25 09:56:55 -07:00
|
|
|
# Only load the openid resource separately if federation resource
|
|
|
|
# is not specified since federation resource includes openid
|
|
|
|
# resource.
|
|
|
|
resources.update(
|
|
|
|
{
|
|
|
|
FEDERATION_PREFIX: TransportLayerServer(
|
|
|
|
self, servlet_groups=["openid"]
|
|
|
|
)
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
if name in ["keys", "federation"]:
|
|
|
|
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
|
|
|
|
|
2020-05-18 05:24:48 -06:00
|
|
|
if name == "replication":
|
|
|
|
resources[REPLICATION_PREFIX] = ReplicationRestResource(self)
|
|
|
|
|
2021-06-18 05:15:52 -06:00
|
|
|
# Attach additional resources registered by modules.
|
|
|
|
resources.update(self._module_web_resources)
|
|
|
|
self._module_web_resources_consumed = True
|
|
|
|
|
2020-05-22 07:30:07 -06:00
|
|
|
root_resource = create_resource_tree(resources, OptionsResource())
|
2020-02-25 09:56:55 -07:00
|
|
|
|
|
|
|
_base.listen_tcp(
|
|
|
|
bind_addresses,
|
|
|
|
port,
|
|
|
|
SynapseSite(
|
|
|
|
"synapse.access.http.%s" % (site_tag,),
|
|
|
|
site_tag,
|
|
|
|
listener_config,
|
|
|
|
root_resource,
|
|
|
|
self.version_string,
|
2021-04-23 12:20:44 -06:00
|
|
|
max_request_body_size=max_request_body_size(self.config),
|
2021-04-23 10:06:47 -06:00
|
|
|
reactor=self.get_reactor(),
|
2020-02-25 09:56:55 -07:00
|
|
|
),
|
|
|
|
reactor=self.get_reactor(),
|
|
|
|
)
|
|
|
|
|
|
|
|
logger.info("Synapse worker now listening on port %d", port)
|
|
|
|
|
2021-11-10 13:06:54 -07:00
|
|
|

    def start_listening(self) -> None:
        for listener in self.config.worker.worker_listeners:
            if listener.type == "http":
                self._listen_http(listener)
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
                        "Metrics listener configured, but "
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(listener.bind_addresses, listener.port)
            else:
                logger.warning("Unsupported listener type: %s", listener.type)

        self.get_tcp_replication().start_replication(self)
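

# The listeners handled by GenericWorkerServer.start_listening() above come from
# the worker's own YAML config. A minimal, illustrative example (the worker name,
# port, and URI below are examples, not defaults):
#
#   worker_app: synapse.app.generic_worker
#   worker_name: generic_worker1
#   worker_main_http_uri: http://127.0.0.1:8008
#   worker_listeners:
#     - type: http
#       port: 8083
#       resources:
#         - names: [client, federation]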


def start(config_options: List[str]) -> None:
    try:
        config = HomeServerConfig.load_config("Synapse worker", config_options)
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    # For backwards compatibility allow any of the old app names.
    assert config.worker.worker_app in (
        "synapse.app.appservice",
        "synapse.app.client_reader",
        "synapse.app.event_creator",
        "synapse.app.federation_reader",
        "synapse.app.federation_sender",
        "synapse.app.frontend_proxy",
        "synapse.app.generic_worker",
        "synapse.app.media_repository",
        "synapse.app.pusher",
        "synapse.app.synchrotron",
        "synapse.app.user_dir",
    )

    if config.worker.worker_app == "synapse.app.appservice":
        if config.appservice.notify_appservices:
            sys.stderr.write(
                "\nThe appservices must be disabled in the main synapse process"
                "\nbefore they can be run in a separate worker."
                "\nPlease add ``notify_appservices: false`` to the main config"
                "\n"
            )
            sys.exit(1)

        # Force the appservice notifications to start, since they will be
        # disabled in the main config.
        config.appservice.notify_appservices = True
    else:
        # For other worker types we force this to off.
        config.appservice.notify_appservices = False
if config.worker.worker_app == "synapse.app.user_dir":
|
2020-07-10 11:26:36 -06:00
|
|
|
if config.server.update_user_directory:
|
2020-02-25 09:56:55 -07:00
|
|
|
sys.stderr.write(
|
|
|
|
"\nThe update_user_directory must be disabled in the main synapse process"
|
|
|
|
"\nbefore they can be run in a separate worker."
|
|
|
|
"\nPlease add ``update_user_directory: false`` to the main config"
|
|
|
|
"\n"
|
|
|
|
)
|
|
|
|
sys.exit(1)
|
|
|
|
|
|
|
|

        # Force the user directory updates to start, since they will be
        # disabled in the main config.
        config.server.update_user_directory = True
    else:
        # For other worker types we force this to off.
        config.server.update_user_directory = False

    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
    synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

    if config.server.gc_seconds:
        synapse.metrics.MIN_TIME_BETWEEN_GCS = config.server.gc_seconds

    hs = GenericWorkerServer(
        config.server.server_name,
        config=config,
        version_string="Synapse/" + get_distribution_version_string("matrix-synapse"),
    )

    setup_logging(hs, config, use_worker_options=True)

    try:
        hs.setup()

        # Ensure the replication streamer is always started in case we write to any
        # streams. Will no-op if no streams can be written to by this worker.
        hs.get_replication_streamer()
    except Exception as e:
        handle_startup_exception(e)

    register_start(_base.start, hs)

    # redirect stdio to the logs, if configured.
    if not hs.config.logging.no_redirect_stdio:
        redirect_stdio_to_logs()

    _base.start_worker_reactor("synapse-generic-worker", config)


def main() -> None:
    with LoggingContext("main"):
        start(sys.argv[1:])


if __name__ == "__main__":
    main()
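

# A generic worker is typically launched with both the shared homeserver config
# and its own worker config (the paths below are illustrative):
#
#   python -m synapse.app.generic_worker \
#       --config-path /etc/synapse/homeserver.yaml \
#       --config-path /etc/synapse/workers/generic_worker1.yaml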