2015-01-26 03:45:24 -07:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-06 21:26:29 -07:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2015-01-26 03:45:24 -07:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2017-12-30 11:40:19 -07:00
|
|
|
import logging
|
2015-01-26 03:45:24 -07:00
|
|
|
|
2017-12-30 11:40:19 -07:00
|
|
|
import simplejson as json
|
|
|
|
from twisted.internet import defer
|
2015-01-26 03:45:24 -07:00
|
|
|
|
2018-03-12 10:17:08 -06:00
|
|
|
from synapse.api.errors import AuthError, FederationError, SynapseError, NotFoundError
|
2017-12-30 11:40:19 -07:00
|
|
|
from synapse.crypto.event_signing import compute_event_signature
|
2017-12-30 11:40:19 -07:00
|
|
|
from synapse.federation.federation_base import (
|
|
|
|
FederationBase,
|
|
|
|
event_from_pdu_json,
|
|
|
|
)
|
2018-03-12 08:07:39 -06:00
|
|
|
|
|
|
|
from synapse.federation.persistence import TransactionActions
|
2017-12-30 11:40:19 -07:00
|
|
|
from synapse.federation.units import Edu, Transaction
|
|
|
|
import synapse.metrics
|
|
|
|
from synapse.types import get_domain_from_id
|
2017-10-06 09:07:20 -06:00
|
|
|
from synapse.util import async
|
2017-12-30 11:40:19 -07:00
|
|
|
from synapse.util.caches.response_cache import ResponseCache
|
2017-10-25 08:21:08 -06:00
|
|
|
from synapse.util.logcontext import make_deferred_yieldable, preserve_fn
|
2015-01-26 03:45:24 -07:00
|
|
|
from synapse.util.logutils import log_function
|
|
|
|
|
2017-10-06 09:07:20 -06:00
|
|
|
# when processing incoming transactions, we try to handle multiple rooms in
# parallel, up to this limit.
TRANSACTION_CONCURRENCY_LIMIT = 10

logger = logging.getLogger(__name__)

# synapse.federation.federation_server is a silly name
# (so register metrics under "synapse.federation.server" rather than __name__)
metrics = synapse.metrics.get_metrics_for("synapse.federation.server")

# total number of PDUs received in incoming /send transactions
received_pdus_counter = metrics.register_counter("received_pdus")

# total number of EDUs received in incoming /send transactions
received_edus_counter = metrics.register_counter("received_edus")

# incoming federation queries, labelled by query type
received_queries_counter = metrics.register_counter("received_queries", labels=["type"])
|
2015-02-24 11:10:44 -07:00
|
|
|
|
2015-01-26 03:45:24 -07:00
|
|
|
|
2015-02-03 07:58:30 -07:00
|
|
|
class FederationServer(FederationBase):
    """Server side of the federation protocol.

    Handles incoming /send transactions (PDUs and EDUs), state and backfill
    requests, join/leave/invite dances, and end-to-end key queries, mostly by
    delegating to the federation handler after validation. Per-(origin, room)
    linearizers serialise potentially-conflicting work.
    """

    def __init__(self, hs):
        """
        Args:
            hs (synapse.server.HomeServer): the homeserver providing the
                auth checker, handlers, datastore etc.
        """
        super(FederationServer, self).__init__(hs)

        self.auth = hs.get_auth()
        self.handler = hs.get_handlers().federation_handler

        # serialises request handling per (origin, room_id)
        self._server_linearizer = async.Linearizer("fed_server")
        # ensures we don't process the same transaction multiple times in
        # parallel (keyed on (origin, transaction_id))
        self._transaction_linearizer = async.Linearizer("fed_txn_handler")

        self.transaction_actions = TransactionActions(self.store)

        self.registry = hs.get_federation_registry()

        # We cache responses to state queries, as they take a while and often
        # come in waves.
        self._state_resp_cache = ResponseCache(hs, timeout_ms=30000)

    @defer.inlineCallbacks
    @log_function
    def on_backfill_request(self, origin, room_id, versions, limit):
        """Handle a federation backfill request for a room.

        Args:
            origin (str): requesting server name
            room_id (str): room to backfill
            versions (list): event ids to backfill from
            limit (int): maximum number of events to return

        Returns:
            Deferred[(int, dict)]: HTTP status code and a transaction dict
                containing the backfilled PDUs
        """
        with (yield self._server_linearizer.queue((origin, room_id))):
            pdus = yield self.handler.on_backfill_request(
                origin, room_id, versions, limit
            )

            res = self._transaction_from_pdus(pdus).get_dict()

        defer.returnValue((200, res))

    @defer.inlineCallbacks
    @log_function
    def on_incoming_transaction(self, transaction_data):
        """Handle an incoming federation /send/ transaction.

        Args:
            transaction_data (dict): the parsed transaction JSON

        Returns:
            Deferred[(int, object)]: HTTP response code and body
        """
        # keep this as early as possible to make the calculated origin ts as
        # accurate as possible.
        request_time = self._clock.time_msec()

        transaction = Transaction(**transaction_data)

        if not transaction.transaction_id:
            raise Exception("Transaction missing transaction_id")
        if not transaction.origin:
            raise Exception("Transaction missing origin")

        logger.debug("[%s] Got transaction", transaction.transaction_id)

        # use a linearizer to ensure that we don't process the same transaction
        # multiple times in parallel.
        with (yield self._transaction_linearizer.queue(
                (transaction.origin, transaction.transaction_id),
        )):
            result = yield self._handle_incoming_transaction(
                transaction, request_time,
            )

        defer.returnValue(result)

    @defer.inlineCallbacks
    def _handle_incoming_transaction(self, transaction, request_time):
        """ Process an incoming transaction and return the HTTP response

        Args:
            transaction (Transaction): incoming transaction
            request_time (int): timestamp that the HTTP request arrived at

        Returns:
            Deferred[(int, object)]: http response code and body
        """
        # if we have already handled this transaction, replay the stored
        # response rather than reprocessing the PDUs.
        response = yield self.transaction_actions.have_responded(transaction)

        if response:
            logger.debug(
                "[%s] We've already responded to this request",
                transaction.transaction_id
            )
            defer.returnValue(response)
            # NOTE: unreachable — defer.returnValue raises — kept for clarity
            return

        logger.debug("[%s] Transaction is new", transaction.transaction_id)

        received_pdus_counter.inc_by(len(transaction.pdus))

        # group the incoming PDUs by room so each room can be processed
        # independently (and concurrently) below
        pdus_by_room = {}

        for p in transaction.pdus:
            # convert the sender's relative "age" into an absolute "age_ts"
            # using the time this request arrived
            if "unsigned" in p:
                unsigned = p["unsigned"]
                if "age" in unsigned:
                    p["age"] = unsigned["age"]
            if "age" in p:
                p["age_ts"] = request_time - int(p["age"])
                del p["age"]

            event = event_from_pdu_json(p)
            room_id = event.room_id
            pdus_by_room.setdefault(room_id, []).append(event)

        # maps event_id -> {} on success, or {"error": ...} on failure; this
        # becomes the per-PDU result in the transaction response
        pdu_results = {}

        # we can process different rooms in parallel (which is useful if they
        # require callouts to other servers to fetch missing events), but
        # impose a limit to avoid going too crazy with ram/cpu.
        @defer.inlineCallbacks
        def process_pdus_for_room(room_id):
            logger.debug("Processing PDUs for %s", room_id)
            for pdu in pdus_by_room[room_id]:
                event_id = pdu.event_id
                try:
                    yield self._handle_received_pdu(
                        transaction.origin, pdu
                    )
                    pdu_results[event_id] = {}
                except FederationError as e:
                    logger.warn("Error handling PDU %s: %s", event_id, e)
                    pdu_results[event_id] = {"error": str(e)}
                except Exception as e:
                    # a PDU failure must not fail the whole transaction
                    pdu_results[event_id] = {"error": str(e)}
                    logger.exception("Failed to handle PDU %s", event_id)

        yield async.concurrently_execute(
            process_pdus_for_room, pdus_by_room.keys(),
            TRANSACTION_CONCURRENCY_LIMIT,
        )

        # EDUs are handled sequentially, after all PDUs
        if hasattr(transaction, "edus"):
            for edu in (Edu(**x) for x in transaction.edus):
                yield self.received_edu(
                    transaction.origin,
                    edu.edu_type,
                    edu.content
                )

        # pdu_failures are logged but otherwise ignored
        pdu_failures = getattr(transaction, "pdu_failures", [])
        for failure in pdu_failures:
            logger.info("Got failure %r", failure)

        response = {
            "pdus": pdu_results,
        }

        logger.debug("Returning: %s", str(response))

        # persist the response so a retransmitted transaction gets the same
        # answer (see have_responded above)
        yield self.transaction_actions.set_response(
            transaction,
            200, response
        )
        defer.returnValue((200, response))

    @defer.inlineCallbacks
    def received_edu(self, origin, edu_type, content):
        """Dispatch a single received EDU to the handler registry.

        Args:
            origin (str): server the EDU came from
            edu_type (str): EDU type identifier
            content (dict): EDU content
        """
        received_edus_counter.inc()
        yield self.registry.on_edu(edu_type, origin, content)

    @defer.inlineCallbacks
    @log_function
    def on_context_state_request(self, origin, room_id, event_id):
        """Handle a request for the room state at a given event.

        Args:
            origin (str): requesting server; must be in the room
            room_id (str)
            event_id (str): required — event to fetch the state at

        Returns:
            Deferred[(int, dict)]: 200 and a dict of "pdus"/"auth_chain"

        Raises:
            AuthError: if the requesting host is not in the room
        """
        if not event_id:
            raise NotImplementedError("Specify an event")

        in_room = yield self.auth.check_host_in_room(room_id, origin)
        if not in_room:
            raise AuthError(403, "Host not in room.")

        # responses are cached briefly since these are expensive and tend to
        # arrive in waves; only compute if there's no in-flight/cached result
        result = self._state_resp_cache.get((room_id, event_id))
        if not result:
            with (yield self._server_linearizer.queue((origin, room_id))):
                d = self._state_resp_cache.set(
                    (room_id, event_id),
                    preserve_fn(self._on_context_state_request_compute)(room_id, event_id)
                )
                resp = yield make_deferred_yieldable(d)
        else:
            resp = yield make_deferred_yieldable(result)

        defer.returnValue((200, resp))

    @defer.inlineCallbacks
    def on_state_ids_request(self, origin, room_id, event_id):
        """Like on_context_state_request, but returns only event ids.

        Args:
            origin (str): requesting server; must be in the room
            room_id (str)
            event_id (str): required

        Returns:
            Deferred[(int, dict)]: 200 and {"pdu_ids", "auth_chain_ids"}

        Raises:
            AuthError: if the requesting host is not in the room
        """
        if not event_id:
            raise NotImplementedError("Specify an event")

        in_room = yield self.auth.check_host_in_room(room_id, origin)
        if not in_room:
            raise AuthError(403, "Host not in room.")

        state_ids = yield self.handler.get_state_ids_for_pdu(
            room_id, event_id,
        )
        auth_chain_ids = yield self.store.get_auth_chain_ids(state_ids)

        defer.returnValue((200, {
            "pdu_ids": state_ids,
            "auth_chain_ids": auth_chain_ids,
        }))

    @defer.inlineCallbacks
    def _on_context_state_request_compute(self, room_id, event_id):
        """Compute the (uncached) response body for a state request.

        Returns:
            Deferred[dict]: {"pdus": [...], "auth_chain": [...]} as pdu json
        """
        pdus = yield self.handler.get_state_for_pdu(
            room_id, event_id,
        )
        auth_chain = yield self.store.get_auth_chain(
            [pdu.event_id for pdu in pdus]
        )

        for event in auth_chain:
            # We sign these again because there was a bug where we
            # incorrectly signed things the first time round
            if self.hs.is_mine_id(event.event_id):
                event.signatures.update(
                    compute_event_signature(
                        event,
                        self.hs.hostname,
                        self.hs.config.signing_key[0]
                    )
                )

        defer.returnValue({
            "pdus": [pdu.get_pdu_json() for pdu in pdus],
            "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
        })

    @defer.inlineCallbacks
    @log_function
    def on_pdu_request(self, origin, event_id):
        """Return a single persisted PDU as a transaction, or 404.

        Args:
            origin (str): requesting server
            event_id (str)

        Returns:
            Deferred[(int, object)]: (200, transaction dict) or (404, "")
        """
        pdu = yield self._get_persisted_pdu(origin, event_id)

        if pdu:
            defer.returnValue(
                (200, self._transaction_from_pdus([pdu]).get_dict())
            )
        else:
            defer.returnValue((404, ""))

    @defer.inlineCallbacks
    @log_function
    def on_pull_request(self, origin, versions):
        # pull-style transactions are not supported by this server
        raise NotImplementedError("Pull transactions not implemented")

    @defer.inlineCallbacks
    def on_query_request(self, query_type, args):
        """Dispatch an incoming federation query to the handler registry.

        Args:
            query_type (str)
            args (dict)

        Returns:
            Deferred[(int, object)]: 200 and the handler's response
        """
        received_queries_counter.inc(query_type)
        resp = yield self.registry.on_query(query_type, args)
        defer.returnValue((200, resp))

    @defer.inlineCallbacks
    def on_make_join_request(self, room_id, user_id):
        """Build a prototype join event for a remote user to sign and send.

        Returns:
            Deferred[dict]: {"event": <pdu json>}
        """
        pdu = yield self.handler.on_make_join_request(room_id, user_id)
        time_now = self._clock.time_msec()
        defer.returnValue({"event": pdu.get_pdu_json(time_now)})

    @defer.inlineCallbacks
    def on_invite_request(self, origin, content):
        """Handle an invite pushed to us by a remote server.

        Args:
            origin (str): inviting server
            content (dict): the invite event as pdu json

        Returns:
            Deferred[(int, dict)]: 200 and {"event": <signed pdu json>}
        """
        pdu = event_from_pdu_json(content)
        ret_pdu = yield self.handler.on_invite_request(origin, pdu)
        time_now = self._clock.time_msec()
        defer.returnValue((200, {"event": ret_pdu.get_pdu_json(time_now)}))

    @defer.inlineCallbacks
    def on_send_join_request(self, origin, content):
        """Handle the signed join event sent back by a joining server.

        Returns:
            Deferred[(int, dict)]: 200 and the room "state" and "auth_chain"
        """
        logger.debug("on_send_join_request: content: %s", content)
        pdu = event_from_pdu_json(content)
        logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
        res_pdus = yield self.handler.on_send_join_request(origin, pdu)
        time_now = self._clock.time_msec()
        defer.returnValue((200, {
            "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
            "auth_chain": [
                p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]
            ],
        }))

    @defer.inlineCallbacks
    def on_make_leave_request(self, room_id, user_id):
        """Build a prototype leave event for a remote user to sign and send.

        Returns:
            Deferred[dict]: {"event": <pdu json>}
        """
        pdu = yield self.handler.on_make_leave_request(room_id, user_id)
        time_now = self._clock.time_msec()
        defer.returnValue({"event": pdu.get_pdu_json(time_now)})

    @defer.inlineCallbacks
    def on_send_leave_request(self, origin, content):
        """Handle the signed leave event sent back by a leaving server.

        Returns:
            Deferred[(int, dict)]: (200, {})
        """
        logger.debug("on_send_leave_request: content: %s", content)
        pdu = event_from_pdu_json(content)
        logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
        yield self.handler.on_send_leave_request(origin, pdu)
        defer.returnValue((200, {}))

    @defer.inlineCallbacks
    def on_event_auth(self, origin, room_id, event_id):
        """Return the auth chain for a given event.

        Returns:
            Deferred[(int, dict)]: 200 and {"auth_chain": [<pdu json>, ...]}
        """
        with (yield self._server_linearizer.queue((origin, room_id))):
            time_now = self._clock.time_msec()
            auth_pdus = yield self.handler.on_event_auth(event_id)
            res = {
                "auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
            }
        defer.returnValue((200, res))

    @defer.inlineCallbacks
    def on_query_auth_request(self, origin, content, room_id, event_id):
        """
        Content is a dict with keys::
            auth_chain (list): A list of events that give the auth chain.
            missing (list): A list of event_ids indicating what the other
                side (`origin`) think we're missing.
            rejects (dict): A mapping from event_id to a 2-tuple of reason
                string and a proof (or None) of why the event was rejected.
                The keys of this dict give the list of events the `origin` has
                rejected.

        Args:
            origin (str)
            content (dict)
            room_id (str)
            event_id (str)

        Returns:
            Deferred: Results in `dict` with the same format as `content`
        """
        with (yield self._server_linearizer.queue((origin, room_id))):
            auth_chain = [
                event_from_pdu_json(e)
                for e in content["auth_chain"]
            ]

            # verify signatures/hashes before trusting the supplied events
            signed_auth = yield self._check_sigs_and_hash_and_fetch(
                origin, auth_chain, outlier=True
            )

            ret = yield self.handler.on_query_auth(
                origin,
                event_id,
                signed_auth,
                content.get("rejects", []),
                content.get("missing", []),
            )

            time_now = self._clock.time_msec()
            send_content = {
                "auth_chain": [
                    e.get_pdu_json(time_now)
                    for e in ret["auth_chain"]
                ],
                "rejects": ret.get("rejects", []),
                "missing": ret.get("missing", []),
            }

        defer.returnValue(
            (200, send_content)
        )

    @log_function
    def on_query_client_keys(self, origin, content):
        """Handle a device key query by delegating to the query registry."""
        return self.on_query_request("client_keys", content)

    def on_query_user_devices(self, origin, user_id):
        """Handle a user device list query via the query registry."""
        return self.on_query_request("user_devices", user_id)

    @defer.inlineCallbacks
    @log_function
    def on_claim_client_keys(self, origin, content):
        """Claim one-time E2E keys on behalf of a remote server.

        Args:
            origin (str)
            content (dict): with "one_time_keys" mapping
                user_id -> device_id -> algorithm

        Returns:
            Deferred[dict]: {"one_time_keys": {user_id: {device_id:
                {key_id: key json}}}}
        """
        query = []
        for user_id, device_keys in content.get("one_time_keys", {}).items():
            for device_id, algorithm in device_keys.items():
                query.append((user_id, device_id, algorithm))

        results = yield self.store.claim_e2e_one_time_keys(query)

        json_result = {}
        for user_id, device_keys in results.items():
            for device_id, keys in device_keys.items():
                for key_id, json_bytes in keys.items():
                    # keys are stored as json strings; decode for the response
                    json_result.setdefault(user_id, {})[device_id] = {
                        key_id: json.loads(json_bytes)
                    }

        logger.info(
            "Claimed one-time-keys: %s",
            ",".join((
                "%s for %s:%s" % (key_id, user_id, device_id)
                for user_id, user_keys in json_result.iteritems()
                for device_id, device_keys in user_keys.iteritems()
                for key_id, _ in device_keys.iteritems()
            )),
        )

        defer.returnValue({"one_time_keys": json_result})

    @defer.inlineCallbacks
    @log_function
    def on_get_missing_events(self, origin, room_id, earliest_events,
                              latest_events, limit, min_depth):
        """Return events on paths between earliest_events and latest_events.

        Args:
            origin (str)
            room_id (str)
            earliest_events (list): event ids the caller already has
            latest_events (list): event ids to work backwards from
            limit (int): maximum number of events to return
            min_depth (int)

        Returns:
            Deferred[dict]: {"events": [<pdu json>, ...]}
        """
        with (yield self._server_linearizer.queue((origin, room_id))):
            logger.info(
                "on_get_missing_events: earliest_events: %r, latest_events: %r,"
                " limit: %d, min_depth: %d",
                earliest_events, latest_events, limit, min_depth
            )

            missing_events = yield self.handler.on_get_missing_events(
                origin, room_id, earliest_events, latest_events, limit, min_depth
            )

            # only log the full event list when it's small enough to be useful
            if len(missing_events) < 5:
                logger.info(
                    "Returning %d events: %r", len(missing_events), missing_events
                )
            else:
                logger.info("Returning %d events", len(missing_events))

            time_now = self._clock.time_msec()

        defer.returnValue({
            "events": [ev.get_pdu_json(time_now) for ev in missing_events],
        })

    @log_function
    def on_openid_userinfo(self, token):
        """Look up the user id for an OpenID token.

        Returns:
            Deferred: result of the store lookup (user id, or whatever the
                store returns for an unknown/expired token — store-dependent)
        """
        ts_now_ms = self._clock.time_msec()
        return self.store.get_user_id_for_open_id_token(token, ts_now_ms)

    @log_function
    def _get_persisted_pdu(self, origin, event_id, do_auth=True):
        """ Get a PDU from the database with given origin and id.

        Returns:
            Deferred: Results in a `Pdu`.
        """
        return self.handler.get_persisted_pdu(
            origin, event_id, do_auth=do_auth
        )

    def _transaction_from_pdus(self, pdu_list):
        """Returns a new Transaction containing the given PDUs suitable for
        transmission.
        """
        time_now = self._clock.time_msec()
        pdus = [p.get_pdu_json(time_now) for p in pdu_list]
        return Transaction(
            origin=self.server_name,
            pdus=pdus,
            origin_server_ts=int(time_now),
            # no single destination: this is a response body
            destination=None,
        )

    @defer.inlineCallbacks
    def _handle_received_pdu(self, origin, pdu):
        """ Process a PDU received in a federation /send/ transaction.

        Args:
            origin (str): server which sent the pdu
            pdu (FrozenEvent): received pdu

        Returns (Deferred): completes with None
        Raises: FederationError if the signatures / hash do not match
        """
        # check that it's actually being sent from a valid destination to
        # workaround bug #1753 in 0.18.5 and 0.18.6
        if origin != get_domain_from_id(pdu.event_id):
            # We continue to accept join events from any server; this is
            # necessary for the federation join dance to work correctly.
            # (When we join over federation, the "helper" server is
            # responsible for sending out the join event, rather than the
            # origin. See bug #1893).
            if not (
                pdu.type == 'm.room.member' and
                pdu.content and
                pdu.content.get("membership", None) == 'join'
            ):
                logger.info(
                    "Discarding PDU %s from invalid origin %s",
                    pdu.event_id, origin
                )
                return
            else:
                logger.info(
                    "Accepting join PDU %s from %s",
                    pdu.event_id, origin
                )

        # Check signature.
        try:
            pdu = yield self._check_sigs_and_hash(pdu)
        except SynapseError as e:
            raise FederationError(
                "ERROR",
                e.code,
                e.msg,
                affected=pdu.event_id,
            )

        yield self.handler.on_receive_pdu(origin, pdu, get_missing=True)

    def __str__(self):
        # NOTE(review): "ReplicationLayer" is a historical name for this class
        return "<ReplicationLayer(%s)>" % self.server_name

    @defer.inlineCallbacks
    def exchange_third_party_invite(
            self,
            sender_user_id,
            target_user_id,
            room_id,
            signed,
    ):
        """Exchange a third-party invite for a real invite via the handler."""
        ret = yield self.handler.exchange_third_party_invite(
            sender_user_id,
            target_user_id,
            room_id,
            signed,
        )
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
        """Handle a remote server's third-party invite exchange request."""
        ret = yield self.handler.on_exchange_third_party_invite_request(
            origin, room_id, event_dict
        )
        defer.returnValue(ret)
|
2018-03-12 10:17:08 -06:00
|
|
|
|
|
|
|
|
|
|
|
class FederationHandlerRegistry(object):
    """Allows classes to register themselves as handlers for a given EDU or
    query type for incoming federation traffic.
    """
    def __init__(self):
        # edu_type (str) -> handler callable taking (origin, content)
        self.edu_handlers = {}
        # query_type (str) -> handler callable taking (args)
        self.query_handlers = {}

    def register_edu_handler(self, edu_type, handler):
        """Sets the handler callable that will be used to handle an incoming
        federation EDU of the given type.

        Args:
            edu_type (str): The type of the incoming EDU to register handler for
            handler (Callable[[str, dict]]): A callable invoked on incoming EDU
                of the given type. The arguments are the origin server name and
                the EDU contents.

        Raises:
            KeyError: if a handler is already registered for this EDU type
        """
        if edu_type in self.edu_handlers:
            raise KeyError("Already have an EDU handler for %s" % (edu_type,))

        self.edu_handlers[edu_type] = handler

    def register_query_handler(self, query_type, handler):
        """Sets the handler callable that will be used to handle an incoming
        federation query of the given type.

        Args:
            query_type (str): Category name of the query, which should match
                the string used by make_query.
            handler (Callable[[dict], Deferred[dict]]): Invoked to handle
                incoming queries of this type. The return will be yielded
                on and the result used as the response to the query request.

        Raises:
            KeyError: if a handler is already registered for this query type
        """
        if query_type in self.query_handlers:
            raise KeyError(
                "Already have a Query handler for %s" % (query_type,)
            )

        self.query_handlers[query_type] = handler

    @defer.inlineCallbacks
    def on_edu(self, edu_type, origin, content):
        """Pass an incoming EDU to the handler registered for its type.

        Handler failures are logged and swallowed: EDUs are best-effort and
        must not fail the enclosing transaction.

        Args:
            edu_type (str)
            origin (str): server the EDU came from
            content (dict): EDU contents
        """
        handler = self.edu_handlers.get(edu_type)
        if not handler:
            logger.warn("No handler registered for EDU type %s", edu_type)
            # Bug fix: previously we fell through here and called
            # `handler(origin, content)` with handler=None, raising a
            # TypeError which was then mis-logged as a handler failure.
            return

        try:
            yield handler(origin, content)
        except SynapseError as e:
            logger.info("Failed to handle edu %r: %r", edu_type, e)
        except Exception:
            logger.exception("Failed to handle edu %r", edu_type)

    def on_query(self, query_type, args):
        """Dispatch an incoming federation query to its registered handler.

        Args:
            query_type (str)
            args (dict): query arguments

        Returns:
            The handler's return value (yielded on by the caller).

        Raises:
            NotFoundError: if no handler is registered for this query type
        """
        handler = self.query_handlers.get(query_type)
        if not handler:
            logger.warn("No handler registered for query type %s", query_type)
            raise NotFoundError("No handler for Query type '%s'" % (query_type,))

        return handler(args)
|