# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
|
2020-10-09 05:20:51 -06:00
|
|
|
from typing import TYPE_CHECKING, Any, Dict
|
2016-09-06 11:16:20 -06:00
|
|
|
|
2018-01-16 11:25:28 -07:00
|
|
|
from synapse.api.errors import SynapseError
|
2020-01-30 10:06:38 -07:00
|
|
|
from synapse.logging.context import run_in_background
|
2019-08-22 11:21:10 -06:00
|
|
|
from synapse.logging.opentracing import (
|
|
|
|
get_active_span_text_map,
|
2019-09-03 03:21:30 -06:00
|
|
|
log_kv,
|
2019-08-22 11:21:10 -06:00
|
|
|
set_tag,
|
|
|
|
start_active_span,
|
|
|
|
)
|
2020-10-09 05:20:51 -06:00
|
|
|
from synapse.types import JsonDict, UserID, get_domain_from_id
|
2020-08-20 08:32:33 -06:00
|
|
|
from synapse.util import json_encoder
|
2016-09-06 11:16:20 -06:00
|
|
|
from synapse.util.stringutils import random_string
|
|
|
|
|
2020-10-09 05:20:51 -06:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.app.homeserver import HomeServer
|
|
|
|
|
|
|
|
|
2016-09-06 11:16:20 -06:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2020-09-04 04:54:56 -06:00
|
|
|
class DeviceMessageHandler:
    """Handles sending and receiving of direct-to-device ("to-device") messages,
    both locally and over federation.
    """

    def __init__(self, hs: "HomeServer"):
        """
        Args:
            hs: server
        """
        self.store = hs.get_datastore()
        self.notifier = hs.get_notifier()
        self.is_mine = hs.is_mine
        self.federation = hs.get_federation_sender()

        hs.get_federation_registry().register_edu_handler(
            "m.direct_to_device", self.on_direct_to_device_edu
        )

        self._device_list_updater = hs.get_device_handler().device_list_updater

    async def on_direct_to_device_edu(self, origin: str, content: JsonDict) -> None:
        """Handle an m.direct_to_device EDU received over federation.

        Adds the messages to the local device inbox and notifies local
        recipients via the notifier.

        Args:
            origin: the server the EDU was received from
            content: the EDU content; expected to carry "sender", "type",
                "message_id" and a "messages" map of user_id -> device_id ->
                message content.

        Raises:
            SynapseError: if any recipient user id is not local to this server.
        """
        local_messages = {}
        sender_user_id = content["sender"]
        if origin != get_domain_from_id(sender_user_id):
            logger.warning(
                "Dropping device message from %r with spoofed sender %r",
                origin,
                sender_user_id,
            )
            # BUG FIX: we previously logged that the spoofed message was
            # dropped but then carried on and delivered it anyway. Actually
            # drop it here.
            return

        message_type = content["type"]
        message_id = content["message_id"]
        for user_id, by_device in content["messages"].items():
            # we use UserID.from_string to catch invalid user ids
            if not self.is_mine(UserID.from_string(user_id)):
                logger.warning("Request for keys for non-local user %s", user_id)
                raise SynapseError(400, "Not a user here")

            # nothing to deliver for this user
            if not by_device:
                continue

            messages_by_device = {
                device_id: {
                    "content": message_content,
                    "type": message_type,
                    "sender": sender_user_id,
                }
                for device_id, message_content in by_device.items()
            }

            local_messages[user_id] = messages_by_device

            await self._check_for_unknown_devices(
                message_type, sender_user_id, by_device
            )

        stream_id = await self.store.add_messages_from_remote_to_device_inbox(
            origin, message_id, local_messages
        )

        self.notifier.on_new_event(
            "to_device_key", stream_id, users=local_messages.keys()
        )

    async def _check_for_unknown_devices(
        self,
        message_type: str,
        sender_user_id: str,
        by_device: Dict[str, Dict[str, Any]],
    ) -> None:
        """Checks inbound device messages for unknown remote devices, and if
        found marks the remote cache for the user as stale.

        Only m.room_key_request messages are checked; other message types
        are ignored.
        """

        if message_type != "m.room_key_request":
            return

        # Get the sending device IDs (may include None if a message lacks
        # "requesting_device_id"; the set difference below tolerates that).
        requesting_device_ids = set()
        for message_content in by_device.values():
            device_id = message_content.get("requesting_device_id")
            requesting_device_ids.add(device_id)

        # Check if we are tracking the devices of the remote user.
        room_ids = await self.store.get_rooms_for_user(sender_user_id)
        if not room_ids:
            logger.info(
                "Received device message from remote device we don't"
                " share a room with: %s %s",
                sender_user_id,
                requesting_device_ids,
            )
            return

        # If we are tracking check that we know about the sending
        # devices.
        cached_devices = await self.store.get_cached_devices_for_user(sender_user_id)

        unknown_devices = requesting_device_ids - set(cached_devices)
        if unknown_devices:
            logger.info(
                "Received device message from remote device not in our cache: %s %s",
                sender_user_id,
                unknown_devices,
            )
            await self.store.mark_remote_user_device_cache_as_stale(sender_user_id)

            # Immediately attempt a resync in the background
            run_in_background(
                self._device_list_updater.user_device_resync, sender_user_id
            )

    async def send_device_message(
        self,
        sender_user_id: str,
        message_type: str,
        messages: Dict[str, Dict[str, JsonDict]],
    ) -> None:
        """Send to-device messages on behalf of a local user.

        Messages for local recipients are written straight to their device
        inbox; messages for remote recipients are bundled into one
        m.direct_to_device EDU per destination server and queued for
        federation sending.

        Args:
            sender_user_id: the user sending the messages
            message_type: the to-device message type
            messages: a map of user_id -> device_id -> message content
        """
        set_tag("number_of_messages", len(messages))
        set_tag("sender", sender_user_id)
        local_messages = {}  # type: Dict[str, Dict[str, Dict[str, Any]]]
        remote_messages = {}  # type: Dict[str, Dict[str, Dict[str, JsonDict]]]
        for user_id, by_device in messages.items():
            # we use UserID.from_string to catch invalid user ids
            if self.is_mine(UserID.from_string(user_id)):
                messages_by_device = {
                    device_id: {
                        "content": message_content,
                        "type": message_type,
                        "sender": sender_user_id,
                    }
                    for device_id, message_content in by_device.items()
                }
                if messages_by_device:
                    local_messages[user_id] = messages_by_device
            else:
                destination = get_domain_from_id(user_id)
                remote_messages.setdefault(destination, {})[user_id] = by_device

        message_id = random_string(16)

        context = get_active_span_text_map()

        remote_edu_contents = {}
        # NB: loop variable renamed from `messages` to avoid shadowing the
        # `messages` parameter above.
        for destination, user_messages in remote_messages.items():
            with start_active_span("to_device_for_user"):
                set_tag("destination", destination)
                remote_edu_contents[destination] = {
                    "messages": user_messages,
                    "sender": sender_user_id,
                    "type": message_type,
                    "message_id": message_id,
                    "org.matrix.opentracing_context": json_encoder.encode(context),
                }

        log_kv({"local_messages": local_messages})
        stream_id = await self.store.add_messages_to_device_inbox(
            local_messages, remote_edu_contents
        )

        self.notifier.on_new_event(
            "to_device_key", stream_id, users=local_messages.keys()
        )

        log_kv({"remote_messages": remote_messages})
        for destination in remote_messages.keys():
            # Enqueue a new federation transaction to send the new
            # device messages to each remote destination.
            self.federation.send_device_messages(destination)
|