2015-01-26 11:53:31 -07:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-05 11:01:18 -07:00
|
|
|
# Copyright 2015 - 2016 OpenMarket Ltd
|
2015-01-26 11:53:31 -07:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from ._base import BaseHandler
|
|
|
|
|
2016-01-06 09:44:13 -07:00
|
|
|
from synapse.streams.config import PaginationConfig
|
2015-01-30 06:33:41 -07:00
|
|
|
from synapse.api.constants import Membership, EventTypes
|
2016-04-01 07:06:00 -06:00
|
|
|
from synapse.util.async import concurrently_execute
|
|
|
|
from synapse.util.logcontext import LoggingContext
|
2016-02-09 04:31:04 -07:00
|
|
|
from synapse.util.metrics import Measure
|
2016-03-24 11:47:31 -06:00
|
|
|
from synapse.util.caches.response_cache import ResponseCache
|
2016-03-04 07:44:01 -07:00
|
|
|
from synapse.push.clientformat import format_push_rules_for_user
|
2015-01-26 11:53:31 -07:00
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
2015-01-26 08:46:31 -07:00
|
|
|
import collections
|
2015-01-26 11:53:31 -07:00
|
|
|
import logging
|
2016-02-01 08:59:40 -07:00
|
|
|
import itertools
|
2015-01-26 11:53:31 -07:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
2015-01-26 08:46:31 -07:00
|
|
|
|
|
|
|
|
|
|
|
# The per-request parameters of a /sync: the requesting user, the event
# filter collection to apply, whether the requester is a guest, and the key
# used to de-duplicate identical concurrent requests.
SyncConfig = collections.namedtuple(
    "SyncConfig",
    "user filter_collection is_guest request_key",
)
|
2015-01-26 08:46:31 -07:00
|
|
|
|
|
|
|
|
2015-10-01 10:53:07 -06:00
|
|
|
class TimelineBatch(collections.namedtuple("TimelineBatch", [
    "prev_batch",
    "events",
    "limited",
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        return True if self.events else False
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2015-10-05 09:39:22 -06:00
|
|
|
|
2015-10-13 03:24:51 -06:00
|
|
|
class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [
    "room_id",              # str
    "timeline",             # TimelineBatch
    "state",                # dict[(str, str), FrozenEvent]
    "ephemeral",
    "account_data",
    "unread_notifications",
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        # nb the notification count does not, er, count: if there's nothing
        # else in the result, we don't need to send it.
        return any((
            self.timeline,
            self.state,
            self.ephemeral,
            self.account_data,
        ))
|
2015-01-26 08:46:31 -07:00
|
|
|
|
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
class ArchivedSyncResult(collections.namedtuple("ArchivedSyncResult", [
    "room_id",       # str
    "timeline",      # TimelineBatch
    "state",         # dict[(str, str), FrozenEvent]
    "account_data",
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if room needs to be part of the sync result.
        """
        return any((
            self.timeline,
            self.state,
            self.account_data,
        ))
|
2015-10-19 10:26:18 -06:00
|
|
|
|
|
|
|
|
2015-10-13 03:24:51 -06:00
|
|
|
class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
    "room_id",   # str
    "invite",    # FrozenEvent: the invite event
])):
    __slots__ = []

    def __nonzero__(self):
        """An invite is always interesting enough to report to the client,
        so the result never reads as empty."""
        return True
|
|
|
|
|
2015-01-26 08:46:31 -07:00
|
|
|
|
|
|
|
class SyncResult(collections.namedtuple("SyncResult", [
    "next_batch",    # Token for the next sync
    "presence",      # List of presence events for the user.
    "account_data",  # List of account_data events for the user.
    "joined",        # JoinedSyncResult for each joined room.
    "invited",       # InvitedSyncResult for each invited room.
    "archived",      # ArchivedSyncResult for each archived room.
])):
    __slots__ = []

    def __nonzero__(self):
        """Make the result appear empty if there are no updates. This is used
        to tell if the notifier needs to wait for more events when polling for
        events.
        """
        return any((
            self.presence,
            self.joined,
            self.invited,
            self.archived,
            self.account_data,
        ))
|
2015-01-26 08:46:31 -07:00
|
|
|
|
|
|
|
|
|
|
|
class SyncHandler(BaseHandler):
|
|
|
|
|
|
|
|
    def __init__(self, hs):
        """
        Args:
            hs: the HomeServer object, used to pull in the event sources
                and clock this handler depends on.
        """
        super(SyncHandler, self).__init__(hs)
        self.event_sources = hs.get_event_sources()
        self.clock = hs.get_clock()
        # De-duplicates concurrent /sync requests that share a request_key
        # (see wait_for_sync_for_user).
        self.response_cache = ResponseCache()
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2015-10-26 12:47:18 -06:00
|
|
|
def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0,
|
|
|
|
full_state=False):
|
2015-01-27 09:24:22 -07:00
|
|
|
"""Get the sync for a client if we have new data for it now. Otherwise
|
|
|
|
wait for new data to arrive on the server. If the timeout expires, then
|
|
|
|
return an empty sync result.
|
|
|
|
Returns:
|
|
|
|
A Deferred SyncResult.
|
|
|
|
"""
|
2016-03-24 11:47:31 -06:00
|
|
|
result = self.response_cache.get(sync_config.request_key)
|
|
|
|
if not result:
|
|
|
|
result = self.response_cache.set(
|
|
|
|
sync_config.request_key,
|
|
|
|
self._wait_for_sync_for_user(
|
|
|
|
sync_config, since_token, timeout, full_state
|
|
|
|
)
|
|
|
|
)
|
|
|
|
return result
|
2015-10-26 12:47:18 -06:00
|
|
|
|
2016-03-24 11:47:31 -06:00
|
|
|
    @defer.inlineCallbacks
    def _wait_for_sync_for_user(self, sync_config, since_token, timeout,
                                full_state):
        """Compute the sync response, optionally blocking for new events.

        Tags the current logging context with the kind of sync being served,
        then either answers immediately (initial/full-state syncs, or a zero
        timeout) or parks the request on the notifier until new data arrives
        or the timeout expires.

        Returns:
            A Deferred SyncResult.
        """
        context = LoggingContext.current_context()
        if context:
            # Tag the request's log context so the sync flavour shows up in
            # metrics/logs.
            if since_token is None:
                context.tag = "initial_sync"
            elif full_state:
                context.tag = "full_state_sync"
            else:
                context.tag = "incremental_sync"

        if timeout == 0 or since_token is None or full_state:
            # we are going to return immediately, so don't bother calling
            # notifier.wait_for_events.
            result = yield self.current_sync_for_user(
                sync_config, since_token, full_state=full_state,
            )
            defer.returnValue(result)
        else:
            def current_sync_callback(before_token, after_token):
                # Invoked by the notifier when something may have happened;
                # recomputes the incremental sync from the original token.
                return self.current_sync_callback if False else self.current_sync_for_user(sync_config, since_token)

            result = yield self.notifier.wait_for_events(
                sync_config.user.to_string(), timeout, current_sync_callback,
                from_token=since_token,
            )
            defer.returnValue(result)
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2015-10-26 12:47:18 -06:00
|
|
|
def current_sync_for_user(self, sync_config, since_token=None,
|
|
|
|
full_state=False):
|
2015-01-27 09:24:22 -07:00
|
|
|
"""Get the sync for client needed to match what the server has now.
|
|
|
|
Returns:
|
|
|
|
A Deferred SyncResult.
|
|
|
|
"""
|
2015-10-26 12:47:18 -06:00
|
|
|
if since_token is None or full_state:
|
|
|
|
return self.full_state_sync(sync_config, since_token)
|
2015-01-26 08:46:31 -07:00
|
|
|
else:
|
2015-10-05 09:39:22 -06:00
|
|
|
return self.incremental_sync_with_gap(sync_config, since_token)
|
2015-01-26 08:46:31 -07:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def full_state_sync(self, sync_config, timeline_since_token):
        """Get a sync for a client which is starting without any state.

        If a 'message_since_token' is given, only timeline events which have
        happened since that token will be returned.

        Returns:
            A Deferred SyncResult.
        """
        now_token = yield self.event_sources.get_current_token()

        # Pull in typing/receipt events; now_token is advanced to cover them.
        now_token, ephemeral_by_room = yield self.ephemeral_by_room(
            sync_config, now_token
        )

        presence_stream = self.event_sources.sources["presence"]
        # TODO (mjark): This looks wrong, shouldn't we be getting the presence
        # UP to the present rather than after the present?
        pagination_config = PaginationConfig(from_token=now_token)
        presence, _ = yield presence_stream.get_pagination_rows(
            user=sync_config.user,
            pagination_config=pagination_config.get_source_config("presence"),
            key=None
        )

        # All memberships we might have to report on an initial sync.
        membership_list = (
            Membership.INVITE, Membership.JOIN, Membership.LEAVE, Membership.BAN
        )

        room_list = yield self.store.get_rooms_for_user_where_membership_is(
            user_id=sync_config.user.to_string(),
            membership_list=membership_list
        )

        account_data, account_data_by_room = (
            yield self.store.get_account_data_for_user(
                sync_config.user.to_string()
            )
        )

        # Push rules are exposed to clients as a synthetic account_data event.
        account_data['m.push_rules'] = yield self.push_rules_for_user(
            sync_config.user
        )

        tags_by_room = yield self.store.get_tags_for_user(
            sync_config.user.to_string()
        )

        # Result accumulators, mutated by the closure below.
        joined = []
        invited = []
        archived = []

        user_id = sync_config.user.to_string()

        @defer.inlineCallbacks
        def _generate_room_entry(event):
            # `event` is the user's membership event for a room; dispatch on
            # its membership to decide which bucket the room lands in.
            if event.membership == Membership.JOIN:
                room_result = yield self.full_state_sync_for_joined_room(
                    room_id=event.room_id,
                    sync_config=sync_config,
                    now_token=now_token,
                    timeline_since_token=timeline_since_token,
                    ephemeral_by_room=ephemeral_by_room,
                    tags_by_room=tags_by_room,
                    account_data_by_room=account_data_by_room,
                )
                joined.append(room_result)
            elif event.membership == Membership.INVITE:
                invite = yield self.store.get_event(event.event_id)
                invited.append(InvitedSyncResult(
                    room_id=event.room_id,
                    invite=invite,
                ))
            elif event.membership in (Membership.LEAVE, Membership.BAN):
                # Always send down rooms we were banned or kicked from.
                if not sync_config.filter_collection.include_leave:
                    if event.membership == Membership.LEAVE:
                        if user_id == event.sender:
                            return

                # Archived rooms are synced only up to the point we left.
                leave_token = now_token.copy_and_replace(
                    "room_key", "s%d" % (event.stream_ordering,)
                )
                room_result = yield self.full_state_sync_for_archived_room(
                    sync_config=sync_config,
                    room_id=event.room_id,
                    leave_event_id=event.event_id,
                    leave_token=leave_token,
                    timeline_since_token=timeline_since_token,
                    tags_by_room=tags_by_room,
                    account_data_by_room=account_data_by_room,
                )
                archived.append(room_result)

        # Build up to 10 room entries concurrently.
        yield concurrently_execute(_generate_room_entry, room_list, 10)

        account_data_for_user = sync_config.filter_collection.filter_account_data(
            self.account_data_for_user(account_data)
        )

        presence = sync_config.filter_collection.filter_presence(
            presence
        )

        defer.returnValue(SyncResult(
            presence=presence,
            account_data=account_data_for_user,
            joined=joined,
            invited=invited,
            archived=archived,
            next_batch=now_token,
        ))
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def full_state_sync_for_joined_room(self, room_id, sync_config,
                                        now_token, timeline_since_token,
                                        ephemeral_by_room, tags_by_room,
                                        account_data_by_room):
        """Sync a room for a client which is starting without any state
        Returns:
            A Deferred JoinedSyncResult.
        """
        batch = yield self.load_filtered_recents(
            room_id, sync_config, now_token, since_token=timeline_since_token
        )

        # Reuse the incremental machinery with full_state=True so that the
        # complete room state is included alongside the timeline batch.
        room_sync = yield self.incremental_sync_with_gap_for_room(
            room_id, sync_config,
            now_token=now_token,
            since_token=timeline_since_token,
            ephemeral_by_room=ephemeral_by_room,
            tags_by_room=tags_by_room,
            account_data_by_room=account_data_by_room,
            batch=batch,
            full_state=True,
        )

        defer.returnValue(room_sync)
|
2015-01-27 09:24:22 -07:00
|
|
|
|
2016-03-04 07:44:01 -07:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def push_rules_for_user(self, user):
|
|
|
|
user_id = user.to_string()
|
|
|
|
rawrules = yield self.store.get_push_rules_for_user(user_id)
|
|
|
|
enabled_map = yield self.store.get_push_rules_enabled_for_user(user_id)
|
|
|
|
rules = format_push_rules_for_user(user, rawrules, enabled_map)
|
|
|
|
defer.returnValue(rules)
|
|
|
|
|
2015-12-01 11:41:32 -07:00
|
|
|
def account_data_for_user(self, account_data):
|
|
|
|
account_data_events = []
|
|
|
|
|
|
|
|
for account_data_type, content in account_data.items():
|
|
|
|
account_data_events.append({
|
|
|
|
"type": account_data_type,
|
|
|
|
"content": content,
|
|
|
|
})
|
|
|
|
|
|
|
|
return account_data_events
|
|
|
|
|
|
|
|
def account_data_for_room(self, room_id, tags_by_room, account_data_by_room):
|
|
|
|
account_data_events = []
|
2015-11-02 09:23:15 -07:00
|
|
|
tags = tags_by_room.get(room_id)
|
2015-11-09 07:52:18 -07:00
|
|
|
if tags is not None:
|
2015-12-01 11:41:32 -07:00
|
|
|
account_data_events.append({
|
2015-11-02 09:23:15 -07:00
|
|
|
"type": "m.tag",
|
|
|
|
"content": {"tags": tags},
|
|
|
|
})
|
2015-12-01 11:41:32 -07:00
|
|
|
|
|
|
|
account_data = account_data_by_room.get(room_id, {})
|
|
|
|
for account_data_type, content in account_data.items():
|
|
|
|
account_data_events.append({
|
|
|
|
"type": account_data_type,
|
|
|
|
"content": content,
|
|
|
|
})
|
|
|
|
|
|
|
|
return account_data_events
|
2015-11-02 09:23:15 -07:00
|
|
|
|
2015-10-20 09:36:20 -06:00
|
|
|
    @defer.inlineCallbacks
    def ephemeral_by_room(self, sync_config, now_token, since_token=None):
        """Get the ephemeral events for each room the user is in
        Args:
            sync_config (SyncConfig): The flags, filters and user for the sync.
            now_token (StreamToken): Where the server is currently up to.
            since_token (StreamToken): Where the server was when the client
                last synced.
        Returns:
            A tuple of the now StreamToken, updated to reflect the which typing
            events are included, and a dict mapping from room_id to a list of
            typing events for that room.
        """

        with Measure(self.clock, "ephemeral_by_room"):
            # "0" is the start-of-stream key for an initial sync.
            typing_key = since_token.typing_key if since_token else "0"

            rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
            room_ids = [room.room_id for room in rooms]

            typing_source = self.event_sources.sources["typing"]
            typing, typing_key = yield typing_source.get_new_events(
                user=sync_config.user,
                from_key=typing_key,
                limit=sync_config.filter_collection.ephemeral_limit(),
                room_ids=room_ids,
                is_guest=sync_config.is_guest,
            )
            # Advance the token so the client doesn't see these events again.
            now_token = now_token.copy_and_replace("typing_key", typing_key)

            ephemeral_by_room = {}

            for event in typing:
                # we want to exclude the room_id from the event, but modifying the
                # result returned by the event source is poor form (it might cache
                # the object)
                room_id = event["room_id"]
                event_copy = {k: v for (k, v) in event.iteritems()
                              if k != "room_id"}
                ephemeral_by_room.setdefault(room_id, []).append(event_copy)

            receipt_key = since_token.receipt_key if since_token else "0"

            receipt_source = self.event_sources.sources["receipt"]
            receipts, receipt_key = yield receipt_source.get_new_events(
                user=sync_config.user,
                from_key=receipt_key,
                limit=sync_config.filter_collection.ephemeral_limit(),
                room_ids=room_ids,
                is_guest=sync_config.is_guest,
            )
            now_token = now_token.copy_and_replace("receipt_key", receipt_key)

            for event in receipts:
                room_id = event["room_id"]
                # exclude room id, as above
                event_copy = {k: v for (k, v) in event.iteritems()
                              if k != "room_id"}
                ephemeral_by_room.setdefault(room_id, []).append(event_copy)

        defer.returnValue((now_token, ephemeral_by_room))
|
2015-10-20 09:36:20 -06:00
|
|
|
|
2015-10-26 12:47:18 -06:00
|
|
|
    def full_state_sync_for_archived_room(self, room_id, sync_config,
                                          leave_event_id, leave_token,
                                          timeline_since_token, tags_by_room,
                                          account_data_by_room):
        """Sync a room for a client which is starting without any state
        Returns:
            A Deferred ArchivedSyncResult.
        """

        # Delegate to the incremental path with full_state=True; leave_token
        # caps the stream at the point the user left the room.
        return self.incremental_sync_for_archived_room(
            sync_config, room_id, leave_event_id, timeline_since_token, tags_by_room,
            account_data_by_room, full_state=True, leave_token=leave_token,
        )
|
|
|
|
|
2015-01-27 09:24:22 -07:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def incremental_sync_with_gap(self, sync_config, since_token):
|
|
|
|
""" Get the incremental delta needed to bring the client up to
|
|
|
|
date with the server.
|
|
|
|
Returns:
|
|
|
|
A Deferred SyncResult.
|
|
|
|
"""
|
|
|
|
now_token = yield self.event_sources.get_current_token()
|
|
|
|
|
2016-01-06 09:44:13 -07:00
|
|
|
rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
|
|
|
|
room_ids = [room.room_id for room in rooms]
|
2015-12-22 04:59:55 -07:00
|
|
|
|
2015-01-28 20:33:51 -07:00
|
|
|
presence_source = self.event_sources.sources["presence"]
|
2015-11-05 07:32:26 -07:00
|
|
|
presence, presence_key = yield presence_source.get_new_events(
|
2015-01-28 20:33:51 -07:00
|
|
|
user=sync_config.user,
|
|
|
|
from_key=since_token.presence_key,
|
2016-01-25 03:10:44 -07:00
|
|
|
limit=sync_config.filter_collection.presence_limit(),
|
2015-11-05 07:32:26 -07:00
|
|
|
room_ids=room_ids,
|
2016-01-06 09:54:57 -07:00
|
|
|
is_guest=sync_config.is_guest,
|
2015-01-27 09:24:22 -07:00
|
|
|
)
|
2015-01-28 20:33:51 -07:00
|
|
|
now_token = now_token.copy_and_replace("presence_key", presence_key)
|
|
|
|
|
2016-01-06 09:44:13 -07:00
|
|
|
now_token, ephemeral_by_room = yield self.ephemeral_by_room(
|
|
|
|
sync_config, now_token, since_token
|
|
|
|
)
|
|
|
|
|
2015-01-29 07:40:28 -07:00
|
|
|
rm_handler = self.hs.get_handlers().room_member_handler
|
2015-08-19 06:46:03 -06:00
|
|
|
app_service = yield self.store.get_app_service_by_user_id(
|
|
|
|
sync_config.user.to_string()
|
|
|
|
)
|
|
|
|
if app_service:
|
|
|
|
rooms = yield self.store.get_app_service_rooms(app_service)
|
2015-10-13 04:43:12 -06:00
|
|
|
joined_room_ids = set(r.room_id for r in rooms)
|
2015-08-19 06:46:03 -06:00
|
|
|
else:
|
2015-10-13 04:43:12 -06:00
|
|
|
joined_room_ids = yield rm_handler.get_joined_rooms_for_user(
|
2015-08-19 06:46:03 -06:00
|
|
|
sync_config.user
|
|
|
|
)
|
2015-01-27 09:24:22 -07:00
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
user_id = sync_config.user.to_string()
|
|
|
|
|
2016-01-25 03:10:44 -07:00
|
|
|
timeline_limit = sync_config.filter_collection.timeline_limit()
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2016-01-06 09:44:13 -07:00
|
|
|
tags_by_room = yield self.store.get_updated_tags(
|
2016-01-27 10:06:52 -07:00
|
|
|
user_id,
|
2016-01-06 09:44:13 -07:00
|
|
|
since_token.account_data_key,
|
|
|
|
)
|
|
|
|
|
|
|
|
account_data, account_data_by_room = (
|
|
|
|
yield self.store.get_updated_account_data_for_user(
|
2016-01-27 10:06:52 -07:00
|
|
|
user_id,
|
2016-01-06 09:44:13 -07:00
|
|
|
since_token.account_data_key,
|
|
|
|
)
|
2015-12-01 11:41:32 -07:00
|
|
|
)
|
|
|
|
|
2016-03-04 07:44:01 -07:00
|
|
|
push_rules_changed = yield self.store.have_push_rules_changed_for_user(
|
|
|
|
user_id, int(since_token.push_rules_key)
|
|
|
|
)
|
|
|
|
|
|
|
|
if push_rules_changed:
|
|
|
|
account_data["m.push_rules"] = yield self.push_rules_for_user(
|
|
|
|
sync_config.user
|
|
|
|
)
|
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
# Get a list of membership change events that have happened.
|
2016-02-02 09:12:10 -07:00
|
|
|
rooms_changed = yield self.store.get_membership_changes_for_user(
|
2016-01-27 10:06:52 -07:00
|
|
|
user_id, since_token.room_key, now_token.room_key
|
2016-01-27 02:54:30 -07:00
|
|
|
)
|
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
mem_change_events_by_room_id = {}
|
|
|
|
for event in rooms_changed:
|
|
|
|
mem_change_events_by_room_id.setdefault(event.room_id, []).append(event)
|
|
|
|
|
|
|
|
newly_joined_rooms = []
|
|
|
|
archived = []
|
|
|
|
invited = []
|
|
|
|
for room_id, events in mem_change_events_by_room_id.items():
|
|
|
|
non_joins = [e for e in events if e.membership != Membership.JOIN]
|
|
|
|
has_join = len(non_joins) != len(events)
|
|
|
|
|
|
|
|
# We want to figure out if we joined the room at some point since
|
|
|
|
# the last sync (even if we have since left). This is to make sure
|
|
|
|
# we do send down the room, and with full state, where necessary
|
|
|
|
if room_id in joined_room_ids or has_join:
|
|
|
|
old_state = yield self.get_state_at(room_id, since_token)
|
|
|
|
old_mem_ev = old_state.get((EventTypes.Member, user_id), None)
|
|
|
|
if not old_mem_ev or old_mem_ev.membership != Membership.JOIN:
|
|
|
|
newly_joined_rooms.append(room_id)
|
|
|
|
|
|
|
|
if room_id in joined_room_ids:
|
|
|
|
continue
|
|
|
|
|
|
|
|
if not non_joins:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Only bother if we're still currently invited
|
|
|
|
should_invite = non_joins[-1].membership == Membership.INVITE
|
|
|
|
if should_invite:
|
|
|
|
room_sync = InvitedSyncResult(room_id, invite=non_joins[-1])
|
|
|
|
if room_sync:
|
|
|
|
invited.append(room_sync)
|
|
|
|
|
|
|
|
# Always include leave/ban events. Just take the last one.
|
|
|
|
# TODO: How do we handle ban -> leave in same batch?
|
|
|
|
leave_events = [
|
|
|
|
e for e in non_joins
|
|
|
|
if e.membership in (Membership.LEAVE, Membership.BAN)
|
|
|
|
]
|
|
|
|
|
|
|
|
if leave_events:
|
|
|
|
leave_event = leave_events[-1]
|
|
|
|
room_sync = yield self.incremental_sync_for_archived_room(
|
|
|
|
sync_config, room_id, leave_event.event_id, since_token,
|
|
|
|
tags_by_room, account_data_by_room,
|
|
|
|
full_state=room_id in newly_joined_rooms
|
|
|
|
)
|
|
|
|
if room_sync:
|
|
|
|
archived.append(room_sync)
|
|
|
|
|
|
|
|
# Get all events for rooms we're currently joined to.
|
2016-01-27 02:54:30 -07:00
|
|
|
room_to_events = yield self.store.get_room_events_stream_for_rooms(
|
2016-01-27 10:06:52 -07:00
|
|
|
room_ids=joined_room_ids,
|
2016-01-27 02:54:30 -07:00
|
|
|
from_key=since_token.room_key,
|
|
|
|
to_key=now_token.room_key,
|
|
|
|
limit=timeline_limit + 1,
|
|
|
|
)
|
|
|
|
|
2015-10-13 03:24:51 -06:00
|
|
|
joined = []
|
2016-01-27 10:06:52 -07:00
|
|
|
# We loop through all room ids, even if there are no new events, in case
|
|
|
|
# there are non room events taht we need to notify about.
|
|
|
|
for room_id in joined_room_ids:
|
|
|
|
room_entry = room_to_events.get(room_id, None)
|
2015-11-12 09:34:42 -07:00
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
if room_entry:
|
|
|
|
events, start_key = room_entry
|
2016-01-25 03:10:44 -07:00
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
prev_batch_token = now_token.copy_and_replace("room_key", start_key)
|
2016-01-25 03:10:44 -07:00
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
newly_joined_room = room_id in newly_joined_rooms
|
|
|
|
full_state = newly_joined_room
|
2016-01-25 03:10:44 -07:00
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
batch = yield self.load_filtered_recents(
|
|
|
|
room_id, sync_config, prev_batch_token,
|
|
|
|
since_token=since_token,
|
|
|
|
recents=events,
|
|
|
|
newly_joined_room=newly_joined_room,
|
2015-01-29 07:40:28 -07:00
|
|
|
)
|
2016-01-27 10:06:52 -07:00
|
|
|
else:
|
|
|
|
batch = TimelineBatch(
|
|
|
|
events=[],
|
|
|
|
prev_batch=since_token,
|
|
|
|
limited=False,
|
2015-01-29 07:40:28 -07:00
|
|
|
)
|
2016-01-27 10:06:52 -07:00
|
|
|
full_state = False
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2016-01-27 10:06:52 -07:00
|
|
|
room_sync = yield self.incremental_sync_with_gap_for_room(
|
|
|
|
room_id=room_id,
|
|
|
|
sync_config=sync_config,
|
|
|
|
since_token=since_token,
|
|
|
|
now_token=now_token,
|
|
|
|
ephemeral_by_room=ephemeral_by_room,
|
|
|
|
tags_by_room=tags_by_room,
|
|
|
|
account_data_by_room=account_data_by_room,
|
|
|
|
batch=batch,
|
|
|
|
full_state=full_state,
|
2015-10-19 10:26:18 -06:00
|
|
|
)
|
2016-01-12 08:01:56 -07:00
|
|
|
if room_sync:
|
2016-01-27 10:06:52 -07:00
|
|
|
joined.append(room_sync)
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2016-02-15 10:10:40 -07:00
|
|
|
# For each newly joined room, we want to send down presence of
|
|
|
|
# existing users.
|
|
|
|
presence_handler = self.hs.get_handlers().presence_handler
|
|
|
|
extra_presence_users = set()
|
|
|
|
for room_id in newly_joined_rooms:
|
|
|
|
users = yield self.store.get_users_in_room(event.room_id)
|
|
|
|
extra_presence_users.update(users)
|
|
|
|
|
|
|
|
# For each new member, send down presence.
|
|
|
|
for joined_sync in joined:
|
|
|
|
it = itertools.chain(joined_sync.timeline.events, joined_sync.state.values())
|
|
|
|
for event in it:
|
|
|
|
if event.type == EventTypes.Member:
|
|
|
|
if event.membership == Membership.JOIN:
|
|
|
|
extra_presence_users.add(event.state_key)
|
|
|
|
|
|
|
|
states = yield presence_handler.get_states(
|
|
|
|
[u for u in extra_presence_users if u != user_id],
|
|
|
|
as_event=True,
|
|
|
|
)
|
|
|
|
presence.extend(states)
|
|
|
|
|
2016-01-25 03:10:44 -07:00
|
|
|
account_data_for_user = sync_config.filter_collection.filter_account_data(
|
|
|
|
self.account_data_for_user(account_data)
|
|
|
|
)
|
|
|
|
|
|
|
|
presence = sync_config.filter_collection.filter_presence(
|
|
|
|
presence
|
|
|
|
)
|
|
|
|
|
2015-01-26 11:53:31 -07:00
|
|
|
defer.returnValue(SyncResult(
|
2015-10-05 09:39:22 -06:00
|
|
|
presence=presence,
|
2016-01-25 03:10:44 -07:00
|
|
|
account_data=account_data_for_user,
|
2015-10-13 03:24:51 -06:00
|
|
|
joined=joined,
|
2015-10-13 04:43:12 -06:00
|
|
|
invited=invited,
|
2015-10-19 10:26:18 -06:00
|
|
|
archived=archived,
|
2015-01-26 11:53:31 -07:00
|
|
|
next_batch=now_token,
|
|
|
|
))
|
2015-01-26 08:46:31 -07:00
|
|
|
|
2015-01-30 04:32:35 -07:00
|
|
|
    @defer.inlineCallbacks
    def load_filtered_recents(self, room_id, sync_config, now_token,
                              since_token=None, recents=None, newly_joined_room=False):
        """Load and filter the recent timeline events for a room, paginating
        backwards until the (filtered) timeline limit is satisfied.

        Returns:
            a Deferred TimelineBatch
        """
        with Measure(self.clock, "load_filtered_recents"):
            # Over-fetch, since filtering may discard some of what we load.
            filtering_factor = 2
            timeline_limit = sync_config.filter_collection.timeline_limit()
            load_limit = max(timeline_limit * filtering_factor, 10)
            max_repeat = 5  # Only try a few times per room, otherwise
            room_key = now_token.room_key
            end_key = room_key

            # We may need to paginate if we weren't handed enough events, or
            # the room is newly joined (so the client needs a gappy batch).
            if recents is None or newly_joined_room or timeline_limit < len(recents):
                limited = True
            else:
                limited = False

            if recents is not None:
                recents = sync_config.filter_collection.filter_room_timeline(recents)
                recents = yield self._filter_events_for_client(
                    sync_config.user.to_string(),
                    recents,
                )
            else:
                recents = []

            since_key = None
            if since_token and not newly_joined_room:
                since_key = since_token.room_key

            while limited and len(recents) < timeline_limit and max_repeat:
                events, end_key = yield self.store.get_room_events_stream_for_room(
                    room_id,
                    limit=load_limit + 1,
                    from_key=since_key,
                    to_key=end_key,
                )
                loaded_recents = sync_config.filter_collection.filter_room_timeline(
                    events
                )
                loaded_recents = yield self._filter_events_for_client(
                    sync_config.user.to_string(),
                    loaded_recents,
                )
                # Older events go in front of what we already have.
                loaded_recents.extend(recents)
                recents = loaded_recents

                # Fewer events than requested means we hit the start of the
                # stream: nothing more to paginate.
                if len(events) <= load_limit:
                    limited = False
                    break
                max_repeat -= 1

            if len(recents) > timeline_limit:
                limited = True
                recents = recents[-timeline_limit:]
                room_key = recents[0].internal_metadata.before

            prev_batch_token = now_token.copy_and_replace(
                "room_key", room_key
            )

        defer.returnValue(TimelineBatch(
            events=recents,
            prev_batch=prev_batch_token,
            limited=limited or newly_joined_room
        ))
|
2015-01-30 04:32:35 -07:00
|
|
|
|
2015-01-26 08:46:31 -07:00
|
|
|
@defer.inlineCallbacks
def incremental_sync_with_gap_for_room(self, room_id, sync_config,
                                       since_token, now_token,
                                       ephemeral_by_room, tags_by_room,
                                       account_data_by_room,
                                       batch, full_state=False):
    """Build the sync result for a single joined room during an
    incremental (gappy) sync.

    Args:
        room_id(str): the room to sync
        sync_config(synapse.handlers.sync.SyncConfig):
        since_token(StreamToken): token of the end of the previous batch
        now_token(StreamToken): token of the end of the current batch
        ephemeral_by_room(dict): map from room id to the list of ephemeral
            events for that room
        tags_by_room(dict): map from room id to the user's tags for that room
        account_data_by_room(dict): map from room id to the user's account
            data for that room
        batch(synapse.handlers.sync.TimelineBatch): the timeline events to
            send down for this room
        full_state(bool): whether to compute the full room state rather
            than the delta since ``since_token``

    Returns:
        A Deferred JoinedSyncResult
    """
    # Work out what state (full or delta) should accompany the timeline.
    state = yield self.compute_state_delta(
        room_id, batch, sync_config, since_token, now_token,
        full_state=full_state
    )

    account_data = self.account_data_for_room(
        room_id, tags_by_room, account_data_by_room
    )

    # Apply the client's sync filter to the account data and to the
    # ephemeral events for this room.
    account_data = sync_config.filter_collection.filter_room_account_data(
        account_data
    )

    ephemeral = sync_config.filter_collection.filter_room_ephemeral(
        ephemeral_by_room.get(room_id, [])
    )

    # Filled in below once we know the room sync is non-empty; the dict is
    # shared with the JoinedSyncResult constructed here, so mutating it
    # later updates the result in place.
    unread_notifications = {}
    room_sync = JoinedSyncResult(
        room_id=room_id,
        timeline=batch,
        state=state,
        ephemeral=ephemeral,
        account_data=account_data,
        unread_notifications=unread_notifications,
    )

    # NOTE(review): assumes JoinedSyncResult defines truthiness (falsy when
    # there is nothing to send) — confirm against its definition.
    if room_sync:
        notifs = yield self.unread_notifs_for_room_id(
            room_id, sync_config
        )

        # notifs is None when the user has no read receipt in the room, in
        # which case the client keeps its previous counts.
        if notifs is not None:
            unread_notifications["notification_count"] = notifs["notify_count"]
            unread_notifications["highlight_count"] = notifs["highlight_count"]

    logger.debug("Room sync: %r", room_sync)

    defer.returnValue(room_sync)
|
|
|
|
|
2015-10-19 10:26:18 -06:00
|
|
|
@defer.inlineCallbacks
def incremental_sync_for_archived_room(self, sync_config, room_id, leave_event_id,
                                       since_token, tags_by_room,
                                       account_data_by_room, full_state,
                                       leave_token=None):
    """ Get the incremental delta needed to bring the client up to date for
    the archived room.

    Args:
        sync_config(synapse.handlers.sync.SyncConfig):
        room_id(str): the (left) room to sync
        leave_event_id(str): event id of the user's leave event; used to
            derive ``leave_token`` when one is not supplied
        since_token(StreamToken): token of the end of the previous batch
        tags_by_room(dict): map from room id to the user's tags
        account_data_by_room(dict): map from room id to account data
        full_state(bool): whether to return the full state rather than a
            delta
        leave_token(StreamToken|None): stream token of the leave event;
            computed from ``leave_event_id`` when None

    Returns:
        A Deferred ArchivedSyncResult, or None when the client has already
        been told about everything up to the leave.
    """
    if not leave_token:
        # Derive the point in the event stream at which the user left.
        stream_token = yield self.store.get_stream_token_for_event(
            leave_event_id
        )

        # NOTE(review): since_token is only used here as a template token —
        # its room_key is replaced with the leave position.
        leave_token = since_token.copy_and_replace("room_key", stream_token)

    if since_token and since_token.is_after(leave_token):
        # The previous sync already covered the leave event; nothing new.
        defer.returnValue(None)

    # Timeline runs up to the leave, starting from the previous sync point.
    batch = yield self.load_filtered_recents(
        room_id, sync_config, leave_token, since_token,
    )

    logger.debug("Recents %r", batch)

    # State delta is computed as of the leave, not as of "now".
    state_events_delta = yield self.compute_state_delta(
        room_id, batch, sync_config, since_token, leave_token,
        full_state=full_state
    )

    account_data = self.account_data_for_room(
        room_id, tags_by_room, account_data_by_room
    )

    account_data = sync_config.filter_collection.filter_room_account_data(
        account_data
    )

    room_sync = ArchivedSyncResult(
        room_id=room_id,
        timeline=batch,
        state=state_events_delta,
        account_data=account_data,
    )

    logger.debug("Room sync: %r", room_sync)

    defer.returnValue(room_sync)
|
|
|
|
|
2015-01-27 09:24:22 -07:00
|
|
|
@defer.inlineCallbacks
def get_state_after_event(self, event):
    """Fetch the room state as it was immediately after the given event.

    Args:
        event(synapse.events.EventBase): event of interest

    Returns:
        A Deferred map from ((type, state_key)->Event)
    """
    state_dict = yield self.store.get_state_for_event(event.event_id)

    if not event.is_state():
        # A non-state event leaves the room state unchanged.
        defer.returnValue(state_dict)

    # The stored state is the state *before* the event, so layer the event
    # itself on top; copy first so we never mutate the returned dict.
    updated = dict(state_dict)
    updated[(event.type, event.state_key)] = event
    defer.returnValue(updated)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
def get_state_at(self, room_id, stream_position):
    """ Get the room state at a particular stream position

    Args:
        room_id(str): room for which to get state
        stream_position(StreamToken): point at which to get state

    Returns:
        A Deferred map from ((type, state_key)->Event)
    """
    recent, _ = yield self.store.get_recent_events_for_room(
        room_id, end_token=stream_position.room_key, limit=1,
    )

    if not recent:
        # No events at all in this room - so presumably no state either.
        defer.returnValue({})

    # State as of the most recent event at or before the stream position.
    state = yield self.get_state_after_event(recent[-1])
    defer.returnValue(state)
|
|
|
|
|
2016-02-01 08:59:40 -07:00
|
|
|
@defer.inlineCallbacks
def compute_state_delta(self, room_id, batch, sync_config, since_token, now_token,
                        full_state):
    """ Works out the difference in state between the start of the timeline
    and the previous sync.

    Args:
        room_id(str):
        batch(synapse.handlers.sync.TimelineBatch): The timeline batch for
            the room that will be sent to the user.
        sync_config(synapse.handlers.sync.SyncConfig):
        since_token(str|None): Token of the end of the previous batch. May
            be None.
        now_token(str): Token of the end of the current batch.
        full_state(bool): Whether to force returning the full state.

    Returns:
        A deferred new event dictionary, keyed by (type, state_key), after
        the client's state filter has been applied.
    """
    # TODO(mjark) Check if the state events were received by the server
    # after the previous sync, since we need to include those state
    # updates even if they occured logically before the previous event.
    # TODO(mjark) Check for new redactions in the state events.

    with Measure(self.clock, "compute_state_delta"):
        if full_state:
            if batch:
                # State at the end and at the start of the timeline.
                current_state = yield self.store.get_state_for_event(
                    batch.events[-1].event_id
                )

                state = yield self.store.get_state_for_event(
                    batch.events[0].event_id
                )
            else:
                # No timeline events: start and end state coincide.
                current_state = yield self.get_state_at(
                    room_id, stream_position=now_token
                )

                state = current_state

            # State events the timeline itself already carries - these must
            # not be duplicated in the state section.
            timeline_state = {
                (event.type, event.state_key): event
                for event in batch.events if event.is_state()
            }

            # previous={} forces the full state to be returned.
            state = _calculate_state(
                timeline_contains=timeline_state,
                timeline_start=state,
                previous={},
                current=current_state,
            )
        elif batch.limited:
            # The timeline is gappy: compute the delta between the state
            # the client last saw and the start of this timeline.
            state_at_previous_sync = yield self.get_state_at(
                room_id, stream_position=since_token
            )

            current_state = yield self.store.get_state_for_event(
                batch.events[-1].event_id
            )

            state_at_timeline_start = yield self.store.get_state_for_event(
                batch.events[0].event_id
            )

            timeline_state = {
                (event.type, event.state_key): event
                for event in batch.events if event.is_state()
            }

            state = _calculate_state(
                timeline_contains=timeline_state,
                timeline_start=state_at_timeline_start,
                previous=state_at_previous_sync,
                current=current_state,
            )
        else:
            # Contiguous timeline: any state changes are in the timeline
            # events themselves, so there is nothing to send separately.
            state = {}

        defer.returnValue({
            (e.type, e.state_key): e
            for e in sync_config.filter_collection.filter_room_state(state.values())
        })
|
2015-01-30 06:33:41 -07:00
|
|
|
|
2015-11-10 11:27:23 -07:00
|
|
|
def check_joined_room(self, sync_config, state_delta):
    """
    Check if the user has just joined the given room (so should
    be given the full state)

    Args:
        sync_config(synapse.handlers.sync.SyncConfig):
        state_delta(dict[(str,str), synapse.events.FrozenEvent]): the
            difference in state since the last sync

    Returns:
        bool: True if the user has just joined the room
    """
    # Note: the previous docstring claimed this returned a deferred tuple;
    # it is a plain synchronous boolean.
    join_event = state_delta.get(
        (EventTypes.Member, sync_config.user.to_string()), None
    )
    return (
        join_event is not None and
        join_event.content["membership"] == Membership.JOIN
    )
|
2015-12-18 10:47:00 -07:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def unread_notifs_for_room_id(self, room_id, sync_config):
    """Look up the user's unread push-notification counts for a room.

    Returns:
        A Deferred dict of push-action counts since the user's last read
        receipt, or None when the user has no read receipt in the room
        (the client should then keep its previous counts).
    """
    with Measure(self.clock, "unread_notifs_for_room_id"):
        user_id = sync_config.user.to_string()
        last_read_event_id = yield self.store.get_last_receipt_event_id_for_user(
            user_id=user_id,
            room_id=room_id,
            receipt_type="m.read"
        )

        if last_read_event_id:
            counts = yield self.store.get_unread_event_push_actions_by_room_for_user(
                room_id, user_id, last_read_event_id
            )
            defer.returnValue(counts)

        # There is no new information in this period, so your notification
        # count is whatever it was last time.
        defer.returnValue(None)
|
2016-01-19 04:35:50 -07:00
|
|
|
|
|
|
|
|
|
|
|
def _action_has_highlight(actions):
|
|
|
|
for action in actions:
|
|
|
|
try:
|
|
|
|
if action.get("set_tweak", None) == "highlight":
|
|
|
|
return action.get("value", True)
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return False
|
2016-02-01 08:59:40 -07:00
|
|
|
|
|
|
|
|
2016-02-12 04:13:06 -07:00
|
|
|
def _calculate_state(timeline_contains, timeline_start, previous, current):
|
2016-02-01 08:59:40 -07:00
|
|
|
"""Works out what state to include in a sync response.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
timeline_contains (dict): state in the timeline
|
|
|
|
timeline_start (dict): state at the start of the timeline
|
|
|
|
previous (dict): state at the end of the previous sync (or empty dict
|
2016-02-01 09:52:27 -07:00
|
|
|
if this is an initial sync)
|
2016-02-12 04:13:06 -07:00
|
|
|
current (dict): state at the end of the timeline
|
2016-02-01 08:59:40 -07:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
dict
|
|
|
|
"""
|
|
|
|
event_id_to_state = {
|
|
|
|
e.event_id: e
|
|
|
|
for e in itertools.chain(
|
|
|
|
timeline_contains.values(),
|
|
|
|
previous.values(),
|
|
|
|
timeline_start.values(),
|
2016-02-12 04:13:06 -07:00
|
|
|
current.values(),
|
2016-02-01 08:59:40 -07:00
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2016-02-12 04:13:06 -07:00
|
|
|
c_ids = set(e.event_id for e in current.values())
|
2016-02-01 08:59:40 -07:00
|
|
|
tc_ids = set(e.event_id for e in timeline_contains.values())
|
|
|
|
p_ids = set(e.event_id for e in previous.values())
|
|
|
|
ts_ids = set(e.event_id for e in timeline_start.values())
|
|
|
|
|
2016-02-12 04:13:06 -07:00
|
|
|
state_ids = ((c_ids | ts_ids) - p_ids) - tc_ids
|
2016-02-01 08:59:40 -07:00
|
|
|
|
|
|
|
evs = (event_id_to_state[e] for e in state_ids)
|
|
|
|
return {
|
|
|
|
(e.type, e.state_key): e
|
|
|
|
for e in evs
|
|
|
|
}
|