# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import (
    TYPE_CHECKING,
    Awaitable,
    Callable,
    Collection,
    Dict,
    Iterable,
    List,
    Optional,
    Set,
    Tuple,
    TypeVar,
    Union,
)

import attr
from prometheus_client import Counter

from twisted.internet import defer

from synapse.api.constants import EduTypes, EventTypes, HistoryVisibility, Membership
from synapse.api.errors import AuthError
from synapse.events import EventBase
from synapse.handlers.presence import format_user_presence_state
from synapse.logging import issue9533_logger
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import log_kv, start_active_span
from synapse.metrics import LaterGauge
from synapse.streams.config import PaginationConfig
from synapse.types import (
    JsonDict,
    PersistedEventPosition,
    RoomStreamToken,
    StrCollection,
    StreamKeyType,
    StreamToken,
    UserID,
)
from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
from synapse.util.metrics import Measure
from synapse.visibility import filter_events_for_client

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

notified_events_counter = Counter("synapse_notifier_notified_events", "")

users_woken_by_stream_counter = Counter(
    "synapse_notifier_users_woken_by_stream", "", ["stream"]
)

T = TypeVar("T")


# TODO(paul): Should be shared somewhere
def count(func: Callable[[T], bool], it: Iterable[T]) -> int:
    """Return the number of items in it for which func returns true."""
    n = 0
    for x in it:
        if func(x):
            n += 1
    return n


class _NotificationListener:
    """This represents a single client connection to the events stream.
    The events stream handler will have yielded to the deferred, so to
    notify the handler it is sufficient to resolve the deferred.
    """

    __slots__ = ["deferred"]

    def __init__(self, deferred: "defer.Deferred"):
        self.deferred = deferred


class _NotifierUserStream:
    """This represents a user connected to the event stream.
    It tracks the most recent stream token for that user.
    At a given point a user may have a number of streams listening for
    events.

    This listener will also keep track of which rooms it is listening in
    so that it can remove itself from the indexes in the Notifier class.
    """

    def __init__(
        self,
        user_id: str,
        rooms: StrCollection,
        current_token: StreamToken,
        time_now_ms: int,
    ):
        self.user_id = user_id
        self.rooms = set(rooms)
        self.current_token = current_token

        # The last token for which we should wake up any streams that have a
        # token that comes before it. This gets updated every time we get poked.
        # We start it at the current token since if we get any streams
        # that have a token from before we have no idea whether they should be
        # woken up or not, so let's just wake them up.
        self.last_notified_token = current_token
        self.last_notified_ms = time_now_ms

        self.notify_deferred: ObservableDeferred[StreamToken] = ObservableDeferred(
            defer.Deferred()
        )

    def notify(
        self,
        stream_key: str,
        stream_id: Union[int, RoomStreamToken],
        time_now_ms: int,
    ) -> None:
        """Notify any listeners for this user of a new event from an
        event source.
        Args:
            stream_key: The stream the event came from.
            stream_id: The new id for the stream the event came from.
            time_now_ms: The current time in milliseconds.
        """
        self.current_token = self.current_token.copy_and_advance(stream_key, stream_id)
        self.last_notified_token = self.current_token
        self.last_notified_ms = time_now_ms
        notify_deferred = self.notify_deferred

        log_kv(
            {
                "notify": self.user_id,
                "stream": stream_key,
                "stream_id": stream_id,
                "listeners": self.count_listeners(),
            }
        )

        users_woken_by_stream_counter.labels(stream_key).inc()

        with PreserveLoggingContext():
            self.notify_deferred = ObservableDeferred(defer.Deferred())
            notify_deferred.callback(self.current_token)

    def remove(self, notifier: "Notifier") -> None:
        """Remove this listener from all the indexes in the Notifier
        it knows about.
        """

        for room in self.rooms:
            lst = notifier.room_to_user_streams.get(room, set())
            lst.discard(self)

        notifier.user_to_user_stream.pop(self.user_id)

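    # The number of listeners currently waiting on this stream, i.e. observers
    # of the deferred that will fire on the next notify().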
    def count_listeners(self) -> int:
        return len(self.notify_deferred.observers())

    def new_listener(self, token: StreamToken) -> _NotificationListener:
        """Returns a deferred that is resolved when there is a new token
        greater than the given token.

        Args:
            token: The token from which we are streaming from, i.e. we shouldn't
                notify for things that happened before this.
        """
        # Immediately wake up the stream if something has already happened
        # since their last token.
        if self.last_notified_token != token:
            return _NotificationListener(defer.succeed(self.current_token))
        else:
            return _NotificationListener(self.notify_deferred.observe())


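# The result of a single poll of the event sources: the new events together
# with the tokens that bound them. It is falsy when no events were returned,
# which is how wait_for_events decides whether to keep waiting.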
@attr.s(slots=True, frozen=True, auto_attribs=True)
class EventStreamResult:
    events: List[Union[JsonDict, EventBase]]
    start_token: StreamToken
    end_token: StreamToken

    def __bool__(self) -> bool:
        return bool(self.events)


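# A room event that is waiting to be notified about: its stream position plus
# the routing details (room, type, state key, membership) needed to decide who
# should be woken up once all earlier events have been persisted.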
@attr.s(slots=True, frozen=True, auto_attribs=True)
class _PendingRoomEventEntry:
    event_pos: PersistedEventPosition
    extra_users: Collection[UserID]

    room_id: str
    type: str
    state_key: Optional[str]
    membership: Optional[str]


class Notifier:
    """This class is responsible for notifying any listeners when there are
    new events available for them.

    Primarily used from the /events stream.
    """

    UNUSED_STREAM_EXPIRY_MS = 10 * 60 * 1000

    def __init__(self, hs: "HomeServer"):
        self.user_to_user_stream: Dict[str, _NotifierUserStream] = {}
        self.room_to_user_streams: Dict[str, Set[_NotifierUserStream]] = {}

        self.hs = hs
        self._storage_controllers = hs.get_storage_controllers()
        self.event_sources = hs.get_event_sources()
        self.store = hs.get_datastores().main
        self.pending_new_room_events: List[_PendingRoomEventEntry] = []

        self._replication_notifier = hs.get_replication_notifier()
        self._new_join_in_room_callbacks: List[Callable[[str, str], None]] = []

        self._federation_client = hs.get_federation_http_client()

        self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules

        # List of callbacks to be notified when a lock is released
        self._lock_released_callback: List[Callable[[str, str, str], None]] = []

        self.clock = hs.get_clock()
        self.appservice_handler = hs.get_application_service_handler()
        self._pusher_pool = hs.get_pusherpool()

        self.federation_sender = None
        if hs.should_send_federation():
            self.federation_sender = hs.get_federation_sender()

        self.state_handler = hs.get_state_handler()

        self.clock.looping_call(
            self.remove_expired_streams, self.UNUSED_STREAM_EXPIRY_MS
        )

        # This is not a very cheap test to perform, but it's only executed
        # when rendering the metrics page, which is likely once per minute at
        # most when scraping it.
        def count_listeners() -> int:
            all_user_streams: Set[_NotifierUserStream] = set()

            for streams in list(self.room_to_user_streams.values()):
                all_user_streams |= streams
            for stream in list(self.user_to_user_stream.values()):
                all_user_streams.add(stream)

            return sum(stream.count_listeners() for stream in all_user_streams)

        LaterGauge("synapse_notifier_listeners", "", [], count_listeners)

        LaterGauge(
            "synapse_notifier_rooms",
            "",
            [],
            lambda: count(bool, list(self.room_to_user_streams.values())),
        )
        LaterGauge(
            "synapse_notifier_users", "", [], lambda: len(self.user_to_user_stream)
        )

    def add_replication_callback(self, cb: Callable[[], None]) -> None:
        """Add a callback that will be called when some new data is available.
        Callback is not given any arguments. It should *not* return a Deferred - if
        it needs to do any asynchronous work, a background thread should be started and
        wrapped with run_as_background_process.
        """
        self._replication_notifier.add_replication_callback(cb)

    def add_new_join_in_room_callback(self, cb: Callable[[str, str], None]) -> None:
        """Add a callback that will be called when a user joins a room.

        This only fires on genuine membership changes, e.g. "invite" -> "join".
        Membership transitions like "join" -> "join" (for e.g. displayname changes) do
        not trigger the callback.

        When called, the callback receives two arguments: the event ID and the room ID.
        It should *not* return a Deferred - if it needs to do any asynchronous work, a
        background thread should be started and wrapped with run_as_background_process.
        """
        self._new_join_in_room_callbacks.append(cb)

    async def on_new_room_events(
        self,
        events_and_pos: List[Tuple[EventBase, PersistedEventPosition]],
        max_room_stream_token: RoomStreamToken,
        extra_users: Optional[Collection[UserID]] = None,
    ) -> None:
        """Creates a _PendingRoomEventEntry for each of the listed events and calls
        notify_new_room_events with the results."""
        event_entries = []
        for event, pos in events_and_pos:
            entry = self.create_pending_room_event_entry(
                pos,
                extra_users,
                event.room_id,
                event.type,
                event.get("state_key"),
                event.content.get("membership"),
            )
            event_entries.append((entry, event.event_id))
        await self.notify_new_room_events(event_entries, max_room_stream_token)

    async def on_un_partial_stated_room(
        self,
        room_id: str,
        new_token: int,
    ) -> None:
        """Used by the resync background processes to wake up all listeners
        of this room when it is un-partial-stated.

        It will also notify replication listeners of the change in stream.
        """

        # Wake up all related user stream notifiers
        user_streams = self.room_to_user_streams.get(room_id, set())
        time_now_ms = self.clock.time_msec()
        for user_stream in user_streams:
            try:
                user_stream.notify(
                    StreamKeyType.UN_PARTIAL_STATED_ROOMS, new_token, time_now_ms
                )
            except Exception:
                logger.exception("Failed to notify listener")

        # Poke the replication so that other workers also see the write to
        # the un-partial-stated rooms stream.
        self.notify_replication()

    async def notify_new_room_events(
        self,
        event_entries: List[Tuple[_PendingRoomEventEntry, str]],
        max_room_stream_token: RoomStreamToken,
    ) -> None:
        """Used by handlers to inform the notifier something has happened
        in the room, room event wise.

        This triggers the notifier to wake up any listeners that are
        listening to the room, and any listeners for the users in the
        `extra_users` param.

        This also notifies modules listening on new events via the
        `on_new_event` callback.

        The events can be persisted out of order. The notifier will wait
        until all previous events have been persisted before notifying
        the client streams.
        """
        for event_entry, event_id in event_entries:
            self.pending_new_room_events.append(event_entry)
            await self._third_party_rules.on_new_event(event_id)

        self._notify_pending_new_room_events(max_room_stream_token)

        self.notify_replication()

    def create_pending_room_event_entry(
        self,
        event_pos: PersistedEventPosition,
        extra_users: Optional[Collection[UserID]],
        room_id: str,
        event_type: str,
        state_key: Optional[str],
        membership: Optional[str],
    ) -> _PendingRoomEventEntry:
        """Creates and returns a _PendingRoomEventEntry"""
        return _PendingRoomEventEntry(
            event_pos=event_pos,
            extra_users=extra_users or [],
            room_id=room_id,
            type=event_type,
            state_key=state_key,
            membership=membership,
        )

    def _notify_pending_new_room_events(
        self, max_room_stream_token: RoomStreamToken
    ) -> None:
        """Notify for the room events that were queued waiting for a previous
        event to be persisted.
        Args:
            max_room_stream_token: The highest stream_id below which all
                events have been persisted.
        """
        pending = self.pending_new_room_events
        self.pending_new_room_events = []

        users: Set[UserID] = set()
        rooms: Set[str] = set()

        for entry in pending:
            if entry.event_pos.persisted_after(max_room_stream_token):
                self.pending_new_room_events.append(entry)
            else:
                if (
                    entry.type == EventTypes.Member
                    and entry.membership == Membership.JOIN
                    and entry.state_key
                ):
                    self._user_joined_room(entry.state_key, entry.room_id)

                users.update(entry.extra_users)
                rooms.add(entry.room_id)

        if users or rooms:
            self.on_new_event(
                StreamKeyType.ROOM,
                max_room_stream_token,
                users=users,
                rooms=rooms,
            )
            self._on_updated_room_token(max_room_stream_token)

    def _on_updated_room_token(self, max_room_stream_token: RoomStreamToken) -> None:
        """Poke services that might care that the room position has been
        updated.
        """

        # poke any interested application service.
        self._notify_app_services(max_room_stream_token)
        self._notify_pusher_pool(max_room_stream_token)

        if self.federation_sender:
            self.federation_sender.notify_new_events(max_room_stream_token)

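    # The two helpers below log and swallow any exception so that a failure in
    # one consumer (application services or the pusher pool) cannot stop the
    # rest of the notification path.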
    def _notify_app_services(self, max_room_stream_token: RoomStreamToken) -> None:
        try:
            self.appservice_handler.notify_interested_services(max_room_stream_token)
        except Exception:
            logger.exception("Error notifying application services of event")

    def _notify_pusher_pool(self, max_room_stream_token: RoomStreamToken) -> None:
        try:
            self._pusher_pool.on_new_notifications(max_room_stream_token)
        except Exception:
            logger.exception("Error notifying pusher pool of event")

    def on_new_event(
        self,
        stream_key: str,
        new_token: Union[int, RoomStreamToken],
        users: Optional[Collection[Union[str, UserID]]] = None,
        rooms: Optional[StrCollection] = None,
    ) -> None:
        """Used to inform listeners that something has happened event wise.

        Will wake up all listeners for the given users and rooms.

        Args:
            stream_key: The stream the event came from.
            new_token: The value of the new stream token.
            users: The users that should be informed of the new event.
            rooms: A collection of room IDs for which each joined member will be
                informed of the new event.
        """
        users = users or []
        rooms = rooms or []

        with Measure(self.clock, "on_new_event"):
            user_streams = set()

            log_kv(
                {
                    "waking_up_explicit_users": len(users),
                    "waking_up_explicit_rooms": len(rooms),
                }
            )

            for user in users:
                user_stream = self.user_to_user_stream.get(str(user))
                if user_stream is not None:
                    user_streams.add(user_stream)

            for room in rooms:
                user_streams |= self.room_to_user_streams.get(room, set())

            if stream_key == StreamKeyType.TO_DEVICE:
                issue9533_logger.debug(
                    "to-device messages stream id %s, awaking streams for %s",
                    new_token,
                    users,
                )

            time_now_ms = self.clock.time_msec()
            for user_stream in user_streams:
                try:
                    user_stream.notify(stream_key, new_token, time_now_ms)
                except Exception:
                    logger.exception("Failed to notify listener")

            self.notify_replication()

            # Notify appservices.
            try:
                self.appservice_handler.notify_interested_services_ephemeral(
                    stream_key,
                    new_token,
                    users,
                )
            except Exception:
                logger.exception(
                    "Error notifying application services of ephemeral events"
                )

    def on_new_replication_data(self) -> None:
        """Used to inform replication listeners that something has happened
        without waking up any of the normal user event streams"""
        self.notify_replication()

    async def wait_for_events(
        self,
        user_id: str,
        timeout: int,
        callback: Callable[[StreamToken, StreamToken], Awaitable[T]],
        room_ids: Optional[StrCollection] = None,
        from_token: StreamToken = StreamToken.START,
    ) -> T:
        """Wait until the callback returns a non-empty response or the
        timeout fires.
        """
        user_stream = self.user_to_user_stream.get(user_id)
        if user_stream is None:
            current_token = self.event_sources.get_current_token()
            if room_ids is None:
                room_ids = await self.store.get_rooms_for_user(user_id)
            user_stream = _NotifierUserStream(
                user_id=user_id,
                rooms=room_ids,
                current_token=current_token,
                time_now_ms=self.clock.time_msec(),
            )
            self._register_with_keys(user_stream)

        result = None
        prev_token = from_token
        if timeout:
            end_time = self.clock.time_msec() + timeout

            while not result:
                with start_active_span("wait_for_events"):
                    try:
                        now = self.clock.time_msec()
                        if end_time <= now:
                            break

                        # Now we wait for the _NotifierUserStream to be told there
                        # is a new token.
                        listener = user_stream.new_listener(prev_token)
                        listener.deferred = timeout_deferred(
                            listener.deferred,
                            (end_time - now) / 1000.0,
                            self.hs.get_reactor(),
                        )

                        log_kv(
                            {
                                "wait_for_events": "sleep",
                                "token": prev_token,
                            }
                        )

                        with PreserveLoggingContext():
                            await listener.deferred

                        log_kv(
                            {
                                "wait_for_events": "woken",
                                "token": user_stream.current_token,
                            }
                        )

                        current_token = user_stream.current_token

                        result = await callback(prev_token, current_token)
                        log_kv(
                            {
                                "wait_for_events": "result",
                                "result": bool(result),
                            }
                        )
                        if result:
                            break

                        # Update the prev_token to the current_token since nothing
                        # has happened between the old prev_token and the current_token
                        prev_token = current_token
                    except defer.TimeoutError:
                        log_kv({"wait_for_events": "timeout"})
                        break
                    except defer.CancelledError:
                        log_kv({"wait_for_events": "cancelled"})
                        break

        if result is None:
            # This happens if there was no timeout or if the timeout had
            # already expired.
            current_token = user_stream.current_token
            result = await callback(prev_token, current_token)

        return result

    async def get_events_for(
        self,
        user: UserID,
        pagination_config: PaginationConfig,
        timeout: int,
        is_guest: bool = False,
        explicit_room_id: Optional[str] = None,
    ) -> EventStreamResult:
        """For the given user and rooms, return any new events for them. If
        there are no new events wait for up to `timeout` milliseconds for any
        new events to happen before returning.

        If explicit_room_id is not set, the user's joined rooms will be polled
        for events.
        If explicit_room_id is set, that room will be polled for events only if
        it is world readable or the user has joined the room.
        """
        if pagination_config.from_token:
            from_token = pagination_config.from_token
        else:
            from_token = self.event_sources.get_current_token()

        limit = pagination_config.limit

        room_ids, is_joined = await self._get_room_ids(user, explicit_room_id)
        is_peeking = not is_joined

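        # The callback handed to wait_for_events below: pull anything new from
        # each event source between the two tokens and wrap it up as an
        # EventStreamResult.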
        async def check_for_updates(
            before_token: StreamToken, after_token: StreamToken
        ) -> EventStreamResult:
            if after_token == before_token:
                return EventStreamResult([], from_token, from_token)

            # The events fetched from each source are a JsonDict, EventBase, or
            # UserPresenceState, but see below for UserPresenceState being
            # converted to JsonDict.
            events: List[Union[JsonDict, EventBase]] = []
            end_token = from_token

            for name, source in self.event_sources.sources.get_sources():
                keyname = "%s_key" % name
                before_id = getattr(before_token, keyname)
                after_id = getattr(after_token, keyname)
                if before_id == after_id:
                    continue

                new_events, new_key = await source.get_new_events(
                    user=user,
                    from_key=getattr(from_token, keyname),
                    limit=limit,
                    is_guest=is_peeking,
                    room_ids=room_ids,
                    explicit_room_id=explicit_room_id,
                )

                if name == "room":
                    new_events = await filter_events_for_client(
                        self._storage_controllers,
                        user.to_string(),
                        new_events,
                        is_peeking=is_peeking,
                    )
                elif name == "presence":
                    now = self.clock.time_msec()
                    new_events[:] = [
                        {
                            "type": EduTypes.PRESENCE,
                            "content": format_user_presence_state(event, now),
                        }
                        for event in new_events
                    ]

                events.extend(new_events)
                end_token = end_token.copy_and_replace(keyname, new_key)

            return EventStreamResult(events, from_token, end_token)

        user_id_for_stream = user.to_string()
        if is_peeking:
            # Internally, the notifier keeps an event stream per user_id.
            # This is used by both /sync and /events.
            # We want /events to be used for peeking independently of /sync,
            # without polluting its contents. So we invent an illegal user ID
            # (which thus cannot clash with any real users) for keying peeking
            # over /events.
            #
            # I am sorry for what I have done.
            user_id_for_stream = "_PEEKING_%s_%s" % (
                explicit_room_id,
                user_id_for_stream,
            )

        result = await self.wait_for_events(
            user_id_for_stream,
            timeout,
            check_for_updates,
            room_ids=room_ids,
            from_token=from_token,
        )

        return result

    async def _get_room_ids(
        self, user: UserID, explicit_room_id: Optional[str]
    ) -> Tuple[StrCollection, bool]:
        joined_room_ids = await self.store.get_rooms_for_user(user.to_string())
        if explicit_room_id:
            if explicit_room_id in joined_room_ids:
                return [explicit_room_id], True
            if await self._is_world_readable(explicit_room_id):
                return [explicit_room_id], False
            raise AuthError(403, "Non-joined access not allowed")
        return joined_room_ids, True

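    # A room counts as world readable if its current history visibility state
    # is HistoryVisibility.WORLD_READABLE; _get_room_ids uses this to allow
    # peeking at rooms the user has not joined.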
    async def _is_world_readable(self, room_id: str) -> bool:
        state = await self._storage_controllers.state.get_current_state_event(
            room_id, EventTypes.RoomHistoryVisibility, ""
        )
        if state and "history_visibility" in state.content:
            return (
                state.content["history_visibility"] == HistoryVisibility.WORLD_READABLE
            )
        else:
            return False

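    # Run periodically via the looping_call set up in __init__: drop user
    # streams that have no listeners attached and have not been notified for
    # at least UNUSED_STREAM_EXPIRY_MS.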
    def remove_expired_streams(self) -> None:
        time_now_ms = self.clock.time_msec()
        expired_streams = []
        expire_before_ts = time_now_ms - self.UNUSED_STREAM_EXPIRY_MS
        for stream in self.user_to_user_stream.values():
            if stream.count_listeners():
                continue
            if stream.last_notified_ms < expire_before_ts:
                expired_streams.append(stream)

        for expired_stream in expired_streams:
            expired_stream.remove(self)

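    # Index a new user stream by user ID and by each room it is interested in,
    # so that on_new_event can find it when those users or rooms are notified.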
    def _register_with_keys(self, user_stream: _NotifierUserStream) -> None:
        self.user_to_user_stream[user_stream.user_id] = user_stream

        for room in user_stream.rooms:
            s = self.room_to_user_streams.setdefault(room, set())
            s.add(user_stream)

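    # When a user joins a room, add their existing stream (if any) to that
    # room's index so they start being woken up for its events.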
    def _user_joined_room(self, user_id: str, room_id: str) -> None:
        new_user_stream = self.user_to_user_stream.get(user_id)
        if new_user_stream is not None:
            room_streams = self.room_to_user_streams.setdefault(room_id, set())
            room_streams.add(new_user_stream)
            new_user_stream.rooms.add(room_id)

    def notify_replication(self) -> None:
        """Notify any replication listeners that there's a new event"""
        self._replication_notifier.notify_replication()

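    # Fan a genuine room join out to the callbacks registered via
    # add_new_join_in_room_callback.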
    def notify_user_joined_room(self, event_id: str, room_id: str) -> None:
        for cb in self._new_join_in_room_callbacks:
            cb(event_id, room_id)

    def notify_remote_server_up(self, server: str) -> None:
        """Notify any replication that a remote server has come back up"""
        # We call federation_sender directly rather than registering as a
        # callback as a) we already have a reference to it and b) it introduces
        # circular dependencies.
        if self.federation_sender:
            self.federation_sender.wake_destination(server)

        # Tell the federation client about the fact the server is back up, so
        # that any in flight requests can be immediately retried.
        self._federation_client.wake_destination(server)

    def add_lock_released_callback(
        self, callback: Callable[[str, str, str], None]
    ) -> None:
        """Add a function to be called whenever we are notified about a released lock."""
        self._lock_released_callback.append(callback)

    def notify_lock_released(
        self, instance_name: str, lock_name: str, lock_key: str
    ) -> None:
        """Notify the callbacks that a lock has been released."""
        for cb in self._lock_released_callback:
            cb(instance_name, lock_name, lock_key)


@attr.s(auto_attribs=True)
class ReplicationNotifier:
    """Tracks callbacks for things that need to know about stream changes.

    This is separate from the notifier to avoid circular dependencies.
    """

    _replication_callbacks: List[Callable[[], None]] = attr.Factory(list)

    def add_replication_callback(self, cb: Callable[[], None]) -> None:
        """Add a callback that will be called when some new data is available.
        Callback is not given any arguments. It should *not* return a Deferred - if
        it needs to do any asynchronous work, a background thread should be started and
        wrapped with run_as_background_process.
        """
        self._replication_callbacks.append(cb)

    def notify_replication(self) -> None:
        """Notify any replication listeners that there's a new event"""
        for cb in self._replication_callbacks:
            cb()