From dab88a7b1feaf1a07b227898911808fc269609fc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 29 Aug 2024 16:22:57 +0100 Subject: [PATCH 01/15] Sliding Sync: Make `PerConnectionState` immutable (#17600) This is so that we can cache it. We also move the sliding sync types to `synapse/types/handlers/sliding_sync.py`. This is mainly in-prep for The only change in behaviour is that `RoomSyncConfig.combine_sync_config(..)` now returns a new room sync config rather than mutating in-place. Reviewable commit-by-commit. --------- Co-authored-by: Eric Eastwood --- changelog.d/17600.misc | 1 + scripts-dev/mypy_synapse_plugin.py | 19 +- synapse/handlers/sliding_sync/__init__.py | 17 +- synapse/handlers/sliding_sync/extensions.py | 14 +- synapse/handlers/sliding_sync/room_lists.py | 34 +- synapse/handlers/sliding_sync/store.py | 10 +- synapse/types/handlers/__init__.py | 358 +-------------- .../handlers/sliding_sync.py} | 413 ++++++++++++++++-- tests/handlers/test_sliding_sync.py | 23 +- 9 files changed, 444 insertions(+), 445 deletions(-) create mode 100644 changelog.d/17600.misc rename synapse/{handlers/sliding_sync/types.py => types/handlers/sliding_sync.py} (52%) diff --git a/changelog.d/17600.misc b/changelog.d/17600.misc new file mode 100644 index 0000000000..a81c67f6d1 --- /dev/null +++ b/changelog.d/17600.misc @@ -0,0 +1 @@ +Make the sliding sync `PerConnectionState` class immutable. diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py index 877b831751..509047b41b 100644 --- a/scripts-dev/mypy_synapse_plugin.py +++ b/scripts-dev/mypy_synapse_plugin.py @@ -38,6 +38,7 @@ from mypy.types import ( NoneType, TupleType, TypeAliasType, + TypeVarType, UninhabitedType, UnionType, ) @@ -233,6 +234,7 @@ IMMUTABLE_CUSTOM_TYPES = { "synapse.synapse_rust.push.FilteredPushRules", # This is technically not immutable, but close enough. "signedjson.types.VerifyKey", + "synapse.types.StrCollection", } # Immutable containers only if the values are also immutable. @@ -298,7 +300,7 @@ def is_cacheable( elif rt.type.fullname in MUTABLE_CONTAINER_TYPES: # Mutable containers are mutable regardless of their underlying type. - return False, None + return False, f"container {rt.type.fullname} is mutable" elif "attrs" in rt.type.metadata: # attrs classes are only cachable iff it is frozen (immutable itself) @@ -318,6 +320,9 @@ def is_cacheable( else: return False, "non-frozen attrs class" + elif rt.type.is_enum: + # We assume Enum values are immutable + return True, None else: # Ensure we fail for unknown types, these generally means that the # above code is not complete. @@ -326,6 +331,18 @@ def is_cacheable( f"Don't know how to handle {rt.type.fullname} return type instance", ) + elif isinstance(rt, TypeVarType): + # We consider TypeVars immutable if they are bound to a set of immutable + # types. + if rt.values: + for value in rt.values: + ok, note = is_cacheable(value, signature, verbose) + if not ok: + return False, f"TypeVar bound not cacheable {value}" + return True, None + + return False, "TypeVar is unbound" + elif isinstance(rt, NoneType): # None is cachable. 
return True, None diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py index ccd464cd1c..d62d520abd 100644 --- a/synapse/handlers/sliding_sync/__init__.py +++ b/synapse/handlers/sliding_sync/__init__.py @@ -29,13 +29,6 @@ from synapse.handlers.sliding_sync.room_lists import ( _RoomMembershipForUser, ) from synapse.handlers.sliding_sync.store import SlidingSyncConnectionStore -from synapse.handlers.sliding_sync.types import ( - HaveSentRoomFlag, - MutablePerConnectionState, - PerConnectionState, - RoomSyncConfig, - StateValues, -) from synapse.logging.opentracing import ( SynapseTags, log_kv, @@ -57,7 +50,15 @@ from synapse.types import ( StreamKeyType, StreamToken, ) -from synapse.types.handlers import SlidingSyncConfig, SlidingSyncResult +from synapse.types.handlers.sliding_sync import ( + HaveSentRoomFlag, + MutablePerConnectionState, + PerConnectionState, + RoomSyncConfig, + SlidingSyncConfig, + SlidingSyncResult, + StateValues, +) from synapse.types.state import StateFilter from synapse.util.async_helpers import concurrently_execute from synapse.visibility import filter_events_for_client diff --git a/synapse/handlers/sliding_sync/extensions.py b/synapse/handlers/sliding_sync/extensions.py index a2d4f24f9c..d9f4c56e6e 100644 --- a/synapse/handlers/sliding_sync/extensions.py +++ b/synapse/handlers/sliding_sync/extensions.py @@ -20,11 +20,6 @@ from typing_extensions import assert_never from synapse.api.constants import AccountDataTypes, EduTypes from synapse.handlers.receipts import ReceiptEventSource -from synapse.handlers.sliding_sync.types import ( - HaveSentRoomFlag, - MutablePerConnectionState, - PerConnectionState, -) from synapse.logging.opentracing import trace from synapse.storage.databases.main.receipts import ReceiptInRoom from synapse.types import ( @@ -35,7 +30,14 @@ from synapse.types import ( StrCollection, StreamToken, ) -from synapse.types.handlers import OperationType, SlidingSyncConfig, SlidingSyncResult +from synapse.types.handlers.sliding_sync import ( + HaveSentRoomFlag, + MutablePerConnectionState, + OperationType, + PerConnectionState, + SlidingSyncConfig, + SlidingSyncResult, +) if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/handlers/sliding_sync/room_lists.py b/synapse/handlers/sliding_sync/room_lists.py index 4718e8092b..0e6cb28524 100644 --- a/synapse/handlers/sliding_sync/room_lists.py +++ b/synapse/handlers/sliding_sync/room_lists.py @@ -40,11 +40,6 @@ from synapse.api.constants import ( ) from synapse.events import StrippedStateEvent from synapse.events.utils import parse_stripped_state_event -from synapse.handlers.sliding_sync.types import ( - HaveSentRoomFlag, - PerConnectionState, - RoomSyncConfig, -) from synapse.logging.opentracing import start_active_span, trace from synapse.storage.databases.main.state import ( ROOM_UNKNOWN_SENTINEL, @@ -61,7 +56,14 @@ from synapse.types import ( StreamToken, UserID, ) -from synapse.types.handlers import OperationType, SlidingSyncConfig, SlidingSyncResult +from synapse.types.handlers.sliding_sync import ( + HaveSentRoomFlag, + OperationType, + PerConnectionState, + RoomSyncConfig, + SlidingSyncConfig, + SlidingSyncResult, +) from synapse.types.state import StateFilter if TYPE_CHECKING: @@ -279,15 +281,11 @@ class SlidingSyncRoomLists: room_id ) if existing_room_sync_config is not None: - existing_room_sync_config.combine_room_sync_config( + room_sync_config = existing_room_sync_config.combine_room_sync_config( room_sync_config ) - else: - # 
Make a copy so if we modify it later, it doesn't - # affect all references. - relevant_room_map[room_id] = ( - room_sync_config.deep_copy() - ) + + relevant_room_map[room_id] = room_sync_config room_ids_in_list.append(room_id) @@ -351,11 +349,13 @@ class SlidingSyncRoomLists: # and need to fetch more info about. existing_room_sync_config = relevant_room_map.get(room_id) if existing_room_sync_config is not None: - existing_room_sync_config.combine_room_sync_config( - room_sync_config + room_sync_config = ( + existing_room_sync_config.combine_room_sync_config( + room_sync_config + ) ) - else: - relevant_room_map[room_id] = room_sync_config + + relevant_room_map[room_id] = room_sync_config # Filtered subset of `relevant_room_map` for rooms that may have updates # (in the event stream) diff --git a/synapse/handlers/sliding_sync/store.py b/synapse/handlers/sliding_sync/store.py index 3b727432fb..e38fe3556f 100644 --- a/synapse/handlers/sliding_sync/store.py +++ b/synapse/handlers/sliding_sync/store.py @@ -18,13 +18,13 @@ from typing import TYPE_CHECKING, Dict, Optional, Tuple import attr from synapse.api.errors import SlidingSyncUnknownPosition -from synapse.handlers.sliding_sync.types import ( - MutablePerConnectionState, - PerConnectionState, -) from synapse.logging.opentracing import trace from synapse.types import SlidingSyncStreamToken -from synapse.types.handlers import SlidingSyncConfig +from synapse.types.handlers.sliding_sync import ( + MutablePerConnectionState, + PerConnectionState, + SlidingSyncConfig, +) if TYPE_CHECKING: pass diff --git a/synapse/types/handlers/__init__.py b/synapse/types/handlers/__init__.py index b303bb1f96..463de1a814 100644 --- a/synapse/types/handlers/__init__.py +++ b/synapse/types/handlers/__init__.py @@ -17,33 +17,9 @@ # [This file includes modifications made by New Vector Limited] # # -from enum import Enum -from typing import TYPE_CHECKING, Dict, Final, List, Mapping, Optional, Sequence, Tuple -import attr -from typing_extensions import TypedDict -from synapse._pydantic_compat import HAS_PYDANTIC_V2 - -if TYPE_CHECKING or HAS_PYDANTIC_V2: - from pydantic.v1 import Extra -else: - from pydantic import Extra - -from synapse.events import EventBase -from synapse.types import ( - DeviceListUpdates, - JsonDict, - JsonMapping, - Requester, - SlidingSyncStreamToken, - StreamToken, - UserID, -) -from synapse.types.rest.client import SlidingSyncBody - -if TYPE_CHECKING: - from synapse.handlers.relations import BundledAggregations +from typing import List, Optional, TypedDict class ShutdownRoomParams(TypedDict): @@ -101,335 +77,3 @@ class ShutdownRoomResponse(TypedDict): failed_to_kick_users: List[str] local_aliases: List[str] new_room_id: Optional[str] - - -class SlidingSyncConfig(SlidingSyncBody): - """ - Inherit from `SlidingSyncBody` since we need all of the same fields and add a few - extra fields that we need in the handler - """ - - user: UserID - requester: Requester - - # Pydantic config - class Config: - # By default, ignore fields that we don't recognise. - extra = Extra.ignore - # By default, don't allow fields to be reassigned after parsing. - allow_mutation = False - # Allow custom types like `UserID` to be used in the model - arbitrary_types_allowed = True - - -class OperationType(Enum): - """ - Represents the operation types in a Sliding Sync window. - - Attributes: - SYNC: Sets a range of entries. Clients SHOULD discard what they previous knew about - entries in this range. - INSERT: Sets a single entry. 
If the position is not empty then clients MUST move - entries to the left or the right depending on where the closest empty space is. - DELETE: Remove a single entry. Often comes before an INSERT to allow entries to move - places. - INVALIDATE: Remove a range of entries. Clients MAY persist the invalidated range for - offline support, but they should be treated as empty when additional operations - which concern indexes in the range arrive from the server. - """ - - SYNC: Final = "SYNC" - INSERT: Final = "INSERT" - DELETE: Final = "DELETE" - INVALIDATE: Final = "INVALIDATE" - - -@attr.s(slots=True, frozen=True, auto_attribs=True) -class SlidingSyncResult: - """ - The Sliding Sync result to be serialized to JSON for a response. - - Attributes: - next_pos: The next position token in the sliding window to request (next_batch). - lists: Sliding window API. A map of list key to list results. - rooms: Room subscription API. A map of room ID to room results. - extensions: Extensions API. A map of extension key to extension results. - """ - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class RoomResult: - """ - Attributes: - name: Room name or calculated room name. - avatar: Room avatar - heroes: List of stripped membership events (containing `user_id` and optionally - `avatar_url` and `displayname`) for the users used to calculate the room name. - is_dm: Flag to specify whether the room is a direct-message room (most likely - between two people). - initial: Flag which is set when this is the first time the server is sending this - data on this connection. Clients can use this flag to replace or update - their local state. When there is an update, servers MUST omit this flag - entirely and NOT send "initial":false as this is wasteful on bandwidth. The - absence of this flag means 'false'. - unstable_expanded_timeline: Flag which is set if we're returning more historic - events due to the timeline limit having increased. See "XXX: Odd behavior" - comment ing `synapse.handlers.sliding_sync`. - required_state: The current state of the room - timeline: Latest events in the room. The last event is the most recent. - bundled_aggregations: A mapping of event ID to the bundled aggregations for - the timeline events above. This allows clients to show accurate reaction - counts (or edits, threads), even if some of the reaction events were skipped - over in a gappy sync. - stripped_state: Stripped state events (for rooms where the usre is - invited/knocked). Same as `rooms.invite.$room_id.invite_state` in sync v2, - absent on joined/left rooms - prev_batch: A token that can be passed as a start parameter to the - `/rooms//messages` API to retrieve earlier messages. - limited: True if there are more events than `timeline_limit` looking - backwards from the `response.pos` to the `request.pos`. - num_live: The number of timeline events which have just occurred and are not historical. - The last N events are 'live' and should be treated as such. This is mostly - useful to determine whether a given @mention event should make a noise or not. - Clients cannot rely solely on the absence of `initial: true` to determine live - events because if a room not in the sliding window bumps into the window because - of an @mention it will have `initial: true` yet contain a single live event - (with potentially other old events in the timeline). - bump_stamp: The `stream_ordering` of the last event according to the - `bump_event_types`. 
This helps clients sort more readily without them - needing to pull in a bunch of the timeline to determine the last activity. - `bump_event_types` is a thing because for example, we don't want display - name changes to mark the room as unread and bump it to the top. For - encrypted rooms, we just have to consider any activity as a bump because we - can't see the content and the client has to figure it out for themselves. - joined_count: The number of users with membership of join, including the client's - own user ID. (same as sync `v2 m.joined_member_count`) - invited_count: The number of users with membership of invite. (same as sync v2 - `m.invited_member_count`) - notification_count: The total number of unread notifications for this room. (same - as sync v2) - highlight_count: The number of unread notifications for this room with the highlight - flag set. (same as sync v2) - """ - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class StrippedHero: - user_id: str - display_name: Optional[str] - avatar_url: Optional[str] - - name: Optional[str] - avatar: Optional[str] - heroes: Optional[List[StrippedHero]] - is_dm: bool - initial: bool - unstable_expanded_timeline: bool - # Should be empty for invite/knock rooms with `stripped_state` - required_state: List[EventBase] - # Should be empty for invite/knock rooms with `stripped_state` - timeline_events: List[EventBase] - bundled_aggregations: Optional[Dict[str, "BundledAggregations"]] - # Optional because it's only relevant to invite/knock rooms - stripped_state: List[JsonDict] - # Only optional because it won't be included for invite/knock rooms with `stripped_state` - prev_batch: Optional[StreamToken] - # Only optional because it won't be included for invite/knock rooms with `stripped_state` - limited: Optional[bool] - # Only optional because it won't be included for invite/knock rooms with `stripped_state` - num_live: Optional[int] - bump_stamp: int - joined_count: int - invited_count: int - notification_count: int - highlight_count: int - - def __bool__(self) -> bool: - return ( - # If this is the first time the client is seeing the room, we should not filter it out - # under any circumstance. - self.initial - # We need to let the client know if there are any new events - or bool(self.required_state) - or bool(self.timeline_events) - or bool(self.stripped_state) - ) - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class SlidingWindowList: - """ - Attributes: - count: The total number of entries in the list. Always present if this list - is. - ops: The sliding list operations to perform. - """ - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class Operation: - """ - Attributes: - op: The operation type to perform. - range: Which index positions are affected by this operation. These are - both inclusive. - room_ids: Which room IDs are affected by this operation. These IDs match - up to the positions in the `range`, so the last room ID in this list - matches the 9th index. The room data is held in a separate object. 
- """ - - op: OperationType - range: Tuple[int, int] - room_ids: List[str] - - count: int - ops: List[Operation] - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class Extensions: - """Responses for extensions - - Attributes: - to_device: The to-device extension (MSC3885) - e2ee: The E2EE device extension (MSC3884) - """ - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class ToDeviceExtension: - """The to-device extension (MSC3885) - - Attributes: - next_batch: The to-device stream token the client should use - to get more results - events: A list of to-device messages for the client - """ - - next_batch: str - events: Sequence[JsonMapping] - - def __bool__(self) -> bool: - return bool(self.events) - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class E2eeExtension: - """The E2EE device extension (MSC3884) - - Attributes: - device_list_updates: List of user_ids whose devices have changed or left (only - present on incremental syncs). - device_one_time_keys_count: Map from key algorithm to the number of - unclaimed one-time keys currently held on the server for this device. If - an algorithm is unlisted, the count for that algorithm is assumed to be - zero. If this entire parameter is missing, the count for all algorithms - is assumed to be zero. - device_unused_fallback_key_types: List of unused fallback key algorithms - for this device. - """ - - # Only present on incremental syncs - device_list_updates: Optional[DeviceListUpdates] - device_one_time_keys_count: Mapping[str, int] - device_unused_fallback_key_types: Sequence[str] - - def __bool__(self) -> bool: - # Note that "signed_curve25519" is always returned in key count responses - # regardless of whether we uploaded any keys for it. This is necessary until - # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed. - # - # Also related: - # https://github.com/element-hq/element-android/issues/3725 and - # https://github.com/matrix-org/synapse/issues/10456 - default_otk = self.device_one_time_keys_count.get("signed_curve25519") - more_than_default_otk = len(self.device_one_time_keys_count) > 1 or ( - default_otk is not None and default_otk > 0 - ) - - return bool( - more_than_default_otk - or self.device_list_updates - or self.device_unused_fallback_key_types - ) - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class AccountDataExtension: - """The Account Data extension (MSC3959) - - Attributes: - global_account_data_map: Mapping from `type` to `content` of global account - data events. - account_data_by_room_map: Mapping from room_id to mapping of `type` to - `content` of room account data events. 
- """ - - global_account_data_map: Mapping[str, JsonMapping] - account_data_by_room_map: Mapping[str, Mapping[str, JsonMapping]] - - def __bool__(self) -> bool: - return bool( - self.global_account_data_map or self.account_data_by_room_map - ) - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class ReceiptsExtension: - """The Receipts extension (MSC3960) - - Attributes: - room_id_to_receipt_map: Mapping from room_id to `m.receipt` ephemeral - event (type, content) - """ - - room_id_to_receipt_map: Mapping[str, JsonMapping] - - def __bool__(self) -> bool: - return bool(self.room_id_to_receipt_map) - - @attr.s(slots=True, frozen=True, auto_attribs=True) - class TypingExtension: - """The Typing Notification extension (MSC3961) - - Attributes: - room_id_to_typing_map: Mapping from room_id to `m.typing` ephemeral - event (type, content) - """ - - room_id_to_typing_map: Mapping[str, JsonMapping] - - def __bool__(self) -> bool: - return bool(self.room_id_to_typing_map) - - to_device: Optional[ToDeviceExtension] = None - e2ee: Optional[E2eeExtension] = None - account_data: Optional[AccountDataExtension] = None - receipts: Optional[ReceiptsExtension] = None - typing: Optional[TypingExtension] = None - - def __bool__(self) -> bool: - return bool( - self.to_device - or self.e2ee - or self.account_data - or self.receipts - or self.typing - ) - - next_pos: SlidingSyncStreamToken - lists: Mapping[str, SlidingWindowList] - rooms: Dict[str, RoomResult] - extensions: Extensions - - def __bool__(self) -> bool: - """Make the result appear empty if there are no updates. This is used - to tell if the notifier needs to wait for more events when polling for - events. - """ - # We don't include `self.lists` here, as a) `lists` is always non-empty even if - # there are no changes, and b) since we're sorting rooms by `stream_ordering` of - # the latest activity, anything that would cause the order to change would end - # up in `self.rooms` and cause us to send down the change. 
- return bool(self.rooms or self.extensions) - - @staticmethod - def empty(next_pos: SlidingSyncStreamToken) -> "SlidingSyncResult": - "Return a new empty result" - return SlidingSyncResult( - next_pos=next_pos, - lists={}, - rooms={}, - extensions=SlidingSyncResult.Extensions(), - ) diff --git a/synapse/handlers/sliding_sync/types.py b/synapse/types/handlers/sliding_sync.py similarity index 52% rename from synapse/handlers/sliding_sync/types.py rename to synapse/types/handlers/sliding_sync.py index 003419d40a..bca1ff7b54 100644 --- a/synapse/handlers/sliding_sync/types.py +++ b/synapse/types/handlers/sliding_sync.py @@ -18,30 +18,382 @@ from collections import ChainMap from enum import Enum from typing import ( TYPE_CHECKING, + AbstractSet, Callable, Dict, Final, Generic, + List, Mapping, MutableMapping, Optional, + Sequence, Set, + Tuple, TypeVar, cast, ) import attr +from synapse._pydantic_compat import HAS_PYDANTIC_V2 from synapse.api.constants import EventTypes from synapse.types import MultiWriterStreamToken, RoomStreamToken, StrCollection, UserID -from synapse.types.handlers import SlidingSyncConfig + +if TYPE_CHECKING or HAS_PYDANTIC_V2: + from pydantic.v1 import Extra +else: + from pydantic import Extra + +from synapse.events import EventBase +from synapse.types import ( + DeviceListUpdates, + JsonDict, + JsonMapping, + Requester, + SlidingSyncStreamToken, + StreamToken, +) +from synapse.types.rest.client import SlidingSyncBody if TYPE_CHECKING: - pass + from synapse.handlers.relations import BundledAggregations logger = logging.getLogger(__name__) +class SlidingSyncConfig(SlidingSyncBody): + """ + Inherit from `SlidingSyncBody` since we need all of the same fields and add a few + extra fields that we need in the handler + """ + + user: UserID + requester: Requester + + # Pydantic config + class Config: + # By default, ignore fields that we don't recognise. + extra = Extra.ignore + # By default, don't allow fields to be reassigned after parsing. + allow_mutation = False + # Allow custom types like `UserID` to be used in the model + arbitrary_types_allowed = True + + +class OperationType(Enum): + """ + Represents the operation types in a Sliding Sync window. + + Attributes: + SYNC: Sets a range of entries. Clients SHOULD discard what they previous knew about + entries in this range. + INSERT: Sets a single entry. If the position is not empty then clients MUST move + entries to the left or the right depending on where the closest empty space is. + DELETE: Remove a single entry. Often comes before an INSERT to allow entries to move + places. + INVALIDATE: Remove a range of entries. Clients MAY persist the invalidated range for + offline support, but they should be treated as empty when additional operations + which concern indexes in the range arrive from the server. + """ + + SYNC: Final = "SYNC" + INSERT: Final = "INSERT" + DELETE: Final = "DELETE" + INVALIDATE: Final = "INVALIDATE" + + +@attr.s(slots=True, frozen=True, auto_attribs=True) +class SlidingSyncResult: + """ + The Sliding Sync result to be serialized to JSON for a response. + + Attributes: + next_pos: The next position token in the sliding window to request (next_batch). + lists: Sliding window API. A map of list key to list results. + rooms: Room subscription API. A map of room ID to room results. + extensions: Extensions API. A map of extension key to extension results. + """ + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class RoomResult: + """ + Attributes: + name: Room name or calculated room name. 
+            avatar: Room avatar
+            heroes: List of stripped membership events (containing `user_id` and optionally
+                `avatar_url` and `displayname`) for the users used to calculate the room name.
+            is_dm: Flag to specify whether the room is a direct-message room (most likely
+                between two people).
+            initial: Flag which is set when this is the first time the server is sending this
+                data on this connection. Clients can use this flag to replace or update
+                their local state. When there is an update, servers MUST omit this flag
+                entirely and NOT send "initial":false as this is wasteful on bandwidth. The
+                absence of this flag means 'false'.
+            unstable_expanded_timeline: Flag which is set if we're returning more historic
+                events due to the timeline limit having increased. See "XXX: Odd behavior"
+                comment in `synapse.handlers.sliding_sync`.
+            required_state: The current state of the room
+            timeline: Latest events in the room. The last event is the most recent.
+            bundled_aggregations: A mapping of event ID to the bundled aggregations for
+                the timeline events above. This allows clients to show accurate reaction
+                counts (or edits, threads), even if some of the reaction events were skipped
+                over in a gappy sync.
+            stripped_state: Stripped state events (for rooms where the user is
+                invited/knocked). Same as `rooms.invite.$room_id.invite_state` in sync v2,
+                absent on joined/left rooms
+            prev_batch: A token that can be passed as a start parameter to the
+                `/rooms/<room_id>/messages` API to retrieve earlier messages.
+            limited: True if there are more events than `timeline_limit` looking
+                backwards from the `response.pos` to the `request.pos`.
+            num_live: The number of timeline events which have just occurred and are not historical.
+                The last N events are 'live' and should be treated as such. This is mostly
+                useful to determine whether a given @mention event should make a noise or not.
+                Clients cannot rely solely on the absence of `initial: true` to determine live
+                events because if a room not in the sliding window bumps into the window because
+                of an @mention it will have `initial: true` yet contain a single live event
+                (with potentially other old events in the timeline).
+            bump_stamp: The `stream_ordering` of the last event according to the
+                `bump_event_types`. This helps clients sort more readily without them
+                needing to pull in a bunch of the timeline to determine the last activity.
+                `bump_event_types` is a thing because for example, we don't want display
+                name changes to mark the room as unread and bump it to the top. For
+                encrypted rooms, we just have to consider any activity as a bump because we
+                can't see the content and the client has to figure it out for themselves.
+            joined_count: The number of users with membership of join, including the client's
+                own user ID. (same as sync v2 `m.joined_member_count`)
+            invited_count: The number of users with membership of invite. (same as sync v2
+                `m.invited_member_count`)
+            notification_count: The total number of unread notifications for this room. (same
+                as sync v2)
+            highlight_count: The number of unread notifications for this room with the highlight
+                flag set.
(same as sync v2) + """ + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class StrippedHero: + user_id: str + display_name: Optional[str] + avatar_url: Optional[str] + + name: Optional[str] + avatar: Optional[str] + heroes: Optional[List[StrippedHero]] + is_dm: bool + initial: bool + unstable_expanded_timeline: bool + # Should be empty for invite/knock rooms with `stripped_state` + required_state: List[EventBase] + # Should be empty for invite/knock rooms with `stripped_state` + timeline_events: List[EventBase] + bundled_aggregations: Optional[Dict[str, "BundledAggregations"]] + # Optional because it's only relevant to invite/knock rooms + stripped_state: List[JsonDict] + # Only optional because it won't be included for invite/knock rooms with `stripped_state` + prev_batch: Optional[StreamToken] + # Only optional because it won't be included for invite/knock rooms with `stripped_state` + limited: Optional[bool] + # Only optional because it won't be included for invite/knock rooms with `stripped_state` + num_live: Optional[int] + bump_stamp: int + joined_count: int + invited_count: int + notification_count: int + highlight_count: int + + def __bool__(self) -> bool: + return ( + # If this is the first time the client is seeing the room, we should not filter it out + # under any circumstance. + self.initial + # We need to let the client know if there are any new events + or bool(self.required_state) + or bool(self.timeline_events) + or bool(self.stripped_state) + ) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class SlidingWindowList: + """ + Attributes: + count: The total number of entries in the list. Always present if this list + is. + ops: The sliding list operations to perform. + """ + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class Operation: + """ + Attributes: + op: The operation type to perform. + range: Which index positions are affected by this operation. These are + both inclusive. + room_ids: Which room IDs are affected by this operation. These IDs match + up to the positions in the `range`, so the last room ID in this list + matches the 9th index. The room data is held in a separate object. + """ + + op: OperationType + range: Tuple[int, int] + room_ids: List[str] + + count: int + ops: List[Operation] + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class Extensions: + """Responses for extensions + + Attributes: + to_device: The to-device extension (MSC3885) + e2ee: The E2EE device extension (MSC3884) + """ + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class ToDeviceExtension: + """The to-device extension (MSC3885) + + Attributes: + next_batch: The to-device stream token the client should use + to get more results + events: A list of to-device messages for the client + """ + + next_batch: str + events: Sequence[JsonMapping] + + def __bool__(self) -> bool: + return bool(self.events) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class E2eeExtension: + """The E2EE device extension (MSC3884) + + Attributes: + device_list_updates: List of user_ids whose devices have changed or left (only + present on incremental syncs). + device_one_time_keys_count: Map from key algorithm to the number of + unclaimed one-time keys currently held on the server for this device. If + an algorithm is unlisted, the count for that algorithm is assumed to be + zero. If this entire parameter is missing, the count for all algorithms + is assumed to be zero. 
+ device_unused_fallback_key_types: List of unused fallback key algorithms + for this device. + """ + + # Only present on incremental syncs + device_list_updates: Optional[DeviceListUpdates] + device_one_time_keys_count: Mapping[str, int] + device_unused_fallback_key_types: Sequence[str] + + def __bool__(self) -> bool: + # Note that "signed_curve25519" is always returned in key count responses + # regardless of whether we uploaded any keys for it. This is necessary until + # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed. + # + # Also related: + # https://github.com/element-hq/element-android/issues/3725 and + # https://github.com/matrix-org/synapse/issues/10456 + default_otk = self.device_one_time_keys_count.get("signed_curve25519") + more_than_default_otk = len(self.device_one_time_keys_count) > 1 or ( + default_otk is not None and default_otk > 0 + ) + + return bool( + more_than_default_otk + or self.device_list_updates + or self.device_unused_fallback_key_types + ) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class AccountDataExtension: + """The Account Data extension (MSC3959) + + Attributes: + global_account_data_map: Mapping from `type` to `content` of global account + data events. + account_data_by_room_map: Mapping from room_id to mapping of `type` to + `content` of room account data events. + """ + + global_account_data_map: Mapping[str, JsonMapping] + account_data_by_room_map: Mapping[str, Mapping[str, JsonMapping]] + + def __bool__(self) -> bool: + return bool( + self.global_account_data_map or self.account_data_by_room_map + ) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class ReceiptsExtension: + """The Receipts extension (MSC3960) + + Attributes: + room_id_to_receipt_map: Mapping from room_id to `m.receipt` ephemeral + event (type, content) + """ + + room_id_to_receipt_map: Mapping[str, JsonMapping] + + def __bool__(self) -> bool: + return bool(self.room_id_to_receipt_map) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class TypingExtension: + """The Typing Notification extension (MSC3961) + + Attributes: + room_id_to_typing_map: Mapping from room_id to `m.typing` ephemeral + event (type, content) + """ + + room_id_to_typing_map: Mapping[str, JsonMapping] + + def __bool__(self) -> bool: + return bool(self.room_id_to_typing_map) + + to_device: Optional[ToDeviceExtension] = None + e2ee: Optional[E2eeExtension] = None + account_data: Optional[AccountDataExtension] = None + receipts: Optional[ReceiptsExtension] = None + typing: Optional[TypingExtension] = None + + def __bool__(self) -> bool: + return bool( + self.to_device + or self.e2ee + or self.account_data + or self.receipts + or self.typing + ) + + next_pos: SlidingSyncStreamToken + lists: Mapping[str, SlidingWindowList] + rooms: Dict[str, RoomResult] + extensions: Extensions + + def __bool__(self) -> bool: + """Make the result appear empty if there are no updates. This is used + to tell if the notifier needs to wait for more events when polling for + events. + """ + # We don't include `self.lists` here, as a) `lists` is always non-empty even if + # there are no changes, and b) since we're sorting rooms by `stream_ordering` of + # the latest activity, anything that would cause the order to change would end + # up in `self.rooms` and cause us to send down the change. 
+ return bool(self.rooms or self.extensions) + + @staticmethod + def empty(next_pos: SlidingSyncStreamToken) -> "SlidingSyncResult": + "Return a new empty result" + return SlidingSyncResult( + next_pos=next_pos, + lists={}, + rooms={}, + extensions=SlidingSyncResult.Extensions(), + ) + + class StateValues: """ Understood values of the (type, state_key) tuple in `required_state`. @@ -60,7 +412,7 @@ class StateValues: # We can't freeze this class because we want to update it in place with the # de-duplicated data. -@attr.s(slots=True, auto_attribs=True) +@attr.s(slots=True, auto_attribs=True, frozen=True) class RoomSyncConfig: """ Holds the config for what data we should fetch for a room in the sync response. @@ -74,7 +426,7 @@ class RoomSyncConfig: """ timeline_limit: int - required_state_map: Dict[str, Set[str]] + required_state_map: Mapping[str, AbstractSet[str]] @classmethod def from_room_config( @@ -146,27 +498,22 @@ class RoomSyncConfig: required_state_map=required_state_map, ) - def deep_copy(self) -> "RoomSyncConfig": - required_state_map: Dict[str, Set[str]] = { - state_type: state_key_set.copy() - for state_type, state_key_set in self.required_state_map.items() - } - - return RoomSyncConfig( - timeline_limit=self.timeline_limit, - required_state_map=required_state_map, - ) - def combine_room_sync_config( self, other_room_sync_config: "RoomSyncConfig" - ) -> None: + ) -> "RoomSyncConfig": """ - Combine this `RoomSyncConfig` with another `RoomSyncConfig` and take the + Combine this `RoomSyncConfig` with another `RoomSyncConfig` and return the superset union of the two. """ + timeline_limit = self.timeline_limit + required_state_map = { + event_type: set(state_keys) + for event_type, state_keys in self.required_state_map.items() + } + # Take the highest timeline limit - if self.timeline_limit < other_room_sync_config.timeline_limit: - self.timeline_limit = other_room_sync_config.timeline_limit + if timeline_limit < other_room_sync_config.timeline_limit: + timeline_limit = other_room_sync_config.timeline_limit # Union the required state for ( @@ -175,14 +522,14 @@ class RoomSyncConfig: ) in other_room_sync_config.required_state_map.items(): # If we already have a wildcard for everything, we don't need to add # anything else - if StateValues.WILDCARD in self.required_state_map.get( + if StateValues.WILDCARD in required_state_map.get( StateValues.WILDCARD, set() ): break # If we already have a wildcard `state_key` for this `state_type`, we don't need # to add anything else - if StateValues.WILDCARD in self.required_state_map.get(state_type, set()): + if StateValues.WILDCARD in required_state_map.get(state_type, set()): continue # If we're getting wildcards for the `state_type` and `state_key`, that's @@ -191,16 +538,14 @@ class RoomSyncConfig: state_type == StateValues.WILDCARD and StateValues.WILDCARD in state_key_set ): - self.required_state_map = {state_type: {StateValues.WILDCARD}} + required_state_map = {state_type: {StateValues.WILDCARD}} # We can break, since we don't need to add anything else break for state_key in state_key_set: # If we already have a wildcard for this specific `state_key`, we don't need # to add it since the wildcard already covers it. 
- if state_key in self.required_state_map.get( - StateValues.WILDCARD, set() - ): + if state_key in required_state_map.get(StateValues.WILDCARD, set()): continue # If we're getting a wildcard for the `state_type`, get rid of any other @@ -211,7 +556,7 @@ class RoomSyncConfig: # Make a copy so we don't run into an error: `dictionary changed size # during iteration`, when we remove items for existing_state_type, existing_state_key_set in list( - self.required_state_map.items() + required_state_map.items() ): # Make a copy so we don't run into an error: `Set changed size during # iteration`, when we filter out and remove items @@ -221,19 +566,21 @@ class RoomSyncConfig: # If we've the left the `set()` empty, remove it from the map if existing_state_key_set == set(): - self.required_state_map.pop(existing_state_type, None) + required_state_map.pop(existing_state_type, None) # If we're getting a wildcard `state_key`, get rid of any other state_keys # for this `state_type` since the wildcard will cover it already. if state_key == StateValues.WILDCARD: - self.required_state_map[state_type] = {state_key} + required_state_map[state_type] = {state_key} break # Otherwise, just add it to the set else: - if self.required_state_map.get(state_type) is None: - self.required_state_map[state_type] = {state_key} + if required_state_map.get(state_type) is None: + required_state_map[state_type] = {state_key} else: - self.required_state_map[state_type].add(state_key) + required_state_map[state_type].add(state_key) + + return RoomSyncConfig(timeline_limit, required_state_map) def must_await_full_state( self, @@ -324,7 +671,7 @@ class HaveSentRoomFlag(Enum): LIVE = "live" -T = TypeVar("T") +T = TypeVar("T", str, RoomStreamToken, MultiWriterStreamToken) @attr.s(auto_attribs=True, slots=True, frozen=True) @@ -439,7 +786,7 @@ class MutableRoomStatusMap(RoomStatusMap[T]): self._statuses[room_id] = HaveSentRoom.previously(from_token) -@attr.s(auto_attribs=True) +@attr.s(auto_attribs=True, frozen=True) class PerConnectionState: """The per-connection state. A snapshot of what we've sent down the connection before. diff --git a/tests/handlers/test_sliding_sync.py b/tests/handlers/test_sliding_sync.py index 2cf2f2982f..2ef9f665f9 100644 --- a/tests/handlers/test_sliding_sync.py +++ b/tests/handlers/test_sliding_sync.py @@ -18,7 +18,6 @@ # # import logging -from copy import deepcopy from typing import Dict, List, Optional from unittest.mock import patch @@ -47,7 +46,7 @@ from synapse.rest.client import knock, login, room from synapse.server import HomeServer from synapse.storage.util.id_generators import MultiWriterIdGenerator from synapse.types import JsonDict, StreamToken, UserID -from synapse.types.handlers import SlidingSyncConfig +from synapse.types.handlers.sliding_sync import SlidingSyncConfig from synapse.util import Clock from tests.replication._base import BaseMultiWorkerStreamTestCase @@ -566,23 +565,11 @@ class RoomSyncConfigTestCase(TestCase): """ Combine A into B and B into A to make sure we get the same result. 
""" - # Since we're mutating these in place, make a copy for each of our trials - room_sync_config_a = deepcopy(a) - room_sync_config_b = deepcopy(b) + combined_config = a.combine_room_sync_config(b) + self._assert_room_config_equal(combined_config, expected, "B into A") - # Combine B into A - room_sync_config_a.combine_room_sync_config(room_sync_config_b) - - self._assert_room_config_equal(room_sync_config_a, expected, "B into A") - - # Since we're mutating these in place, make a copy for each of our trials - room_sync_config_a = deepcopy(a) - room_sync_config_b = deepcopy(b) - - # Combine A into B - room_sync_config_b.combine_room_sync_config(room_sync_config_a) - - self._assert_room_config_equal(room_sync_config_b, expected, "A into B") + combined_config = a.combine_room_sync_config(b) + self._assert_room_config_equal(combined_config, expected, "A into B") class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase): From b913aaa788ac7c7cc9d48fbf293412f0837113e0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 29 Aug 2024 16:26:58 +0100 Subject: [PATCH 02/15] Sliding sync: Store the per-connection state in the database. (#17599) Based on #17600 --------- Co-authored-by: Eric Eastwood --- changelog.d/17599.misc | 1 + synapse/app/generic_worker.py | 2 + synapse/handlers/sliding_sync/__init__.py | 9 +- synapse/handlers/sliding_sync/store.py | 140 ++--- synapse/storage/database.py | 43 ++ synapse/storage/databases/main/__init__.py | 2 + .../storage/databases/main/sliding_sync.py | 491 ++++++++++++++++++ synapse/storage/engines/_base.py | 5 + synapse/storage/engines/postgres.py | 7 + synapse/storage/engines/sqlite.py | 6 + synapse/storage/schema/__init__.py | 7 +- .../main/delta/87/02_per_connection_state.sql | 81 +++ synapse/types/handlers/sliding_sync.py | 6 + .../sliding_sync/test_rooms_required_state.py | 10 +- 14 files changed, 694 insertions(+), 116 deletions(-) create mode 100644 changelog.d/17599.misc create mode 100644 synapse/storage/databases/main/sliding_sync.py create mode 100644 synapse/storage/schema/main/delta/87/02_per_connection_state.sql diff --git a/changelog.d/17599.misc b/changelog.d/17599.misc new file mode 100644 index 0000000000..2f81356d12 --- /dev/null +++ b/changelog.d/17599.misc @@ -0,0 +1 @@ +Store sliding sync per-connection state in the database. diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 53f1859256..18d294f2b2 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -98,6 +98,7 @@ from synapse.storage.databases.main.roommember import RoomMemberWorkerStore from synapse.storage.databases.main.search import SearchStore from synapse.storage.databases.main.session import SessionStore from synapse.storage.databases.main.signatures import SignatureWorkerStore +from synapse.storage.databases.main.sliding_sync import SlidingSyncStore from synapse.storage.databases.main.state import StateGroupWorkerStore from synapse.storage.databases.main.stats import StatsStore from synapse.storage.databases.main.stream import StreamWorkerStore @@ -159,6 +160,7 @@ class GenericWorkerStore( SessionStore, TaskSchedulerWorkerStore, ExperimentalFeaturesStore, + SlidingSyncStore, ): # Properties that multiple storage classes define. Tell mypy what the # expected type is. 
diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py index d62d520abd..3133325809 100644 --- a/synapse/handlers/sliding_sync/__init__.py +++ b/synapse/handlers/sliding_sync/__init__.py @@ -100,7 +100,7 @@ class SlidingSyncHandler: self.rooms_to_exclude_globally = hs.config.server.rooms_to_exclude_from_sync self.is_mine_id = hs.is_mine_id - self.connection_store = SlidingSyncConnectionStore() + self.connection_store = SlidingSyncConnectionStore(self.store) self.extensions = SlidingSyncExtensionHandler(hs) self.room_lists = SlidingSyncRoomLists(hs) @@ -221,16 +221,11 @@ class SlidingSyncHandler: # amount of time (more with round-trips and re-processing) in the end to # get everything again. previous_connection_state = ( - await self.connection_store.get_per_connection_state( + await self.connection_store.get_and_clear_connection_positions( sync_config, from_token ) ) - await self.connection_store.mark_token_seen( - sync_config=sync_config, - from_token=from_token, - ) - # Get all of the room IDs that the user should be able to see in the sync # response has_lists = sync_config.lists is not None and len(sync_config.lists) > 0 diff --git a/synapse/handlers/sliding_sync/store.py b/synapse/handlers/sliding_sync/store.py index e38fe3556f..d24fccf76f 100644 --- a/synapse/handlers/sliding_sync/store.py +++ b/synapse/handlers/sliding_sync/store.py @@ -13,12 +13,12 @@ # import logging -from typing import TYPE_CHECKING, Dict, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr -from synapse.api.errors import SlidingSyncUnknownPosition from synapse.logging.opentracing import trace +from synapse.storage.databases.main import DataStore from synapse.types import SlidingSyncStreamToken from synapse.types.handlers.sliding_sync import ( MutablePerConnectionState, @@ -61,22 +61,9 @@ class SlidingSyncConnectionStore: to mapping of room ID to `HaveSentRoom`. 
""" - # `(user_id, conn_id)` -> `connection_position` -> `PerConnectionState` - _connections: Dict[Tuple[str, str], Dict[int, PerConnectionState]] = attr.Factory( - dict - ) + store: "DataStore" - async def is_valid_token( - self, sync_config: SlidingSyncConfig, connection_token: int - ) -> bool: - """Return whether the connection token is valid/recognized""" - if connection_token == 0: - return True - - conn_key = self._get_connection_key(sync_config) - return connection_token in self._connections.get(conn_key, {}) - - async def get_per_connection_state( + async def get_and_clear_connection_positions( self, sync_config: SlidingSyncConfig, from_token: Optional[SlidingSyncStreamToken], @@ -86,23 +73,21 @@ class SlidingSyncConnectionStore: Raises: SlidingSyncUnknownPosition if the connection_token is unknown """ - if from_token is None: + # If this is our first request, there is no previous connection state to fetch out of the database + if from_token is None or from_token.connection_position == 0: return PerConnectionState() - connection_position = from_token.connection_position - if connection_position == 0: - # Initial sync (request without a `from_token`) starts at `0` so - # there is no existing per-connection state - return PerConnectionState() + conn_id = sync_config.conn_id or "" - conn_key = self._get_connection_key(sync_config) - sync_statuses = self._connections.get(conn_key, {}) - connection_state = sync_statuses.get(connection_position) + device_id = sync_config.requester.device_id + assert device_id is not None - if connection_state is None: - raise SlidingSyncUnknownPosition() - - return connection_state + return await self.store.get_and_clear_connection_positions( + sync_config.user.to_string(), + device_id, + conn_id, + from_token.connection_position, + ) @trace async def record_new_state( @@ -116,85 +101,28 @@ class SlidingSyncConnectionStore: If there are no changes to the state this may return the same token as the existing per-connection state. """ - prev_connection_token = 0 - if from_token is not None: - prev_connection_token = from_token.connection_position - if not new_connection_state.has_updates(): - return prev_connection_token + if from_token is not None: + return from_token.connection_position + else: + return 0 - conn_key = self._get_connection_key(sync_config) - sync_statuses = self._connections.setdefault(conn_key, {}) + # A from token with a zero connection position means there was no + # previously stored connection state, so we treat a zero the same as + # there being no previous position. + previous_connection_position = None + if from_token is not None and from_token.connection_position != 0: + previous_connection_position = from_token.connection_position - # Generate a new token, removing any existing entries in that token - # (which can happen if requests get resent). - new_store_token = prev_connection_token + 1 - sync_statuses.pop(new_store_token, None) - - # We copy the `MutablePerConnectionState` so that the inner `ChainMap`s - # don't grow forever. - sync_statuses[new_store_token] = new_connection_state.copy() - - return new_store_token - - @trace - async def mark_token_seen( - self, - sync_config: SlidingSyncConfig, - from_token: Optional[SlidingSyncStreamToken], - ) -> None: - """We have received a request with the given token, so we can clear out - any other tokens associated with the connection. - - If there is no from token then we have started afresh, and so we delete - all tokens associated with the device. 
- """ - # Clear out any tokens for the connection that doesn't match the one - # from the request. - - conn_key = self._get_connection_key(sync_config) - sync_statuses = self._connections.pop(conn_key, {}) - if from_token is None: - return - - sync_statuses = { - connection_token: room_statuses - for connection_token, room_statuses in sync_statuses.items() - if connection_token == from_token.connection_position - } - if sync_statuses: - self._connections[conn_key] = sync_statuses - - @staticmethod - def _get_connection_key(sync_config: SlidingSyncConfig) -> Tuple[str, str]: - """Return a unique identifier for this connection. - - The first part is simply the user ID. - - The second part is generally a combination of device ID and conn_id. - However, both these two are optional (e.g. puppet access tokens don't - have device IDs), so this handles those edge cases. - - We use this over the raw `conn_id` to avoid clashes between different - clients that use the same `conn_id`. Imagine a user uses a web client - that uses `conn_id: main_sync_loop` and an Android client that also has - a `conn_id: main_sync_loop`. - """ - - user_id = sync_config.user.to_string() - - # Only one sliding sync connection is allowed per given conn_id (empty - # or not). conn_id = sync_config.conn_id or "" - if sync_config.requester.device_id: - return (user_id, f"D/{sync_config.requester.device_id}/{conn_id}") + device_id = sync_config.requester.device_id + assert device_id is not None - if sync_config.requester.access_token_id: - # If we don't have a device, then the access token ID should be a - # stable ID. - return (user_id, f"A/{sync_config.requester.access_token_id}/{conn_id}") - - # If we have neither then its likely an AS or some weird token. Either - # way we can just fail here. - raise Exception("Cannot use sliding sync with access token type") + return await self.store.persist_per_connection_state( + sync_config.user.to_string(), + device_id, + conn_id, + previous_connection_position, + new_connection_state, + ) diff --git a/synapse/storage/database.py b/synapse/storage/database.py index 569f618193..17ff02f847 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -64,6 +64,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.background_updates import BackgroundUpdater from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine from synapse.storage.types import Connection, Cursor, SQLQueryParameters +from synapse.types import StrCollection from synapse.util.async_helpers import delay_cancellation from synapse.util.iterutils import batch_iter @@ -1095,6 +1096,48 @@ class DatabasePool: txn.execute(sql, vals) + @staticmethod + def simple_insert_returning_txn( + txn: LoggingTransaction, + table: str, + values: Dict[str, Any], + returning: StrCollection, + ) -> Tuple[Any, ...]: + """Executes a `INSERT INTO... RETURNING...` statement (or equivalent for + SQLite versions that don't support it). + """ + + if txn.database_engine.supports_returning: + sql = "INSERT INTO %s (%s) VALUES(%s) RETURNING %s" % ( + table, + ", ".join(k for k in values.keys()), + ", ".join("?" 
for _ in values.keys()), + ", ".join(k for k in returning), + ) + + txn.execute(sql, list(values.values())) + row = txn.fetchone() + assert row is not None + return row + else: + # For old versions of SQLite we do a standard insert and then can + # use `last_insert_rowid` to get at the row we just inserted + DatabasePool.simple_insert_txn( + txn, + table=table, + values=values, + ) + txn.execute("SELECT last_insert_rowid()") + row = txn.fetchone() + assert row is not None + (rowid,) = row + + row = DatabasePool.simple_select_one_txn( + txn, table=table, keyvalues={"rowid": rowid}, retcols=returning + ) + assert row is not None + return row + async def simple_insert_many( self, table: str, diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 586e84f2a4..9a43ab63e8 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -33,6 +33,7 @@ from synapse.storage.database import ( LoggingDatabaseConnection, LoggingTransaction, ) +from synapse.storage.databases.main.sliding_sync import SlidingSyncStore from synapse.storage.databases.main.stats import UserSortOrder from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.types import Cursor @@ -156,6 +157,7 @@ class DataStore( LockStore, SessionStore, TaskSchedulerWorkerStore, + SlidingSyncStore, ): def __init__( self, diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py new file mode 100644 index 0000000000..dc747d7ac0 --- /dev/null +++ b/synapse/storage/databases/main/sliding_sync.py @@ -0,0 +1,491 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2023 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# + + +import logging +from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Set, cast + +import attr + +from synapse.api.errors import SlidingSyncUnknownPosition +from synapse.logging.opentracing import log_kv +from synapse.storage._base import SQLBaseStore, db_to_json +from synapse.storage.database import LoggingTransaction +from synapse.types import MultiWriterStreamToken, RoomStreamToken +from synapse.types.handlers.sliding_sync import ( + HaveSentRoom, + HaveSentRoomFlag, + MutablePerConnectionState, + PerConnectionState, + RoomStatusMap, + RoomSyncConfig, +) +from synapse.util import json_encoder +from synapse.util.caches.descriptors import cached + +if TYPE_CHECKING: + from synapse.storage.databases.main import DataStore + +logger = logging.getLogger(__name__) + + +class SlidingSyncStore(SQLBaseStore): + async def persist_per_connection_state( + self, + user_id: str, + device_id: str, + conn_id: str, + previous_connection_position: Optional[int], + per_connection_state: "MutablePerConnectionState", + ) -> int: + """Persist updates to the per-connection state for a sliding sync + connection. + + Returns: + The connection position of the newly persisted state. + """ + + # This cast is safe because the downstream code only cares about + # `store.get_id_for_instance(...)` and `StreamWorkerStore` is mixed + # alongside `SlidingSyncStore` wherever we create a store. 
+ store = cast("DataStore", self) + + return await self.db_pool.runInteraction( + "persist_per_connection_state", + self.persist_per_connection_state_txn, + user_id=user_id, + device_id=device_id, + conn_id=conn_id, + previous_connection_position=previous_connection_position, + per_connection_state=await PerConnectionStateDB.from_state( + per_connection_state, store + ), + ) + + def persist_per_connection_state_txn( + self, + txn: LoggingTransaction, + user_id: str, + device_id: str, + conn_id: str, + previous_connection_position: Optional[int], + per_connection_state: "PerConnectionStateDB", + ) -> int: + # First we fetch (or create) the connection key associated with the + # previous connection position. + if previous_connection_position is not None: + # The `previous_connection_position` is a user-supplied value, so we + # need to make sure that the one they supplied is actually theirs. + sql = """ + SELECT connection_key + FROM sliding_sync_connection_positions + INNER JOIN sliding_sync_connections USING (connection_key) + WHERE + connection_position = ? + AND user_id = ? AND effective_device_id = ? AND conn_id = ? + """ + txn.execute( + sql, (previous_connection_position, user_id, device_id, conn_id) + ) + row = txn.fetchone() + if row is None: + raise SlidingSyncUnknownPosition() + + (connection_key,) = row + else: + # We're restarting the connection, so we clear the previous existing data we + # used to track it. We do this here to ensure that if we get lots of + # one-shot requests we don't stack up lots of entries. We have `ON DELETE + # CASCADE` setup on the dependent tables so this will clear out all the + # associated data. + self.db_pool.simple_delete_txn( + txn, + table="sliding_sync_connections", + keyvalues={ + "user_id": user_id, + "effective_device_id": device_id, + "conn_id": conn_id, + }, + ) + + (connection_key,) = self.db_pool.simple_insert_returning_txn( + txn, + table="sliding_sync_connections", + values={ + "user_id": user_id, + "effective_device_id": device_id, + "conn_id": conn_id, + "created_ts": self._clock.time_msec(), + }, + returning=("connection_key",), + ) + + # Define a new connection position for the updates + (connection_position,) = self.db_pool.simple_insert_returning_txn( + txn, + table="sliding_sync_connection_positions", + values={ + "connection_key": connection_key, + "created_ts": self._clock.time_msec(), + }, + returning=("connection_position",), + ) + + # We need to deduplicate the `required_state` JSON. We do this by + # fetching all JSON associated with the connection and comparing that + # with the updates to `required_state` + + # Dict from required state json -> required state ID + required_state_to_id: Dict[str, int] = {} + if previous_connection_position is not None: + rows = self.db_pool.simple_select_list_txn( + txn, + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=("required_state_id", "required_state"), + ) + for required_state_id, required_state in rows: + required_state_to_id[required_state] = required_state_id + + room_to_state_ids: Dict[str, int] = {} + unique_required_state: Dict[str, List[str]] = {} + for room_id, room_state in per_connection_state.room_configs.items(): + serialized_state = json_encoder.encode( + # We store the required state as a sorted list of event type / + # state key tuples. 
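+                # For example (hypothetical values): a `required_state_map` of
+                # {"m.room.member": {"*", "@alice:test"}} serializes to
+                # '[["m.room.member", "*"], ["m.room.member", "@alice:test"]]'.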
+ sorted( + (event_type, state_key) + for event_type, state_keys in room_state.required_state_map.items() + for state_key in state_keys + ) + ) + + existing_state_id = required_state_to_id.get(serialized_state) + if existing_state_id is not None: + room_to_state_ids[room_id] = existing_state_id + else: + unique_required_state.setdefault(serialized_state, []).append(room_id) + + # Insert any new `required_state` json we haven't previously seen. + for serialized_required_state, room_ids in unique_required_state.items(): + (required_state_id,) = self.db_pool.simple_insert_returning_txn( + txn, + table="sliding_sync_connection_required_state", + values={ + "connection_key": connection_key, + "required_state": serialized_required_state, + }, + returning=("required_state_id",), + ) + for room_id in room_ids: + room_to_state_ids[room_id] = required_state_id + + # Copy over state from the previous connection position (we'll overwrite + # these rows with any changes). + if previous_connection_position is not None: + sql = """ + INSERT INTO sliding_sync_connection_streams + (connection_position, stream, room_id, room_status, last_token) + SELECT ?, stream, room_id, room_status, last_token + FROM sliding_sync_connection_streams + WHERE connection_position = ? + """ + txn.execute(sql, (connection_position, previous_connection_position)) + + sql = """ + INSERT INTO sliding_sync_connection_room_configs + (connection_position, room_id, timeline_limit, required_state_id) + SELECT ?, room_id, timeline_limit, required_state_id + FROM sliding_sync_connection_room_configs + WHERE connection_position = ? + """ + txn.execute(sql, (connection_position, previous_connection_position)) + + # We now upsert the changes to the various streams. + key_values = [] + value_values = [] + for room_id, have_sent_room in per_connection_state.rooms._statuses.items(): + key_values.append((connection_position, "rooms", room_id)) + value_values.append( + (have_sent_room.status.value, have_sent_room.last_token) + ) + + for room_id, have_sent_room in per_connection_state.receipts._statuses.items(): + key_values.append((connection_position, "receipts", room_id)) + value_values.append( + (have_sent_room.status.value, have_sent_room.last_token) + ) + + self.db_pool.simple_upsert_many_txn( + txn, + table="sliding_sync_connection_streams", + key_names=( + "connection_position", + "stream", + "room_id", + ), + key_values=key_values, + value_names=( + "room_status", + "last_token", + ), + value_values=value_values, + ) + + # ... and upsert changes to the room configs. 
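+        # (By this point every room in `room_configs` has an entry in
+        # `room_to_state_ids`: either an existing ID found above, or one
+        # created when inserting the new `required_state` rows.)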
+ keys = [] + values = [] + for room_id, room_config in per_connection_state.room_configs.items(): + keys.append((connection_position, room_id)) + values.append((room_config.timeline_limit, room_to_state_ids[room_id])) + + self.db_pool.simple_upsert_many_txn( + txn, + table="sliding_sync_connection_room_configs", + key_names=( + "connection_position", + "room_id", + ), + key_values=keys, + value_names=( + "timeline_limit", + "required_state_id", + ), + value_values=values, + ) + + return connection_position + + @cached(iterable=True, max_entries=100000) + async def get_and_clear_connection_positions( + self, user_id: str, device_id: str, conn_id: str, connection_position: int + ) -> "PerConnectionState": + """Get the per-connection state for the given connection position.""" + + per_connection_state_db = await self.db_pool.runInteraction( + "get_and_clear_connection_positions", + self._get_and_clear_connection_positions_txn, + user_id=user_id, + device_id=device_id, + conn_id=conn_id, + connection_position=connection_position, + ) + + # This cast is safe because the downstream code only cares about + # `store.get_id_for_instance(...)` and `StreamWorkerStore` is mixed + # alongside `SlidingSyncStore` wherever we create a store. + store = cast("DataStore", self) + + return await per_connection_state_db.to_state(store) + + def _get_and_clear_connection_positions_txn( + self, + txn: LoggingTransaction, + user_id: str, + device_id: str, + conn_id: str, + connection_position: int, + ) -> "PerConnectionStateDB": + # The `previous_connection_position` is a user-supplied value, so we + # need to make sure that the one they supplied is actually theirs. + sql = """ + SELECT connection_key + FROM sliding_sync_connection_positions + INNER JOIN sliding_sync_connections USING (connection_key) + WHERE + connection_position = ? + AND user_id = ? AND effective_device_id = ? AND conn_id = ? + """ + txn.execute(sql, (connection_position, user_id, device_id, conn_id)) + row = txn.fetchone() + if row is None: + raise SlidingSyncUnknownPosition() + + (connection_key,) = row + + # Now that we have seen the client has received and used the connection + # position, we can delete all the other connection positions. + sql = """ + DELETE FROM sliding_sync_connection_positions + WHERE connection_key = ? AND connection_position != ? + """ + txn.execute(sql, (connection_key, connection_position)) + + # Fetch and create a mapping from required state ID to the actual + # required state for the connection. + rows = self.db_pool.simple_select_list_txn( + txn, + table="sliding_sync_connection_required_state", + keyvalues={"connection_key": connection_key}, + retcols=( + "required_state_id", + "required_state", + ), + ) + + required_state_map: Dict[int, Dict[str, Set[str]]] = {} + for row in rows: + state = required_state_map[row[0]] = {} + for event_type, state_keys in db_to_json(row[1]): + state[event_type] = set(state_keys) + + # Get all the room configs, looking up the required state from the map + # above. 
+        room_config_rows = self.db_pool.simple_select_list_txn(
+            txn,
+            table="sliding_sync_connection_room_configs",
+            keyvalues={"connection_position": connection_position},
+            retcols=(
+                "room_id",
+                "timeline_limit",
+                "required_state_id",
+            ),
+        )
+
+        room_configs: Dict[str, RoomSyncConfig] = {}
+        for (
+            room_id,
+            timeline_limit,
+            required_state_id,
+        ) in room_config_rows:
+            room_configs[room_id] = RoomSyncConfig(
+                timeline_limit=timeline_limit,
+                required_state_map=required_state_map[required_state_id],
+            )
+
+        # Now look up the per-room stream data.
+        rooms: Dict[str, HaveSentRoom[str]] = {}
+        receipts: Dict[str, HaveSentRoom[str]] = {}
+
+        receipt_rows = self.db_pool.simple_select_list_txn(
+            txn,
+            table="sliding_sync_connection_streams",
+            keyvalues={"connection_position": connection_position},
+            retcols=(
+                "stream",
+                "room_id",
+                "room_status",
+                "last_token",
+            ),
+        )
+        for stream, room_id, room_status, last_token in receipt_rows:
+            have_sent_room: HaveSentRoom[str] = HaveSentRoom(
+                status=HaveSentRoomFlag(room_status), last_token=last_token
+            )
+            if stream == "rooms":
+                rooms[room_id] = have_sent_room
+            elif stream == "receipts":
+                receipts[room_id] = have_sent_room
+            else:
+                # For forwards compatibility we ignore unknown streams, as in
+                # future we want to be able to easily add more stream types.
+                logger.warning("Unrecognized sliding sync stream in DB %r", stream)
+
+        return PerConnectionStateDB(
+            rooms=RoomStatusMap(rooms),
+            receipts=RoomStatusMap(receipts),
+            room_configs=room_configs,
+        )
+
+
+@attr.s(auto_attribs=True, frozen=True)
+class PerConnectionStateDB:
+    """An equivalent to `PerConnectionState` that holds data in a format stored
+    in the DB.
+
+    The principal difference is that the tokens for the different streams are
+    serialized to strings.
+
+    When persisting, this *only* contains updates to the state.
+ """ + + rooms: "RoomStatusMap[str]" + receipts: "RoomStatusMap[str]" + + room_configs: Mapping[str, "RoomSyncConfig"] + + @staticmethod + async def from_state( + per_connection_state: "MutablePerConnectionState", store: "DataStore" + ) -> "PerConnectionStateDB": + """Convert from a standard `PerConnectionState`""" + rooms = { + room_id: HaveSentRoom( + status=status.status, + last_token=( + await status.last_token.to_string(store) + if status.last_token is not None + else None + ), + ) + for room_id, status in per_connection_state.rooms.get_updates().items() + } + + receipts = { + room_id: HaveSentRoom( + status=status.status, + last_token=( + await status.last_token.to_string(store) + if status.last_token is not None + else None + ), + ) + for room_id, status in per_connection_state.receipts.get_updates().items() + } + + log_kv( + { + "rooms": rooms, + "receipts": receipts, + "room_configs": per_connection_state.room_configs.maps[0], + } + ) + + return PerConnectionStateDB( + rooms=RoomStatusMap(rooms), + receipts=RoomStatusMap(receipts), + room_configs=per_connection_state.room_configs.maps[0], + ) + + async def to_state(self, store: "DataStore") -> "PerConnectionState": + """Convert into a standard `PerConnectionState`""" + rooms = { + room_id: HaveSentRoom( + status=status.status, + last_token=( + await RoomStreamToken.parse(store, status.last_token) + if status.last_token is not None + else None + ), + ) + for room_id, status in self.rooms._statuses.items() + } + + receipts = { + room_id: HaveSentRoom( + status=status.status, + last_token=( + await MultiWriterStreamToken.parse(store, status.last_token) + if status.last_token is not None + else None + ), + ) + for room_id, status in self.receipts._statuses.items() + } + + return PerConnectionState( + rooms=RoomStatusMap(rooms), + receipts=RoomStatusMap(receipts), + room_configs=self.room_configs, + ) diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py index ad222e7e2d..9d82c59384 100644 --- a/synapse/storage/engines/_base.py +++ b/synapse/storage/engines/_base.py @@ -28,6 +28,11 @@ if TYPE_CHECKING: from synapse.storage.database import LoggingDatabaseConnection +# A string that will be replaced with the appropriate auto increment directive +# for the database engine, expands to an auto incrementing integer primary key. +AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER = "$%AUTO_INCREMENT_PRIMARY_KEY%$" + + class IsolationLevel(IntEnum): READ_COMMITTED: int = 1 REPEATABLE_READ: int = 2 diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index 90641d5a18..8c8c6d0414 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -25,6 +25,7 @@ from typing import TYPE_CHECKING, Any, Mapping, NoReturn, Optional, Tuple, cast import psycopg2.extensions from synapse.storage.engines._base import ( + AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER, BaseDatabaseEngine, IncorrectDatabaseSetup, IsolationLevel, @@ -256,4 +257,10 @@ class PostgresEngine( executing the script in its own transaction. The script transaction is left open and it is the responsibility of the caller to commit it. 
""" + # Replace auto increment placeholder with the appropriate directive + script = script.replace( + AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER, + "BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY", + ) + cursor.execute(f"COMMIT; BEGIN TRANSACTION; {script}") diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index b11094c5c1..9d1795ebe5 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -25,6 +25,7 @@ import threading from typing import TYPE_CHECKING, Any, List, Mapping, Optional from synapse.storage.engines import BaseDatabaseEngine +from synapse.storage.engines._base import AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER from synapse.storage.types import Cursor if TYPE_CHECKING: @@ -168,6 +169,11 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection, sqlite3.Cursor]): > first. No other implicit transaction control is performed; any transaction > control must be added to sql_script. """ + # Replace auto increment placeholder with the appropriate directive + script = script.replace( + AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER, "INTEGER PRIMARY KEY AUTOINCREMENT" + ) + # The implementation of `executescript` can be found at # https://github.com/python/cpython/blob/3.11/Modules/_sqlite/cursor.c#L1035. cursor.executescript(f"BEGIN TRANSACTION; {script}") diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 581d00346b..3929fd235c 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -19,7 +19,7 @@ # # -SCHEMA_VERSION = 86 # remember to update the list below when updating +SCHEMA_VERSION = 87 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -142,6 +142,11 @@ Changes in SCHEMA_VERSION = 85 Changes in SCHEMA_VERSION = 86 - Add a column `authenticated` to the tables `local_media_repository` and `remote_media_cache` + +Changes in SCHEMA_VERSION = 87 + - Add tables for storing the per-connection state for sliding sync requests: + sliding_sync_connections, sliding_sync_connection_positions, sliding_sync_connection_required_state, + sliding_sync_connection_room_configs, sliding_sync_connection_streams """ diff --git a/synapse/storage/schema/main/delta/87/02_per_connection_state.sql b/synapse/storage/schema/main/delta/87/02_per_connection_state.sql new file mode 100644 index 0000000000..59bc14a2c9 --- /dev/null +++ b/synapse/storage/schema/main/delta/87/02_per_connection_state.sql @@ -0,0 +1,81 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. +-- +-- Copyright (C) 2024 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + + +-- Table to track active sliding sync connections. +-- +-- A new connection will be created for every sliding sync request without a +-- `since` token for a given `conn_id` for a device.# +-- +-- Once a new connection is created and used we delete all other connections for +-- the `conn_id`. 
+CREATE TABLE sliding_sync_connections( + connection_key $%AUTO_INCREMENT_PRIMARY_KEY%$, + user_id TEXT NOT NULL, + -- Generally the device ID, but may be something else for e.g. puppeted accounts. + effective_device_id TEXT NOT NULL, + conn_id TEXT NOT NULL, + created_ts BIGINT NOT NULL +); + +CREATE INDEX sliding_sync_connections_idx ON sliding_sync_connections(user_id, effective_device_id, conn_id); +CREATE INDEX sliding_sync_connections_ts_idx ON sliding_sync_connections(created_ts); + +-- We track per-connection state by associating changes to the state with +-- connection positions. This ensures that we correctly track state even if we +-- see retries of requests. +-- +-- If the client starts a "new" connection (by not specifying a since token), +-- we'll clear out the other connections (to ensure that we don't end up with +-- lots of connection keys). +CREATE TABLE sliding_sync_connection_positions( + connection_position $%AUTO_INCREMENT_PRIMARY_KEY%$, + connection_key BIGINT NOT NULL REFERENCES sliding_sync_connections(connection_key) ON DELETE CASCADE, + created_ts BIGINT NOT NULL +); + +CREATE INDEX sliding_sync_connection_positions_key ON sliding_sync_connection_positions(connection_key); +CREATE INDEX sliding_sync_connection_positions_ts_idx ON sliding_sync_connection_positions(created_ts); + + +-- To save space we deduplicate the `required_state` json by assigning IDs to +-- different values. +CREATE TABLE sliding_sync_connection_required_state( + required_state_id $%AUTO_INCREMENT_PRIMARY_KEY%$, + connection_key BIGINT NOT NULL REFERENCES sliding_sync_connections(connection_key) ON DELETE CASCADE, + required_state TEXT NOT NULL -- We store this as a json list of event type / state key tuples. +); + +CREATE INDEX sliding_sync_connection_required_state_conn_pos ON sliding_sync_connection_required_state(connection_key); + + +-- Stores the room configs we have seen for rooms in a connection. +CREATE TABLE sliding_sync_connection_room_configs( + connection_position BIGINT NOT NULL REFERENCES sliding_sync_connection_positions(connection_position) ON DELETE CASCADE, + room_id TEXT NOT NULL, + timeline_limit BIGINT NOT NULL, + required_state_id BIGINT NOT NULL REFERENCES sliding_sync_connection_required_state(required_state_id) +); + +CREATE UNIQUE INDEX sliding_sync_connection_room_configs_idx ON sliding_sync_connection_room_configs(connection_position, room_id); + +-- Stores what data we have sent for given streams down given connections. +CREATE TABLE sliding_sync_connection_streams( + connection_position BIGINT NOT NULL REFERENCES sliding_sync_connection_positions(connection_position) ON DELETE CASCADE, + stream TEXT NOT NULL, -- e.g. "events" or "receipts" + room_id TEXT NOT NULL, + room_status TEXT NOT NULL, -- "live" or "previously", i.e. the `HaveSentRoomFlag` value + last_token TEXT -- For "previously" the token for the stream we have sent up to. 
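+    -- (last_token is NULL while the room status is "live".)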
+); + +CREATE UNIQUE INDEX sliding_sync_connection_streams_idx ON sliding_sync_connection_streams(connection_position, room_id, stream); diff --git a/synapse/types/handlers/sliding_sync.py b/synapse/types/handlers/sliding_sync.py index bca1ff7b54..84a88bf784 100644 --- a/synapse/types/handlers/sliding_sync.py +++ b/synapse/types/handlers/sliding_sync.py @@ -730,6 +730,9 @@ class RoomStatusMap(Generic[T]): return RoomStatusMap(statuses=dict(self._statuses)) + def __len__(self) -> int: + return len(self._statuses) + class MutableRoomStatusMap(RoomStatusMap[T]): """A mutable version of `RoomStatusMap`""" @@ -831,6 +834,9 @@ class PerConnectionState: room_configs=dict(self.room_configs), ) + def __len__(self) -> int: + return len(self.rooms) + len(self.receipts) + len(self.room_configs) + @attr.s(auto_attribs=True) class MutablePerConnectionState(PerConnectionState): diff --git a/tests/rest/client/sliding_sync/test_rooms_required_state.py b/tests/rest/client/sliding_sync/test_rooms_required_state.py index 823e7db569..498c921cbd 100644 --- a/tests/rest/client/sliding_sync/test_rooms_required_state.py +++ b/tests/rest/client/sliding_sync/test_rooms_required_state.py @@ -191,8 +191,14 @@ class SlidingSyncRoomsRequiredStateTestCase(SlidingSyncBase): } _, from_token = self.do_sync(sync_body, tok=user1_tok) - # Reset the in-memory cache - self.hs.get_sliding_sync_handler().connection_store._connections.clear() + # Reset the positions + self.get_success( + self.store.db_pool.simple_delete( + table="sliding_sync_connections", + keyvalues={"user_id": user1_id}, + desc="clear_sliding_sync_connections_cache", + ) + ) # Make the Sliding Sync request channel = self.make_request( From 53a3783750789d2627ff6e57857f07216bef9449 Mon Sep 17 00:00:00 2001 From: Michael Telatynski <7t3chguy@gmail.com> Date: Fri, 30 Aug 2024 13:52:57 +0100 Subject: [PATCH 03/15] Use custom stage UIA error for MAS cross-signing reset (#17509) Rather than 501 M_UNRECOGNISED Client side implementation at https://github.com/matrix-org/matrix-react-sdk/pull/12892/ --- changelog.d/17509.feature | 1 + synapse/rest/client/auth.py | 13 ++++++++++- synapse/rest/client/keys.py | 30 +++++++++++++++++++------ tests/handlers/test_oauth_delegation.py | 2 +- tests/rest/client/test_keys.py | 12 +++------- 5 files changed, 40 insertions(+), 18 deletions(-) create mode 100644 changelog.d/17509.feature diff --git a/changelog.d/17509.feature b/changelog.d/17509.feature new file mode 100644 index 0000000000..6d639ceb98 --- /dev/null +++ b/changelog.d/17509.feature @@ -0,0 +1 @@ +Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support. 
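
In practical terms, a client asking to replace its cross-signing keys now gets back an ordinary UIA 401 advertising the custom stage, rather than a 501 `M_UNRECOGNIZED` error. A rough sketch of the response body, assuming a hypothetical MAS instance at mas.example.com (the real URL is derived from the homeserver's MSC3861 configuration, as in the `keys.py` change below):

# Illustrative only: the URL and message text depend on the deployment.
cross_signing_reset_uia_body = {
    "session": "dummy",  # not a real UIA session; kept for API-shape compatibility
    "flows": [{"stages": ["org.matrix.cross_signing_reset"]}],
    "params": {
        "org.matrix.cross_signing_reset": {
            "url": "https://mas.example.com/account?action=org.matrix.cross_signing_reset",
        },
    },
    "msg": "To reset your end-to-end encryption cross-signing identity, "
    "you first need to approve it at the URL above and then try again.",
}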
diff --git a/synapse/rest/client/auth.py b/synapse/rest/client/auth.py index 4221f35937..32eeecd662 100644 --- a/synapse/rest/client/auth.py +++ b/synapse/rest/client/auth.py @@ -27,7 +27,7 @@ from twisted.web.server import Request from synapse.api.constants import LoginType from synapse.api.errors import LoginError, SynapseError from synapse.api.urls import CLIENT_API_PREFIX -from synapse.http.server import HttpServer, respond_with_html +from synapse.http.server import HttpServer, respond_with_html, respond_with_redirect from synapse.http.servlet import RestServlet, parse_string from synapse.http.site import SynapseRequest @@ -66,6 +66,17 @@ class AuthRestServlet(RestServlet): if not session: raise SynapseError(400, "No session supplied") + if ( + self.hs.config.experimental.msc3861.enabled + and stagetype == "org.matrix.cross_signing_reset" + ): + config = self.hs.config.experimental.msc3861 + if config.account_management_url is not None: + url = f"{config.account_management_url}?action=org.matrix.cross_signing_reset" + else: + url = config.issuer + respond_with_redirect(request, str.encode(url)) + if stagetype == LoginType.RECAPTCHA: html = self.recaptcha_template.render( session=session, diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index eddad7d5b8..a33eb6c1f2 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -23,10 +23,13 @@ import logging import re from collections import Counter -from http import HTTPStatus from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple -from synapse.api.errors import Codes, InvalidAPICallError, SynapseError +from synapse.api.errors import ( + InteractiveAuthIncompleteError, + InvalidAPICallError, + SynapseError, +) from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, @@ -409,11 +412,24 @@ class SigningKeyUploadServlet(RestServlet): else: url = config.issuer - raise SynapseError( - HTTPStatus.NOT_IMPLEMENTED, - "To reset your end-to-end encryption cross-signing identity, " - f"you first need to approve it at {url} and then try again.", - Codes.UNRECOGNIZED, + # We use a dummy session ID as this isn't really a UIA flow, but we + # reuse the same API shape for better client compatibility. + raise InteractiveAuthIncompleteError( + "dummy", + { + "session": "dummy", + "flows": [ + {"stages": ["org.matrix.cross_signing_reset"]}, + ], + "params": { + "org.matrix.cross_signing_reset": { + "url": url, + }, + }, + "msg": "To reset your end-to-end encryption cross-signing " + f"identity, you first need to approve it at {url} and " + "then try again.", + }, ) else: # Without MSC3861, we require UIA. 
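
Putting the two hunks above together, a sketch of the flow a client now follows, assuming a homeserver at synapse.example.com and the third-party `requests` library (endpoint shapes follow the standard UIA fallback API; the key payload is a placeholder):

import requests

resp = requests.post(
    "https://synapse.example.com/_matrix/client/v3/keys/device_signing/upload",
    headers={"Authorization": "Bearer <access_token>"},
    json={"master_key": {"user_id": "@alice:example.com", "usage": ["master"], "keys": {}}},
)
body = resp.json()
assert resp.status_code == 401
assert "org.matrix.cross_signing_reset" in body["flows"][0]["stages"]

# Opening the standard UIA web fallback for the custom stage is what triggers
# the new redirect to the MAS account management page in AuthRestServlet:
fallback_url = (
    "https://synapse.example.com/_matrix/client/v3/auth/"
    "org.matrix.cross_signing_reset/fallback/web?session=" + body["session"]
)

Once the user has approved the reset in MAS, retrying the original upload succeeds, as exercised by the updated tests below.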
diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py
index 036c539db2..5b5dc713d1 100644
--- a/tests/handlers/test_oauth_delegation.py
+++ b/tests/handlers/test_oauth_delegation.py
@@ -550,7 +550,7 @@ class MSC3861OAuthDelegation(HomeserverTestCase):
             access_token="mockAccessToken",
         )
 
-        self.assertEqual(channel.code, HTTPStatus.NOT_IMPLEMENTED, channel.json_body)
+        self.assertEqual(channel.code, HTTPStatus.UNAUTHORIZED, channel.json_body)
 
     def expect_unauthorized(
         self, method: str, path: str, content: Union[bytes, str, JsonDict] = ""
diff --git a/tests/rest/client/test_keys.py b/tests/rest/client/test_keys.py
index 8bbd109092..d9a210b616 100644
--- a/tests/rest/client/test_keys.py
+++ b/tests/rest/client/test_keys.py
@@ -315,9 +315,7 @@ class SigningKeyUploadServletTestCase(unittest.HomeserverTestCase):
                 "master_key": master_key2,
             },
         )
-        self.assertEqual(
-            channel.code, HTTPStatus.NOT_IMPLEMENTED, channel.json_body
-        )
+        self.assertEqual(channel.code, HTTPStatus.UNAUTHORIZED, channel.json_body)
 
         # Pretend that MAS did UIA and allowed us to replace the master key.
         channel = self.make_request(
@@ -349,9 +347,7 @@ class SigningKeyUploadServletTestCase(unittest.HomeserverTestCase):
                 "master_key": master_key3,
             },
         )
-        self.assertEqual(
-            channel.code, HTTPStatus.NOT_IMPLEMENTED, channel.json_body
-        )
+        self.assertEqual(channel.code, HTTPStatus.UNAUTHORIZED, channel.json_body)
 
         # Pretend that MAS did UIA and allowed us to replace the master key.
         channel = self.make_request(
@@ -376,6 +372,4 @@ class SigningKeyUploadServletTestCase(unittest.HomeserverTestCase):
                 "master_key": master_key3,
             },
         )
-        self.assertEqual(
-            channel.code, HTTPStatus.NOT_IMPLEMENTED, channel.json_body
-        )
+        self.assertEqual(channel.code, HTTPStatus.UNAUTHORIZED, channel.json_body)

From 48303fcbccd50798168940f303364f6b0d25415d Mon Sep 17 00:00:00 2001
From: Quentin Gliech
Date: Fri, 30 Aug 2024 16:04:08 +0200
Subject: [PATCH 04/15] MSC3861: load the issuer and account management URLs
 from OIDC discovery (#17407)

This will help mitigate any discrepancies between the issuer configured
and the one returned by the OIDC provider.

This also removes the need for configuring the `account_management_url`
explicitly, as it will now be loaded from the OIDC discovery, as per
MSC2965.

Because we may now fetch remote metadata when serving the
.well-known/matrix/client endpoint, this also makes the client
well-known resource asynchronous.
---
 changelog.d/17407.misc                |  1 +
 synapse/api/auth/msc3861_delegated.py | 33 ++++++++++++++++++++++--
 synapse/rest/client/auth.py           | 16 ++++++++----
 synapse/rest/client/auth_issuer.py    | 10 ++++++--
 synapse/rest/client/keys.py           | 16 ++++++++----
 synapse/rest/client/login.py          |  2 +-
 synapse/rest/well_known.py            | 37 +++++++++++++++------------
 tests/rest/client/test_auth_issuer.py | 20 ++++++++++++++-
 tests/rest/test_well_known.py         | 37 +++++++++++++++++----------
 9 files changed, 126 insertions(+), 46 deletions(-)
 create mode 100644 changelog.d/17407.misc

diff --git a/changelog.d/17407.misc b/changelog.d/17407.misc
new file mode 100644
index 0000000000..9ed6e61a5b
--- /dev/null
+++ b/changelog.d/17407.misc
@@ -0,0 +1 @@
+MSC3861: load the issuer and account management URLs from OIDC discovery.
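
For reference, the discovery request that the `MSC3861DelegatedAuth` changes below rely on looks roughly like this; the well-known path comes from RFC 8414 / OpenID Connect Discovery and the `account_management_uri` field from MSC2965, while the issuer URL is purely illustrative:

import json
from urllib.request import urlopen

def fetch_oidc_metadata(issuer: str) -> dict:
    # Metadata is served at <issuer>/.well-known/openid-configuration
    url = issuer.rstrip("/") + "/.well-known/openid-configuration"
    with urlopen(url) as resp:
        return json.load(resp)

metadata = fetch_oidc_metadata("https://account.example.com/")
issuer = metadata.get("issuer", "https://account.example.com/")  # fall back to config
account_management_url = metadata.get("account_management_uri")  # may be absent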
diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 7361666c77..6bd845c7e3 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -121,7 +121,9 @@ class MSC3861DelegatedAuth(BaseAuth): self._hostname = hs.hostname self._admin_token = self._config.admin_token - self._issuer_metadata = RetryOnExceptionCachedCall(self._load_metadata) + self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata]( + self._load_metadata + ) if isinstance(auth_method, PrivateKeyJWTWithKid): # Use the JWK as the client secret when using the private_key_jwt method @@ -145,6 +147,33 @@ class MSC3861DelegatedAuth(BaseAuth): # metadata.validate_introspection_endpoint() return metadata + async def issuer(self) -> str: + """ + Get the configured issuer + + This will use the issuer value set in the metadata, + falling back to the one set in the config if not set in the metadata + """ + metadata = await self._issuer_metadata.get() + return metadata.issuer or self._config.issuer + + async def account_management_url(self) -> Optional[str]: + """ + Get the configured account management URL + + This will discover the account management URL from the issuer if it's not set in the config + """ + if self._config.account_management_url is not None: + return self._config.account_management_url + + try: + metadata = await self._issuer_metadata.get() + return metadata.get("account_management_uri", None) + # We don't want to raise here if we can't load the metadata + except Exception: + logger.warning("Failed to load metadata:", exc_info=True) + return None + async def _introspection_endpoint(self) -> str: """ Returns the introspection endpoint of the issuer @@ -154,7 +183,7 @@ class MSC3861DelegatedAuth(BaseAuth): if self._config.introspection_endpoint is not None: return self._config.introspection_endpoint - metadata = await self._load_metadata() + metadata = await self._issuer_metadata.get() return metadata.get("introspection_endpoint") async def _introspect_token(self, token: str) -> IntrospectionToken: diff --git a/synapse/rest/client/auth.py b/synapse/rest/client/auth.py index 32eeecd662..b8dca7c797 100644 --- a/synapse/rest/client/auth.py +++ b/synapse/rest/client/auth.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from twisted.web.server import Request @@ -70,11 +70,17 @@ class AuthRestServlet(RestServlet): self.hs.config.experimental.msc3861.enabled and stagetype == "org.matrix.cross_signing_reset" ): - config = self.hs.config.experimental.msc3861 - if config.account_management_url is not None: - url = f"{config.account_management_url}?action=org.matrix.cross_signing_reset" + # If MSC3861 is enabled, we can assume self._auth is an instance of MSC3861DelegatedAuth + # We import lazily here because of the authlib requirement + from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth + + auth = cast(MSC3861DelegatedAuth, self.auth) + + url = await auth.account_management_url() + if url is not None: + url = f"{url}?action=org.matrix.cross_signing_reset" else: - url = config.issuer + url = await auth.issuer() respond_with_redirect(request, str.encode(url)) if stagetype == LoginType.RECAPTCHA: diff --git a/synapse/rest/client/auth_issuer.py b/synapse/rest/client/auth_issuer.py index 77b9720956..acd0399d85 100644 --- a/synapse/rest/client/auth_issuer.py +++ b/synapse/rest/client/auth_issuer.py @@ -13,7 +13,7 @@ # limitations under the License. 
 import logging
 import typing
-from typing import Tuple
+from typing import Tuple, cast
 
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.server import HttpServer
@@ -43,10 +43,16 @@ class AuthIssuerServlet(RestServlet):
     def __init__(self, hs: "HomeServer"):
         super().__init__()
         self._config = hs.config
+        self._auth = hs.get_auth()
 
     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         if self._config.experimental.msc3861.enabled:
-            return 200, {"issuer": self._config.experimental.msc3861.issuer}
+            # If MSC3861 is enabled, we can assume self._auth is an instance of MSC3861DelegatedAuth
+            # We import lazily here because of the authlib requirement
+            from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth
+
+            auth = cast(MSC3861DelegatedAuth, self._auth)
+            return 200, {"issuer": await auth.issuer()}
         else:
             # Wouldn't expect this to be reached: the servlet shouldn't have been
             # registered. Still, fail gracefully if we are registered for some reason.
diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py
index a33eb6c1f2..7025662fdc 100644
--- a/synapse/rest/client/keys.py
+++ b/synapse/rest/client/keys.py
@@ -23,7 +23,7 @@
 import logging
 import re
 from collections import Counter
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, cast
 
 from synapse.api.errors import (
     InteractiveAuthIncompleteError,
@@ -406,11 +406,17 @@ class SigningKeyUploadServlet(RestServlet):
         # explicitly mark the master key as replaceable.
         if self.hs.config.experimental.msc3861.enabled:
             if not master_key_updatable_without_uia:
-                config = self.hs.config.experimental.msc3861
-                if config.account_management_url is not None:
-                    url = f"{config.account_management_url}?action=org.matrix.cross_signing_reset"
+                # If MSC3861 is enabled, we can assume self.auth is an instance of MSC3861DelegatedAuth
+                # We import lazily here because of the authlib requirement
+                from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth
+
+                auth = cast(MSC3861DelegatedAuth, self.auth)
+
+                uri = await auth.account_management_url()
+                if uri is not None:
+                    url = f"{uri}?action=org.matrix.cross_signing_reset"
                 else:
-                    url = config.issuer
+                    url = await auth.issuer()
 
                 # We use a dummy session ID as this isn't really a UIA flow, but we
                 # reuse the same API shape for better client compatibility.
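
All three call sites above await `auth.issuer()` or `auth.account_management_url()` on every request; this stays cheap because the metadata lookup goes through `RetryOnExceptionCachedCall`, so the provider is only contacted once. A simplified, self-contained re-implementation of that behaviour (not Synapse's actual code) to illustrate why only the first request hits the provider:

import asyncio
from typing import Awaitable, Callable, Generic, Optional, TypeVar

T = TypeVar("T")


class CachedCall(Generic[T]):
    """Sketch of RetryOnExceptionCachedCall: cache the first success, retry failures."""

    def __init__(self, f: Callable[[], Awaitable[T]]) -> None:
        self._f = f
        self._result: Optional[T] = None

    async def get(self) -> T:
        if self._result is None:
            # If this raises, nothing is cached, so the next get() retries.
            self._result = await self._f()
        return self._result


async def main() -> None:
    calls = 0

    async def load_metadata() -> dict:
        nonlocal calls
        calls += 1
        return {"issuer": "https://issuer"}

    metadata = CachedCall(load_metadata)
    assert (await metadata.get())["issuer"] == "https://issuer"
    await metadata.get()
    assert calls == 1  # the second call was served from the cache


asyncio.run(main())

The tests in this patch exercise exactly that property: the discovery document is requested once, and subsequent requests are served from the cache.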
diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index ae691bcdba..03b1e7edc4 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -268,7 +268,7 @@ class LoginRestServlet(RestServlet): approval_notice_medium=ApprovalNoticeMedium.NONE, ) - well_known_data = self._well_known_builder.get_well_known() + well_known_data = await self._well_known_builder.get_well_known() if well_known_data: result["well_known"] = well_known_data return 200, result diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index d0ca8ca46b..989e570671 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -18,12 +18,13 @@ # # import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Optional, Tuple, cast from twisted.web.resource import Resource from twisted.web.server import Request -from synapse.http.server import set_cors_headers +from synapse.api.errors import NotFoundError +from synapse.http.server import DirectServeJsonResource from synapse.http.site import SynapseRequest from synapse.types import JsonDict from synapse.util import json_encoder @@ -38,8 +39,9 @@ logger = logging.getLogger(__name__) class WellKnownBuilder: def __init__(self, hs: "HomeServer"): self._config = hs.config + self._auth = hs.get_auth() - def get_well_known(self) -> Optional[JsonDict]: + async def get_well_known(self) -> Optional[JsonDict]: if not self._config.server.serve_client_wellknown: return None @@ -52,13 +54,20 @@ class WellKnownBuilder: # We use the MSC3861 values as they are used by multiple MSCs if self._config.experimental.msc3861.enabled: + # If MSC3861 is enabled, we can assume self._auth is an instance of MSC3861DelegatedAuth + # We import lazily here because of the authlib requirement + from synapse.api.auth.msc3861_delegated import MSC3861DelegatedAuth + + auth = cast(MSC3861DelegatedAuth, self._auth) + result["org.matrix.msc2965.authentication"] = { - "issuer": self._config.experimental.msc3861.issuer + "issuer": await auth.issuer(), } - if self._config.experimental.msc3861.account_management_url is not None: + account_management_url = await auth.account_management_url() + if account_management_url is not None: result["org.matrix.msc2965.authentication"][ "account" - ] = self._config.experimental.msc3861.account_management_url + ] = account_management_url if self._config.server.extra_well_known_client_content: for ( @@ -71,26 +80,22 @@ class WellKnownBuilder: return result -class ClientWellKnownResource(Resource): +class ClientWellKnownResource(DirectServeJsonResource): """A Twisted web resource which renders the .well-known/matrix/client file""" isLeaf = 1 def __init__(self, hs: "HomeServer"): - Resource.__init__(self) + super().__init__() self._well_known_builder = WellKnownBuilder(hs) - def render_GET(self, request: SynapseRequest) -> bytes: - set_cors_headers(request) - r = self._well_known_builder.get_well_known() + async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + r = await self._well_known_builder.get_well_known() if not r: - request.setResponseCode(404) - request.setHeader(b"Content-Type", b"text/plain") - return b".well-known not available" + raise NotFoundError(".well-known not available") logger.debug("returning: %s", r) - request.setHeader(b"Content-Type", b"application/json") - return json_encoder.encode(r).encode("utf-8") + return 200, r class ServerWellKnownResource(Resource): diff --git a/tests/rest/client/test_auth_issuer.py 
b/tests/rest/client/test_auth_issuer.py index 964baeec32..299475a35c 100644 --- a/tests/rest/client/test_auth_issuer.py +++ b/tests/rest/client/test_auth_issuer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from http import HTTPStatus +from unittest.mock import AsyncMock from synapse.rest.client import auth_issuer @@ -50,10 +51,27 @@ class AuthIssuerTestCase(HomeserverTestCase): } ) def test_returns_issuer_when_oidc_enabled(self) -> None: - # Make an unauthenticated request for the discovery info. + # Patch the HTTP client to return the issuer metadata + req_mock = AsyncMock(return_value={"issuer": ISSUER}) + self.hs.get_proxied_http_client().get_json = req_mock # type: ignore[method-assign] + channel = self.make_request( "GET", "/_matrix/client/unstable/org.matrix.msc2965/auth_issuer", ) + self.assertEqual(channel.code, HTTPStatus.OK) self.assertEqual(channel.json_body, {"issuer": ISSUER}) + + req_mock.assert_called_with("https://account.example.com/.well-known/openid-configuration") + req_mock.reset_mock() + + # Second call it should use the cached value + channel = self.make_request( + "GET", + "/_matrix/client/unstable/org.matrix.msc2965/auth_issuer", + ) + + self.assertEqual(channel.code, HTTPStatus.OK) + self.assertEqual(channel.json_body, {"issuer": ISSUER}) + req_mock.assert_not_called() diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py index e166c13bc1..ac992766e8 100644 --- a/tests/rest/test_well_known.py +++ b/tests/rest/test_well_known.py @@ -17,6 +17,8 @@ # [This file includes modifications made by New Vector Limited] # # +from unittest.mock import AsyncMock + from twisted.web.resource import Resource from synapse.rest.well_known import well_known_resource @@ -112,7 +114,6 @@ class WellKnownTests(unittest.HomeserverTestCase): "msc3861": { "enabled": True, "issuer": "https://issuer", - "account_management_url": "https://my-account.issuer", "client_id": "id", "client_auth_method": "client_secret_post", "client_secret": "secret", @@ -122,18 +123,26 @@ class WellKnownTests(unittest.HomeserverTestCase): } ) def test_client_well_known_msc3861_oauth_delegation(self) -> None: - channel = self.make_request( - "GET", "/.well-known/matrix/client", shorthand=False - ) + # Patch the HTTP client to return the issuer metadata + req_mock = AsyncMock(return_value={"issuer": "https://issuer", "account_management_uri": "https://my-account.issuer"}) + self.hs.get_proxied_http_client().get_json = req_mock # type: ignore[method-assign] - self.assertEqual(channel.code, 200) - self.assertEqual( - channel.json_body, - { - "m.homeserver": {"base_url": "https://homeserver/"}, - "org.matrix.msc2965.authentication": { - "issuer": "https://issuer", - "account": "https://my-account.issuer", + for _ in range(2): + channel = self.make_request( + "GET", "/.well-known/matrix/client", shorthand=False + ) + + self.assertEqual(channel.code, 200) + self.assertEqual( + channel.json_body, + { + "m.homeserver": {"base_url": "https://homeserver/"}, + "org.matrix.msc2965.authentication": { + "issuer": "https://issuer", + "account": "https://my-account.issuer", + }, }, - }, - ) + ) + + # It should have been called exactly once, because it gets cached + req_mock.assert_called_once_with("https://issuer/.well-known/openid-configuration") From 26c1330764911262a6ee16069b17ba27d33871a9 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 30 Aug 2024 10:07:46 +0200 Subject: [PATCH 05/15] Replace isort and black with ruff 
(#17620)

Ruff now has decent parity with black and isort, so this is going to
just save us a bunch of time.
---
 .github/workflows/fix_lint.yaml            |  14 +-
 .github/workflows/tests.yml                |  11 +-
 changelog.d/17620.misc                     |   1 +
 docs/code_style.md                         |   4 +-
 poetry.lock                                | 126 ++--------
 pylint.cfg                                 | 280 ---------------------
 pyproject.toml                             |  38 ++-
 scripts-dev/lint.sh                        |   9 +-
 synapse/rest/key/v2/remote_key_resource.py |   2 +-
 synapse/storage/_base.py                   |   7 +-
 synmark/__init__.py                        |   4 +-
 11 files changed, 56 insertions(+), 440 deletions(-)
 create mode 100644 changelog.d/17620.misc
 delete mode 100644 pylint.cfg

diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml
index f1e35fcd99..5970b4e826 100644
--- a/.github/workflows/fix_lint.yaml
+++ b/.github/workflows/fix_lint.yaml
@@ -29,17 +29,9 @@ jobs:
         with:
           install-project: "false"
 
-      - name: Import order (isort)
+      - name: Run ruff
         continue-on-error: true
-        run: poetry run isort .
-
-      - name: Code style (black)
-        continue-on-error: true
-        run: poetry run black .
-
-      - name: Semantic checks (ruff)
-        continue-on-error: true
-        run: poetry run ruff --fix .
+        run: poetry run ruff check --fix .
 
       - run: cargo clippy --all-features --fix -- -D warnings
         continue-on-error: true
@@ -49,4 +41,4 @@ jobs:
 
       - uses: stefanzweifel/git-auto-commit-action@v5
         with:
-          commit_message: "Attempt to fix linting"
+          commit_message: "Attempt to fix linting"
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 730f8552d9..add046ec6a 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -131,15 +131,8 @@ jobs:
         with:
           install-project: "false"
 
-      - name: Import order (isort)
-        run: poetry run isort --check --diff .
-
-      - name: Code style (black)
-        run: poetry run black --check --diff .
-
-      - name: Semantic checks (ruff)
-        # --quiet suppresses the update check.
-        run: poetry run ruff check --quiet .
+      - name: Check style
+        run: poetry run ruff check --output-format=github .
 
   lint-mypy:
     runs-on: ubuntu-latest
diff --git a/changelog.d/17620.misc b/changelog.d/17620.misc
new file mode 100644
index 0000000000..f583cdcb38
--- /dev/null
+++ b/changelog.d/17620.misc
@@ -0,0 +1 @@
+Replace `isort` and `black` with `ruff`.
diff --git a/docs/code_style.md b/docs/code_style.md
index 026001b8a3..c28aaadad0 100644
--- a/docs/code_style.md
+++ b/docs/code_style.md
@@ -8,9 +8,7 @@ errors in code.
 
 The necessary tools are:
 
-- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
-- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
-- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors; and
+- [ruff](https://github.com/charliermarsh/ruff), which can spot common errors and enforce a consistent style; and
 - [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
 
 See [the contributing guide](development/contributing_guide.md#run-the-linters) for instructions
diff --git a/poetry.lock b/poetry.lock
index 0fffa8e9ba..731adff9d4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -105,52 +105,6 @@ files = [
 tests = ["pytest (>=3.2.1,!=3.3.0)"]
 typecheck = ["mypy"]
 
-[[package]]
-name = "black"
-version = "24.8.0"
-description = "The uncompromising code formatter."
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, - {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, - {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, - {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, - {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, - {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, - {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, - {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, - {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, - {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, - {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, - {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, - {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, - {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, - {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, - {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, - {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, - {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, - {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, - {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, - {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, - {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 
-typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "bleach" version = "6.1.0" @@ -832,20 +786,6 @@ tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] scripts = ["click (>=6.0)"] -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - [[package]] name = "jaeger-client" version = "4.8.0" @@ -1494,17 +1434,6 @@ files = [ [package.extras] dev = ["jinja2"] -[[package]] -name = "pathspec" -version = "0.11.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, -] - [[package]] name = "phonenumbers" version = "8.13.44" @@ -1638,21 +1567,6 @@ files = [ {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] -[[package]] -name = "platformdirs" -version = "3.1.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, - {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, -] - -[package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] - [[package]] name = "prometheus-client" version = "0.20.0" @@ -2354,29 +2268,29 @@ files = [ [[package]] name = "ruff" -version = "0.5.5" +version = "0.6.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = "sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"}, - {file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"}, - {file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"}, - {file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"}, - {file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"}, - {file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"}, - {file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"}, + {file = "ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c"}, + {file = "ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570"}, + {file = "ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1"}, + {file = "ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c"}, + {file = "ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56"}, + {file = "ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da"}, + {file = "ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2"}, + {file = "ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9"}, + {file = "ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be"}, ] [[package]] @@ -3190,4 +3104,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "c165cdc1f6612c9f1b5bfd8063c23e2d595d717dd8ac1a468519e902be2cdf93" +content-hash = "2bf09e2b68f3abd1a0f9ff2227eb3026ac3d034845acfc120d0b1cb8167ea43b" diff --git a/pylint.cfg b/pylint.cfg deleted file mode 100644 index 2368997112..0000000000 --- a/pylint.cfg +++ /dev/null @@ -1,280 +0,0 @@ -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Profiled execution. -profile=no - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Pickle collected data for later comparisons. -persistent=yes - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - - -[MESSAGES CONTROL] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. See also the "--disable" option for examples. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". 
If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=missing-docstring - - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html. You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=yes - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Add a comment according to your evaluation note. This is used by the global -# evaluation report (RP0004). -comment=no - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). -ignored-classes=SQLObject - -# When zope mode is activated, add a predefined set of Zope acquired attributes -# to generated-members. -zope=no - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E0201 when accessed. Python regular -# expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the beginning of the name of dummy variables -# (i.e. not used). -dummy-variables-rgx=_$|dummy - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. 
-additional-builtins= - - -[BASIC] - -# Required attributes for module, separated by a comma -required-attributes= - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -function-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct attribute names in class -# bodies -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression which should only match correct list comprehension / -# generator expression variable names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=__.*__ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=80 - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - -# List of optional constructs for which whitespace checking is disabled -no-space-check=trailing-comma,dict-separator - -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.* - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. 
internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[CLASSES] - -# List of interface methods to ignore, separated by a comma. This is used for -# instance to not check methods defines in Zope's Interface base class. -ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/pyproject.toml b/pyproject.toml index b31eca75ec..058c35c829 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,14 +34,9 @@ name = "Internal Changes" showcontent = true -[tool.black] -target-version = ['py38', 'py39', 'py310', 'py311'] -# black ignores everything in .gitignore by default, see -# https://black.readthedocs.io/en/stable/usage_and_configuration/file_collection_and_discovery.html#gitignore -# Use `extend-exclude` if you want to exclude something in addition to this. - [tool.ruff] line-length = 88 +target-version = "py38" [tool.ruff.lint] # See https://beta.ruff.rs/docs/rules/#error-e @@ -63,6 +58,8 @@ select = [ "W", # pyflakes "F", + # isort + "I001", # flake8-bugbear "B0", # flake8-comprehensions @@ -79,17 +76,20 @@ select = [ "EXE", ] -[tool.isort] -line_length = 88 -sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TWISTED", "FIRSTPARTY", "TESTS", "LOCALFOLDER"] -default_section = "THIRDPARTY" -known_first_party = ["synapse"] -known_tests = ["tests"] -known_twisted = ["twisted", "OpenSSL"] -multi_line_output = 3 -include_trailing_comma = true -combine_as_imports = true -skip_gitignore = true +[tool.ruff.lint.isort] +combine-as-imports = true +section-order = ["future", "standard-library", "third-party", "twisted", "first-party", "testing", "local-folder"] +known-first-party = ["synapse"] + +[tool.ruff.lint.isort.sections] +twisted = ["twisted", "OpenSSL"] +testing = ["tests"] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" [tool.maturin] manifest-path = "rust/Cargo.toml" @@ -320,9 +320,7 @@ all = [ # failing on new releases. Keeping lower bounds loose here means that dependabot # can bump versions without having to update the content-hash in the lockfile. # This helps prevents merge conflicts when running a batch of dependabot updates. 
-isort = ">=5.10.1" -black = ">=22.7.0" -ruff = "0.5.5" +ruff = "0.6.2" # Type checking only works with the pydantic.v1 compat module from pydantic v2 pydantic = "^2" diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh index 8acf0a6fb8..fa6ff90708 100755 --- a/scripts-dev/lint.sh +++ b/scripts-dev/lint.sh @@ -1,8 +1,9 @@ #!/usr/bin/env bash # # Runs linting scripts over the local Synapse checkout -# black - opinionated code formatter # ruff - lints and finds mistakes +# mypy - typechecks python code +# cargo clippy - lints rust code set -e @@ -101,12 +102,6 @@ echo # Print out the commands being run set -x -# Ensure the sort order of imports. -isort "${files[@]}" - -# Ensure Python code conforms to an opinionated style. -python3 -m black "${files[@]}" - # Ensure the sample configuration file conforms to style checks. ./scripts-dev/config-lint.sh diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index a411ed614e..1975ebb477 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -28,7 +28,7 @@ from synapse._pydantic_compat import HAS_PYDANTIC_V2 if TYPE_CHECKING or HAS_PYDANTIC_V2: from pydantic.v1 import Extra, StrictInt, StrictStr else: - from pydantic import StrictInt, StrictStr, Extra + from pydantic import Extra, StrictInt, StrictStr from signedjson.sign import sign_json diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index e12ab94576..1ac85ad66d 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -23,8 +23,11 @@ import logging from abc import ABCMeta from typing import TYPE_CHECKING, Any, Collection, Dict, Iterable, Optional, Union -from synapse.storage.database import make_in_list_sql_clause # noqa: F401; noqa: F401 -from synapse.storage.database import DatabasePool, LoggingDatabaseConnection +from synapse.storage.database import ( + DatabasePool, + LoggingDatabaseConnection, + make_in_list_sql_clause, # noqa: F401 +) from synapse.types import get_domain_from_id from synapse.util import json_decoder from synapse.util.caches.descriptors import CachedFunction diff --git a/synmark/__init__.py b/synmark/__init__.py index 8c47e50c7c..887fec2f96 100644 --- a/synmark/__init__.py +++ b/synmark/__init__.py @@ -27,7 +27,9 @@ from synapse.types import ISynapseReactor try: from twisted.internet.epollreactor import EPollReactor as Reactor except ImportError: - from twisted.internet.pollreactor import PollReactor as Reactor # type: ignore[assignment] + from twisted.internet.pollreactor import ( # type: ignore[assignment] + PollReactor as Reactor, + ) from twisted.internet.main import installReactor From 7b75922020f308a30f0d93c56bd37d0560aba8bd Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 30 Aug 2024 15:35:18 +0100 Subject: [PATCH 06/15] 1.114.0rc2 --- CHANGES.md | 42 +++++++++++++++++++++++++++++++++++++++ changelog.d/17194.bugfix | 1 - changelog.d/17407.misc | 1 - changelog.d/17509.feature | 1 - changelog.d/17532.bugfix | 1 - changelog.d/17543.bugfix | 1 - changelog.d/17595.misc | 1 - changelog.d/17599.misc | 1 - changelog.d/17600.misc | 1 - changelog.d/17604.misc | 1 - changelog.d/17606.misc | 1 - changelog.d/17608.feature | 1 - changelog.d/17617.misc | 1 - changelog.d/17620.misc | 1 - changelog.d/17622.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 17 files changed, 49 insertions(+), 15 deletions(-) delete mode 100644 changelog.d/17194.bugfix delete mode 100644 changelog.d/17407.misc delete mode 100644 changelog.d/17509.feature 
delete mode 100644 changelog.d/17532.bugfix delete mode 100644 changelog.d/17543.bugfix delete mode 100644 changelog.d/17595.misc delete mode 100644 changelog.d/17599.misc delete mode 100644 changelog.d/17600.misc delete mode 100644 changelog.d/17604.misc delete mode 100644 changelog.d/17606.misc delete mode 100644 changelog.d/17608.feature delete mode 100644 changelog.d/17617.misc delete mode 100644 changelog.d/17620.misc delete mode 100644 changelog.d/17622.misc diff --git a/CHANGES.md b/CHANGES.md index 0a57cfb906..16995224e7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,45 @@ +# Synapse 1.114.0rc2 (2024-08-30) + +### Features + +- Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support. ([\#17509](https://github.com/element-hq/synapse/issues/17509)) +- Make `hash_password` accept password input from stdin. ([\#17608](https://github.com/element-hq/synapse/issues/17608)) + +### Bugfixes + +- Fix hierarchy returning 403 when room is accessible through federation. Contributed by Krishan (@kfiven). ([\#17194](https://github.com/element-hq/synapse/issues/17194)) +- Fix content-length on federation /thumbnail responses. ([\#17532](https://github.com/element-hq/synapse/issues/17532)) +- Fix authenticated media responses using a wrong limit when following redirects over federation. ([\#17543](https://github.com/element-hq/synapse/issues/17543)) + +### Internal Changes + +- MSC3861: load the issuer and account management URLs from OIDC discovery. ([\#17407](https://github.com/element-hq/synapse/issues/17407)) +- Refactor sliding sync class into multiple files. ([\#17595](https://github.com/element-hq/synapse/issues/17595)) +- Store sliding sync per-connection state in the database. ([\#17599](https://github.com/element-hq/synapse/issues/17599)) +- Make the sliding sync `PerConnectionState` class immutable. ([\#17600](https://github.com/element-hq/synapse/issues/17600)) +- Add support to `@tag_args` for standalone functions. ([\#17604](https://github.com/element-hq/synapse/issues/17604)) +- Speed up incremental syncs in sliding sync by adding some more caching. ([\#17606](https://github.com/element-hq/synapse/issues/17606)) +- Always return the user's own read receipts in sliding sync. ([\#17617](https://github.com/element-hq/synapse/issues/17617)) +- Replace `isort` and `black with `ruff`. ([\#17620](https://github.com/element-hq/synapse/issues/17620)) +- Refactor sliding sync code to move room list logic out into a separate class. ([\#17622](https://github.com/element-hq/synapse/issues/17622)) + + + +### Updates to locked dependencies + +* Bump attrs from 23.2.0 to 24.2.0. ([\#17609](https://github.com/element-hq/synapse/issues/17609)) +* Bump cryptography from 42.0.8 to 43.0.0. ([\#17584](https://github.com/element-hq/synapse/issues/17584)) +* Bump phonenumbers from 8.13.43 to 8.13.44. ([\#17610](https://github.com/element-hq/synapse/issues/17610)) +* Bump pygithub from 2.3.0 to 2.4.0. ([\#17612](https://github.com/element-hq/synapse/issues/17612)) +* Bump pyyaml from 6.0.1 to 6.0.2. ([\#17611](https://github.com/element-hq/synapse/issues/17611)) +* Bump sentry-sdk from 2.12.0 to 2.13.0. ([\#17585](https://github.com/element-hq/synapse/issues/17585)) +* Bump serde from 1.0.206 to 1.0.208. ([\#17581](https://github.com/element-hq/synapse/issues/17581)) +* Bump serde from 1.0.208 to 1.0.209. 
([\#17613](https://github.com/element-hq/synapse/issues/17613)) +* Bump serde_json from 1.0.124 to 1.0.125. ([\#17582](https://github.com/element-hq/synapse/issues/17582)) +* Bump serde_json from 1.0.125 to 1.0.127. ([\#17614](https://github.com/element-hq/synapse/issues/17614)) +* Bump types-jsonschema from 4.23.0.20240712 to 4.23.0.20240813. ([\#17583](https://github.com/element-hq/synapse/issues/17583)) +* Bump types-setuptools from 71.1.0.20240726 to 71.1.0.20240818. ([\#17586](https://github.com/element-hq/synapse/issues/17586)) + # Synapse 1.114.0rc1 (2024-08-20) ### Features diff --git a/changelog.d/17194.bugfix b/changelog.d/17194.bugfix deleted file mode 100644 index 29ac56ceac..0000000000 --- a/changelog.d/17194.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix hierarchy returning 403 when room is accessible through federation. Contributed by Krishan (@kfiven). diff --git a/changelog.d/17407.misc b/changelog.d/17407.misc deleted file mode 100644 index 9ed6e61a5b..0000000000 --- a/changelog.d/17407.misc +++ /dev/null @@ -1 +0,0 @@ -MSC3861: load the issuer and account management URLs from OIDC discovery. diff --git a/changelog.d/17509.feature b/changelog.d/17509.feature deleted file mode 100644 index 6d639ceb98..0000000000 --- a/changelog.d/17509.feature +++ /dev/null @@ -1 +0,0 @@ -Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support. diff --git a/changelog.d/17532.bugfix b/changelog.d/17532.bugfix deleted file mode 100644 index 5b05f0f9ba..0000000000 --- a/changelog.d/17532.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix content-length on federation /thumbnail responses. diff --git a/changelog.d/17543.bugfix b/changelog.d/17543.bugfix deleted file mode 100644 index 152b305e58..0000000000 --- a/changelog.d/17543.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix authenticated media responses using a wrong limit when following redirects over federation. \ No newline at end of file diff --git a/changelog.d/17595.misc b/changelog.d/17595.misc deleted file mode 100644 index c8e040d87c..0000000000 --- a/changelog.d/17595.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor sliding sync class into multiple files. diff --git a/changelog.d/17599.misc b/changelog.d/17599.misc deleted file mode 100644 index 2f81356d12..0000000000 --- a/changelog.d/17599.misc +++ /dev/null @@ -1 +0,0 @@ -Store sliding sync per-connection state in the database. diff --git a/changelog.d/17600.misc b/changelog.d/17600.misc deleted file mode 100644 index a81c67f6d1..0000000000 --- a/changelog.d/17600.misc +++ /dev/null @@ -1 +0,0 @@ -Make the sliding sync `PerConnectionState` class immutable. diff --git a/changelog.d/17604.misc b/changelog.d/17604.misc deleted file mode 100644 index 96cb213bbd..0000000000 --- a/changelog.d/17604.misc +++ /dev/null @@ -1 +0,0 @@ -Add support to `@tag_args` for standalone functions. diff --git a/changelog.d/17606.misc b/changelog.d/17606.misc deleted file mode 100644 index 47634b1305..0000000000 --- a/changelog.d/17606.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up incremental syncs in sliding sync by adding some more caching. diff --git a/changelog.d/17608.feature b/changelog.d/17608.feature deleted file mode 100644 index adf9ac5533..0000000000 --- a/changelog.d/17608.feature +++ /dev/null @@ -1 +0,0 @@ -Make `hash_password` accept password input from stdin. 
\ No newline at end of file diff --git a/changelog.d/17617.misc b/changelog.d/17617.misc deleted file mode 100644 index ba05648965..0000000000 --- a/changelog.d/17617.misc +++ /dev/null @@ -1 +0,0 @@ -Always return the user's own read receipts in sliding sync. diff --git a/changelog.d/17620.misc b/changelog.d/17620.misc deleted file mode 100644 index f583cdcb38..0000000000 --- a/changelog.d/17620.misc +++ /dev/null @@ -1 +0,0 @@ -Replace `isort` and `black with `ruff`. diff --git a/changelog.d/17622.misc b/changelog.d/17622.misc deleted file mode 100644 index af064f7e13..0000000000 --- a/changelog.d/17622.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor sliding sync code to move room list logic out into a separate class. diff --git a/debian/changelog b/debian/changelog index f32dcc0450..b09a914d84 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium + + * New Synapse release 1.114.0rc2. + + -- Synapse Packaging team Fri, 30 Aug 2024 15:35:13 +0100 + matrix-synapse-py3 (1.114.0~rc1) stable; urgency=medium * New synapse release 1.114.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 058c35c829..bbe5eb791b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.114.0rc1" +version = "1.114.0rc2" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 1caff7552654337059b783e73ff8d917ba129c74 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 30 Aug 2024 15:36:52 +0100 Subject: [PATCH 07/15] Fixup changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 16995224e7..24c5c72206 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,7 +8,7 @@ ### Bugfixes - Fix hierarchy returning 403 when room is accessible through federation. Contributed by Krishan (@kfiven). ([\#17194](https://github.com/element-hq/synapse/issues/17194)) -- Fix content-length on federation /thumbnail responses. ([\#17532](https://github.com/element-hq/synapse/issues/17532)) +- Fix content-length on federation `/thumbnail` responses. ([\#17532](https://github.com/element-hq/synapse/issues/17532)) - Fix authenticated media responses using a wrong limit when following redirects over federation. ([\#17543](https://github.com/element-hq/synapse/issues/17543)) ### Internal Changes From a5a454fc35d41f0631ecbe357d3ba76c65bac36e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 30 Aug 2024 15:39:53 +0100 Subject: [PATCH 08/15] Fixup changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 24c5c72206..f160105c95 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -3,7 +3,7 @@ ### Features - Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support. ([\#17509](https://github.com/element-hq/synapse/issues/17509)) -- Make `hash_password` accept password input from stdin. ([\#17608](https://github.com/element-hq/synapse/issues/17608)) +- Make `hash_password` script accept password input from stdin. 
([\#17608](https://github.com/element-hq/synapse/issues/17608)) ### Bugfixes From da58e55a0bd4169cc14c3b55deec3b84ce23efa3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 29 Aug 2024 13:26:17 +0100 Subject: [PATCH 09/15] Fix starting non-media repos (#17626) Regressed in #17543. The `max_download_size` config is not available on workers that don't load the media repo. Besides, we should honour the max_size param that was passed into the function. --- changelog.d/17626.bugfix | 1 + synapse/http/matrixfederationclient.py | 6 ++---- 2 files changed, 3 insertions(+), 4 deletions(-) create mode 100644 changelog.d/17626.bugfix diff --git a/changelog.d/17626.bugfix b/changelog.d/17626.bugfix new file mode 100644 index 0000000000..1dbb2a2f45 --- /dev/null +++ b/changelog.d/17626.bugfix @@ -0,0 +1 @@ +Fix authenticated media responses using a wrong limit when following redirects over federation. diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 12c41c39e9..ecbbb6cfc4 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -464,8 +464,6 @@ class MatrixFederationHttpClient: self.max_long_retries = hs.config.federation.max_long_retries self.max_short_retries = hs.config.federation.max_short_retries - self.max_download_size = hs.config.media.max_upload_size - self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor)) self._sleeper = AwakenableSleeper(self.reactor) @@ -1759,9 +1757,9 @@ class MatrixFederationHttpClient: str_url, ) # We don't know how large the response will be upfront, so limit it to - # the `max_upload_size` config value. + # the `max_size` config value. length, headers, _, _ = await self._simple_http_client.get_file( - str_url, output_stream, self.max_download_size + str_url, output_stream, max_size ) logger.info( From d6125c583d774cae88af189ec1e98256fdaec92f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 30 Aug 2024 16:38:08 +0100 Subject: [PATCH 10/15] 1.114.0rc3 --- CHANGES.md | 9 +++++++++ changelog.d/17626.bugfix | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/17626.bugfix diff --git a/CHANGES.md b/CHANGES.md index f160105c95..24253486fa 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +# Synapse 1.114.0rc3 (2024-08-30) + +### Bugfixes + +- Fix authenticated media responses using a wrong limit when following redirects over federation. ([\#17626](https://github.com/element-hq/synapse/issues/17626)) + + + + # Synapse 1.114.0rc2 (2024-08-30) ### Features diff --git a/changelog.d/17626.bugfix b/changelog.d/17626.bugfix deleted file mode 100644 index 1dbb2a2f45..0000000000 --- a/changelog.d/17626.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix authenticated media responses using a wrong limit when following redirects over federation. diff --git a/debian/changelog b/debian/changelog index b09a914d84..32a2332c4f 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium + + * New Synapse release 1.114.0rc3. + + -- Synapse Packaging team Fri, 30 Aug 2024 16:38:05 +0100 + matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium * New Synapse release 1.114.0rc2. 
diff --git a/pyproject.toml b/pyproject.toml index bbe5eb791b..bc88ca9c60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.114.0rc2" +version = "1.114.0rc3" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 966a50bb630290b079dfb56cd0e032f7492211f5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 30 Aug 2024 16:38:53 +0100 Subject: [PATCH 11/15] Fixup changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 24253486fa..a8d3a917e2 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,7 +2,7 @@ ### Bugfixes -- Fix authenticated media responses using a wrong limit when following redirects over federation. ([\#17626](https://github.com/element-hq/synapse/issues/17626)) +- Fix regression in v1.114.0rc2 that caused workers to fail to start. ([\#17626](https://github.com/element-hq/synapse/issues/17626)) From f729ef08c90b7cd337074b267a7b2768e44a7b6c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Sep 2024 15:09:04 +0100 Subject: [PATCH 12/15] Enable sliding sync support by default (#17648) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/17648.feature | 1 + synapse/config/experimental.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelog.d/17648.feature diff --git a/changelog.d/17648.feature b/changelog.d/17648.feature new file mode 100644 index 0000000000..49439179da --- /dev/null +++ b/changelog.d/17648.feature @@ -0,0 +1 @@ +Enable native sliding sync support ([MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) and [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186)) by default. diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index bae9cc8047..5d99c201a7 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -338,8 +338,10 @@ class ExperimentalConfig(Config): # MSC3391: Removing account data. self.msc3391_enabled = experimental.get("msc3391_enabled", False) - # MSC3575 (Sliding Sync API endpoints) - self.msc3575_enabled: bool = experimental.get("msc3575_enabled", False) + # MSC3575 (Sliding Sync) alternate endpoints, c.f. MSC4186. + # + # This is enabled by default as a replacement for the sliding sync proxy. + self.msc3575_enabled: bool = experimental.get("msc3575_enabled", True) # MSC3773: Thread notifications self.msc3773_enabled: bool = experimental.get("msc3773_enabled", False) From ac27c9e46a9c518df354f17c1ad64bea4839e324 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Sep 2024 15:14:57 +0100 Subject: [PATCH 13/15] 1.114.0 --- CHANGES.md | 9 +++++++++ changelog.d/17648.feature | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/17648.feature diff --git a/CHANGES.md b/CHANGES.md index a8d3a917e2..12c08f821e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +# Synapse 1.114.0 (2024-09-02) + +### Features + +- Enable native sliding sync support ([MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) and [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186)) by default. 
([\#17648](https://github.com/element-hq/synapse/issues/17648)) + + + + # Synapse 1.114.0rc3 (2024-08-30) ### Bugfixes diff --git a/changelog.d/17648.feature b/changelog.d/17648.feature deleted file mode 100644 index 49439179da..0000000000 --- a/changelog.d/17648.feature +++ /dev/null @@ -1 +0,0 @@ -Enable native sliding sync support ([MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) and [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186)) by default. diff --git a/debian/changelog b/debian/changelog index 32a2332c4f..dfb48edc49 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.114.0) stable; urgency=medium + + * New Synapse release 1.114.0. + + -- Synapse Packaging team Mon, 02 Sep 2024 15:14:53 +0100 + matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium * New Synapse release 1.114.0rc3. diff --git a/pyproject.toml b/pyproject.toml index bc88ca9c60..69a82b8e1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.114.0rc3" +version = "1.114.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 6722adf04e8daad3e387095805ecb4150254932f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Sep 2024 16:27:13 +0100 Subject: [PATCH 14/15] Update changelog --- CHANGES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 12c08f821e..943dd6b67f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,11 @@ # Synapse 1.114.0 (2024-09-02) +This release enables support for +[MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) — +Simplified Sliding Sync. This allows using the upcoming releases of the Element +X mobile apps without having to run a Sliding Sync Proxy. + + ### Features - Enable native sliding sync support ([MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) and [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186)) by default. ([\#17648](https://github.com/element-hq/synapse/issues/17648)) From 5eec67b6ef4b76caa2324a80e01e361bfa84a929 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Sep 2024 17:08:34 +0100 Subject: [PATCH 15/15] Fix changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 943dd6b67f..d3cec9cc15 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -44,7 +44,7 @@ X mobile apps without having to run a Sliding Sync Proxy. - Add support to `@tag_args` for standalone functions. ([\#17604](https://github.com/element-hq/synapse/issues/17604)) - Speed up incremental syncs in sliding sync by adding some more caching. ([\#17606](https://github.com/element-hq/synapse/issues/17606)) - Always return the user's own read receipts in sliding sync. ([\#17617](https://github.com/element-hq/synapse/issues/17617)) -- Replace `isort` and `black with `ruff`. ([\#17620](https://github.com/element-hq/synapse/issues/17620)) +- Replace `isort` and `black` with `ruff`. ([\#17620](https://github.com/element-hq/synapse/issues/17620)) - Refactor sliding sync code to move room list logic out into a separate class. ([\#17622](https://github.com/element-hq/synapse/issues/17622))
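
A closing note on the headline change in this series: patch 12/15 flips the
default of `msc3575_enabled` to `True`, so from 1.114.0 onwards the sliding
sync endpoints are served without any extra configuration. A deployment that
wants the previous opt-in behaviour can set the flag back explicitly. The
fragment below is a minimal sketch, assuming the standard
`experimental_features` section of `homeserver.yaml` (the section name does
not appear in the diffs above; only the `msc3575_enabled` key read in
`synapse/config/experimental.py` does):

    # homeserver.yaml (fragment): restore the pre-1.114.0 opt-in behaviour
    # by overriding the new default from synapse/config/experimental.py
    experimental_features:
      msc3575_enabled: false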