# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
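"""Tests for Synapse's state handling.

These tests drive StateHandler.compute_event_context() and state resolution
over small hand-built event graphs, with the in-memory _DummyStore below
standing in for the real state storage.
"""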
from typing import (
    Any,
    Collection,
    Dict,
    Generator,
    Iterable,
    Iterator,
    List,
    Optional,
    Set,
    Tuple,
    cast,
)
from unittest.mock import Mock

from twisted.internet import defer

from synapse.api.auth.internal import InternalAuth
from synapse.api.constants import EventTypes, Membership
from synapse.api.room_versions import RoomVersions
from synapse.events import EventBase, make_event_from_dict
from synapse.events.snapshot import EventContext
from synapse.state import StateHandler, StateResolutionHandler, _make_state_cache_entry
from synapse.types import MutableStateMap, StateMap
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.macaroons import MacaroonGenerator

from tests import unittest

from .utils import MockClock, default_config

_next_event_id = 1000


def create_event(
    name: Optional[str] = None,
    type: Optional[str] = None,
    state_key: Optional[str] = None,
    depth: int = 2,
    event_id: Optional[str] = None,
    prev_events: Optional[List[Tuple[str, dict]]] = None,
    **kwargs: Any,
) -> EventBase:
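    """Build an event for use in the tests below.

    Missing fields get placeholder values, and a fresh event ID is generated
    from the module-level counter when none is supplied.
    """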
    global _next_event_id

    if not event_id:
        _next_event_id += 1
        event_id = "$%s:test" % (_next_event_id,)

    if not name:
        if state_key is not None:
            name = "<%s-%s, %s>" % (type, state_key, event_id)
        else:
            name = "<%s, %s>" % (type, event_id)

    d = {
        "event_id": event_id,
        "type": type,
        "sender": "@user_id:example.com",
        "room_id": "!room_id:example.com",
        "depth": depth,
        "prev_events": prev_events or [],
    }

    if state_key is not None:
        d["state_key"] = state_key

    d.update(kwargs)

    return make_event_from_dict(d)


class _DummyStore:
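    """A minimal in-memory stand-in for the state storage used by StateHandler.

    State groups and events are kept in plain dicts; only the methods the
    tests exercise are implemented.
    """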
    def __init__(self) -> None:
        self._event_to_state_group: Dict[str, int] = {}
        self._group_to_state: Dict[int, MutableStateMap[str]] = {}

        self._event_id_to_event: Dict[str, EventBase] = {}

        self._next_group = 1

    async def get_state_groups_ids(
        self, room_id: str, event_ids: Collection[str]
    ) -> Dict[int, MutableStateMap[str]]:
        groups = {}
        for event_id in event_ids:
            group = self._event_to_state_group.get(event_id)
            if group:
                groups[group] = self._group_to_state[group]

        return groups

    async def get_state_ids_for_group(
        self, state_group: int, state_filter: Optional[StateFilter] = None
    ) -> MutableStateMap[str]:
        return self._group_to_state[state_group]

    async def store_state_group(
        self,
        event_id: str,
        room_id: str,
        prev_group: Optional[int],
        delta_ids: Optional[StateMap[str]],
        current_state_ids: Optional[StateMap[str]],
    ) -> int:
        state_group = self._next_group
        self._next_group += 1

        if current_state_ids is None:
            assert prev_group is not None
            assert delta_ids is not None
            current_state_ids = dict(self._group_to_state[prev_group])
            current_state_ids.update(delta_ids)

        self._group_to_state[state_group] = dict(current_state_ids)

        return state_group

    async def get_events(
        self, event_ids: Collection[str], **kwargs: Any
    ) -> Dict[str, EventBase]:
        return {
            e_id: self._event_id_to_event[e_id]
            for e_id in event_ids
            if e_id in self._event_id_to_event
        }

    async def get_partial_state_events(
        self, event_ids: Collection[str]
    ) -> Dict[str, bool]:
        return {e: False for e in event_ids}

    async def get_state_group_delta(
        self, name: str
    ) -> Tuple[Optional[int], Optional[StateMap[str]]]:
        return None, None

    def register_events(self, events: Iterable[EventBase]) -> None:
        for e in events:
            self._event_id_to_event[e.event_id] = e

    def register_event_context(self, event: EventBase, context: EventContext) -> None:
        assert context.state_group is not None
        self._event_to_state_group[event.event_id] = context.state_group

    def register_event_id_state_group(self, event_id: str, state_group: int) -> None:
        self._event_to_state_group[event_id] = state_group

    async def get_room_version_id(self, room_id: str) -> str:
        return RoomVersions.V1.identifier

    async def get_state_group_for_events(
        self, event_ids: Collection[str], await_full_state: bool = True
    ) -> Dict[str, int]:
        res = {}
        for event in event_ids:
            res[event] = self._event_to_state_group[event]
        return res

    async def get_state_for_groups(
        self, groups: Collection[int]
    ) -> Dict[int, MutableStateMap[str]]:
        res = {}
        for group in groups:
            state = self._group_to_state[group]
            res[group] = state
        return res


class DictObj(dict):
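    """A dict whose entries can also be read as attributes."""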
    def __init__(self, **kwargs: Any) -> None:
        super().__init__(kwargs)
        self.__dict__ = self


class Graph:
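    """A DAG of test events built from node definitions and prev-event edges.

    walk() yields the events in order of increasing depth.
    """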
    def __init__(self, nodes: Dict[str, DictObj], edges: Dict[str, List[str]]):
        events: Dict[str, EventBase] = {}
        clobbered: Set[str] = set()

        for event_id, fields in nodes.items():
            refs = edges.get(event_id)
            if refs:
                clobbered.difference_update(refs)
                prev_events: List[Tuple[str, dict]] = [(r, {}) for r in refs]
            else:
                prev_events = []

            events[event_id] = create_event(
                event_id=event_id, prev_events=prev_events, **fields
            )

        self._leaves = clobbered
        self._events = sorted(events.values(), key=lambda e: e.depth)

    def walk(self) -> Iterator[EventBase]:
        return iter(self._events)


class StateTestCase(unittest.TestCase):
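    """Tests for StateHandler.compute_event_context() and state resolution."""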
    def setUp(self) -> None:
        self.dummy_store = _DummyStore()
        storage_controllers = Mock(main=self.dummy_store, state=self.dummy_store)
        hs = Mock(
            spec_set=[
                "config",
                "get_datastores",
                "get_storage_controllers",
                "get_auth",
                "get_state_handler",
                "get_clock",
                "get_state_resolution_handler",
                "get_account_validity_handler",
                "get_macaroon_generator",
                "get_instance_name",
                "get_simple_http_client",
                "get_replication_client",
                "hostname",
            ]
        )
        clock = cast(Clock, MockClock())
        hs.config = default_config("tesths", True)
        hs.get_datastores.return_value = Mock(main=self.dummy_store)
        hs.get_state_handler.return_value = None
        hs.get_clock.return_value = clock
        hs.get_macaroon_generator.return_value = MacaroonGenerator(
            clock, "tesths", b"verysecret"
        )
        hs.get_auth.return_value = InternalAuth(hs)
        hs.get_state_resolution_handler = lambda: StateResolutionHandler(hs)
        hs.get_storage_controllers.return_value = storage_controllers

        self.state = StateHandler(hs)
        self.event_id = 0

    @defer.inlineCallbacks
    def test_branch_no_conflict(self) -> Generator[defer.Deferred, Any, None]:
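        """A simple fork and merge where the branches do not conflict on any state."""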
        graph = Graph(
            nodes={
                "START": DictObj(
                    type=EventTypes.Create, state_key="", content={}, depth=1
                ),
                "A": DictObj(type=EventTypes.Message, depth=2),
                "B": DictObj(type=EventTypes.Message, depth=3),
                "C": DictObj(type=EventTypes.Name, state_key="", depth=3),
                "D": DictObj(type=EventTypes.Message, depth=4),
            },
            edges={"A": ["START"], "B": ["A"], "C": ["A"], "D": ["B", "C"]},
        )

        self.dummy_store.register_events(graph.walk())

        context_store: Dict[str, EventContext] = {}

        for event in graph.walk():
            context = yield defer.ensureDeferred(
                self.state.compute_event_context(event)
            )
            self.dummy_store.register_event_context(event, context)
            context_store[event.event_id] = context

        ctx_c = context_store["C"]
        ctx_d = context_store["D"]

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(ctx_d.get_prev_state_ids())
        self.assertEqual(2, len(prev_state_ids))

        self.assertEqual(ctx_c.state_group, ctx_d.state_group_before_event)
        self.assertEqual(ctx_d.state_group_before_event, ctx_d.state_group)

    @defer.inlineCallbacks
    def test_branch_basic_conflict(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
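        """B and C both set the room name; C wins the resolution and provides the state before D."""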
        graph = Graph(
            nodes={
                "START": DictObj(
                    type=EventTypes.Create,
                    state_key="",
                    content={"creator": "@user_id:example.com"},
                    depth=1,
                ),
                "A": DictObj(
                    type=EventTypes.Member,
                    state_key="@user_id:example.com",
                    content={"membership": Membership.JOIN},
                    membership=Membership.JOIN,
                    depth=2,
                ),
                "B": DictObj(type=EventTypes.Name, state_key="", depth=3),
                "C": DictObj(type=EventTypes.Name, state_key="", depth=4),
                "D": DictObj(type=EventTypes.Message, depth=5),
            },
            edges={"A": ["START"], "B": ["A"], "C": ["A"], "D": ["B", "C"]},
        )

        self.dummy_store.register_events(graph.walk())

        context_store: Dict[str, EventContext] = {}

        for event in graph.walk():
            context = yield defer.ensureDeferred(
                self.state.compute_event_context(event)
            )
            self.dummy_store.register_event_context(event, context)
            context_store[event.event_id] = context

        # C ends up winning the resolution between B and C

        ctx_c = context_store["C"]
        ctx_d = context_store["D"]

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(ctx_d.get_prev_state_ids())
        self.assertSetEqual({"START", "A", "C"}, set(prev_state_ids.values()))

        self.assertEqual(ctx_c.state_group, ctx_d.state_group_before_event)
        self.assertEqual(ctx_d.state_group_before_event, ctx_d.state_group)

    @defer.inlineCallbacks
    def test_branch_have_banned_conflict(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
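        """C (a ban) wins the resolution against D (a name change by the banned user), since bans win over other changes."""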
        graph = Graph(
            nodes={
                "START": DictObj(
                    type=EventTypes.Create,
                    state_key="",
                    content={"creator": "@user_id:example.com"},
                    depth=1,
                ),
                "A": DictObj(
                    type=EventTypes.Member,
                    state_key="@user_id:example.com",
                    content={"membership": Membership.JOIN},
                    membership=Membership.JOIN,
                    depth=2,
                ),
                "B": DictObj(type=EventTypes.Name, state_key="", depth=3),
                "C": DictObj(
                    type=EventTypes.Member,
                    state_key="@user_id_2:example.com",
                    content={"membership": Membership.BAN},
                    membership=Membership.BAN,
                    depth=4,
                ),
                "D": DictObj(
                    type=EventTypes.Name,
                    state_key="",
                    depth=4,
                    sender="@user_id_2:example.com",
                ),
                "E": DictObj(type=EventTypes.Message, depth=5),
            },
            edges={"A": ["START"], "B": ["A"], "C": ["B"], "D": ["B"], "E": ["C", "D"]},
        )

        self.dummy_store.register_events(graph.walk())

        context_store: Dict[str, EventContext] = {}

        for event in graph.walk():
            context = yield defer.ensureDeferred(
                self.state.compute_event_context(event)
            )
            self.dummy_store.register_event_context(event, context)
            context_store[event.event_id] = context

        # C ends up winning the resolution between C and D because bans win over other
        # changes

        ctx_c = context_store["C"]
        ctx_e = context_store["E"]

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(ctx_e.get_prev_state_ids())
        self.assertSetEqual({"START", "A", "B", "C"}, set(prev_state_ids.values()))
        self.assertEqual(ctx_c.state_group, ctx_e.state_group_before_event)
        self.assertEqual(ctx_e.state_group_before_event, ctx_e.state_group)

    @defer.inlineCallbacks
    def test_branch_have_perms_conflict(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
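        """B (a power-level change) wins the resolution against C (a name change), since power levels win over other changes."""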
        userid1 = "@user_id:example.com"
        userid2 = "@user_id2:example.com"

        nodes = {
            "A1": DictObj(
                type=EventTypes.Create,
                state_key="",
                content={"creator": userid1},
                depth=1,
            ),
            "A2": DictObj(
                type=EventTypes.Member,
                state_key=userid1,
                content={"membership": Membership.JOIN},
                membership=Membership.JOIN,
            ),
            "A3": DictObj(
                type=EventTypes.Member,
                state_key=userid2,
                content={"membership": Membership.JOIN},
                membership=Membership.JOIN,
            ),
            "A4": DictObj(
                type=EventTypes.PowerLevels,
                state_key="",
                content={
                    "events": {"m.room.name": 50},
                    "users": {userid1: 100, userid2: 60},
                },
            ),
            "A5": DictObj(type=EventTypes.Name, state_key=""),
            "B": DictObj(
                type=EventTypes.PowerLevels,
                state_key="",
                content={"events": {"m.room.name": 50}, "users": {userid2: 30}},
            ),
            "C": DictObj(type=EventTypes.Name, state_key="", sender=userid2),
            "D": DictObj(type=EventTypes.Message),
        }
        edges = {
            "A2": ["A1"],
            "A3": ["A2"],
            "A4": ["A3"],
            "A5": ["A4"],
            "B": ["A5"],
            "C": ["A5"],
            "D": ["B", "C"],
        }
        self._add_depths(nodes, edges)
        graph = Graph(nodes, edges)

        self.dummy_store.register_events(graph.walk())

        context_store: Dict[str, EventContext] = {}

        for event in graph.walk():
            context = yield defer.ensureDeferred(
                self.state.compute_event_context(event)
            )
            self.dummy_store.register_event_context(event, context)
            context_store[event.event_id] = context

        # B ends up winning the resolution between B and C because power levels
        # win over other changes.

        ctx_b = context_store["B"]
        ctx_d = context_store["D"]

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(ctx_d.get_prev_state_ids())
        self.assertSetEqual({"A1", "A2", "A3", "A5", "B"}, set(prev_state_ids.values()))

        self.assertEqual(ctx_b.state_group, ctx_d.state_group_before_event)
        self.assertEqual(ctx_d.state_group_before_event, ctx_d.state_group)

    def _add_depths(
        self, nodes: Dict[str, DictObj], edges: Dict[str, List[str]]
    ) -> None:
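        """Fill in any missing node depths as one more than the deepest prev event."""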
        def _get_depth(ev: str) -> int:
            node = nodes[ev]
            if "depth" not in node:
                prevs = edges[ev]
                depth = max(_get_depth(prev) for prev in prevs) + 1
                node["depth"] = depth
            return node["depth"]

        for n in nodes:
            _get_depth(n)

    @defer.inlineCallbacks
    def test_annotate_with_old_message(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
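        """Computing a context with explicit prior state: a message event leaves the state unchanged."""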
        event = create_event(type="test_message", name="event")

        old_state = [
            create_event(type="test1", state_key="1"),
            create_event(type="test1", state_key="2"),
            create_event(type="test2", state_key=""),
        ]

        context: EventContext
        context = yield defer.ensureDeferred(
            self.state.compute_event_context(
                event,
                state_ids_before_event={
                    (e.type, e.state_key): e.event_id for e in old_state
                },
                partial_state=False,
            )
        )

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(context.get_prev_state_ids())
        self.assertCountEqual((e.event_id for e in old_state), prev_state_ids.values())

        current_state_ids: StateMap[str]
        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())
        self.assertCountEqual(
            (e.event_id for e in old_state), current_state_ids.values()
        )

        self.assertIsNotNone(context.state_group_before_event)
        self.assertEqual(context.state_group_before_event, context.state_group)

    @defer.inlineCallbacks
    def test_annotate_with_old_state(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
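        """Computing a context with explicit prior state: a state event is added to the current state."""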
        event = create_event(type="state", state_key="", name="event")

        old_state = [
            create_event(type="test1", state_key="1"),
            create_event(type="test1", state_key="2"),
            create_event(type="test2", state_key=""),
        ]

        context: EventContext
        context = yield defer.ensureDeferred(
            self.state.compute_event_context(
                event,
                state_ids_before_event={
                    (e.type, e.state_key): e.event_id for e in old_state
                },
                partial_state=False,
            )
        )

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(context.get_prev_state_ids())
        self.assertCountEqual((e.event_id for e in old_state), prev_state_ids.values())

        current_state_ids: StateMap[str]
        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())
        self.assertCountEqual(
            (e.event_id for e in old_state + [event]), current_state_ids.values()
        )

        self.assertIsNotNone(context.state_group_before_event)
        self.assertNotEqual(context.state_group_before_event, context.state_group)
        self.assertEqual(context.state_group_before_event, context.prev_group)
        self.assertEqual({("state", ""): event.event_id}, context.delta_ids)

    @defer.inlineCallbacks
    def test_trivial_annotate_message(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
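        """A message event with a single prev event simply reuses that event's state group."""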
        prev_event_id = "prev_event_id"
        event = create_event(
            type="test_message", name="event2", prev_events=[(prev_event_id, {})]
        )

        old_state = [
            create_event(type="test1", state_key="1"),
            create_event(type="test1", state_key="2"),
            create_event(type="test2", state_key=""),
        ]

        group_name = yield defer.ensureDeferred(
            self.dummy_store.store_state_group(
                prev_event_id,
                event.room_id,
                None,
                None,
                {(e.type, e.state_key): e.event_id for e in old_state},
            )
        )
        self.dummy_store.register_event_id_state_group(prev_event_id, group_name)

        context: EventContext
        context = yield defer.ensureDeferred(self.state.compute_event_context(event))

        current_state_ids: StateMap[str]
        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())

        self.assertEqual(
            {e.event_id for e in old_state}, set(current_state_ids.values())
        )

        self.assertEqual(group_name, context.state_group)

    @defer.inlineCallbacks
    def test_trivial_annotate_state(
        self,
    ) -> Generator["defer.Deferred[object]", Any, None]:
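        """A state event with a single prev event sees that event's stored state as its prev state."""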
        prev_event_id = "prev_event_id"
        event = create_event(
            type="state", state_key="", name="event2", prev_events=[(prev_event_id, {})]
        )

        old_state = [
            create_event(type="test1", state_key="1"),
            create_event(type="test1", state_key="2"),
            create_event(type="test2", state_key=""),
        ]

        group_name = yield defer.ensureDeferred(
            self.dummy_store.store_state_group(
                prev_event_id,
                event.room_id,
                None,
                None,
                {(e.type, e.state_key): e.event_id for e in old_state},
            )
        )
        self.dummy_store.register_event_id_state_group(prev_event_id, group_name)

        context: EventContext
        context = yield defer.ensureDeferred(self.state.compute_event_context(event))

        prev_state_ids: StateMap[str]
        prev_state_ids = yield defer.ensureDeferred(context.get_prev_state_ids())

        self.assertEqual({e.event_id for e in old_state}, set(prev_state_ids.values()))

        self.assertIsNotNone(context.state_group)

    @defer.inlineCallbacks
    def test_resolve_message_conflict(
        self,
    ) -> Generator["defer.Deferred[Any]", Any, None]:
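        """State is resolved across two prev events with conflicting state (message event)."""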
        prev_event_id1 = "event_id1"
        prev_event_id2 = "event_id2"
        event = create_event(
            type="test_message",
            name="event3",
            prev_events=[(prev_event_id1, {}), (prev_event_id2, {})],
        )

        creation = create_event(type=EventTypes.Create, state_key="")

        old_state_1 = [
            creation,
            create_event(type="test1", state_key="1"),
            create_event(type="test1", state_key="2"),
            create_event(type="test2", state_key=""),
        ]

        old_state_2 = [
            creation,
            create_event(type="test1", state_key="1"),
            create_event(type="test3", state_key="2"),
            create_event(type="test4", state_key=""),
        ]

        self.dummy_store.register_events(old_state_1)
        self.dummy_store.register_events(old_state_2)

        context: EventContext
        context = yield self._get_context(
            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
        )

        current_state_ids: StateMap[str]
        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())

        self.assertEqual(len(current_state_ids), 6)

        self.assertIsNotNone(context.state_group)

    @defer.inlineCallbacks
    def test_resolve_state_conflict(
        self,
    ) -> Generator["defer.Deferred[Any]", Any, None]:
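        """State is resolved across two prev events with conflicting state (state event)."""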
        prev_event_id1 = "event_id1"
        prev_event_id2 = "event_id2"
        event = create_event(
            type="test4",
            state_key="",
            name="event",
            prev_events=[(prev_event_id1, {}), (prev_event_id2, {})],
        )

        creation = create_event(type=EventTypes.Create, state_key="")

        old_state_1 = [
            creation,
            create_event(type="test1", state_key="1"),
            create_event(type="test1", state_key="2"),
            create_event(type="test2", state_key=""),
        ]

        old_state_2 = [
            creation,
            create_event(type="test1", state_key="1"),
            create_event(type="test3", state_key="2"),
            create_event(type="test4", state_key=""),
        ]

        store = _DummyStore()
        store.register_events(old_state_1)
        store.register_events(old_state_2)
        self.dummy_store.get_events = store.get_events  # type: ignore[assignment]

        context: EventContext
        context = yield self._get_context(
            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
        )

        current_state_ids: StateMap[str]
        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())

        self.assertEqual(len(current_state_ids), 6)

        self.assertIsNotNone(context.state_group)

    @defer.inlineCallbacks
    def test_standard_depth_conflict(
        self,
    ) -> Generator["defer.Deferred[Any]", Any, None]:
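        """When two branches conflict on the same state key, the event with the greater depth wins."""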
        prev_event_id1 = "event_id1"
        prev_event_id2 = "event_id2"
        event = create_event(
            type="test4",
            name="event",
            prev_events=[(prev_event_id1, {}), (prev_event_id2, {})],
        )

        member_event = create_event(
            type=EventTypes.Member,
            state_key="@user_id:example.com",
            content={"membership": Membership.JOIN},
        )

        power_levels = create_event(
            type=EventTypes.PowerLevels,
            state_key="",
            content={"users": {"@foo:bar": "100", "@user_id:example.com": "100"}},
        )

        creation = create_event(
            type=EventTypes.Create, state_key="", content={"creator": "@foo:bar"}
        )

        old_state_1 = [
            creation,
            power_levels,
            member_event,
            create_event(type="test1", state_key="1", depth=1),
        ]

        old_state_2 = [
            creation,
            power_levels,
            member_event,
            create_event(type="test1", state_key="1", depth=2),
        ]

        store = _DummyStore()
        store.register_events(old_state_1)
        store.register_events(old_state_2)
        self.dummy_store.get_events = store.get_events  # type: ignore[assignment]

        context: EventContext
        context = yield self._get_context(
            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
        )

        current_state_ids: StateMap[str]
        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())

        self.assertEqual(old_state_2[3].event_id, current_state_ids[("test1", "1")])

        # Reverse the depth to make sure we are actually using the depths
        # during state resolution.

        old_state_1 = [
            creation,
            power_levels,
            member_event,
            create_event(type="test1", state_key="1", depth=2),
        ]

        old_state_2 = [
            creation,
            power_levels,
            member_event,
            create_event(type="test1", state_key="1", depth=1),
        ]

        store.register_events(old_state_1)
        store.register_events(old_state_2)

        context = yield self._get_context(
            event, prev_event_id1, old_state_1, prev_event_id2, old_state_2
        )

        current_state_ids = yield defer.ensureDeferred(context.get_current_state_ids())

        self.assertEqual(old_state_1[3].event_id, current_state_ids[("test1", "1")])

    @defer.inlineCallbacks
    def _get_context(
        self,
        event: EventBase,
        prev_event_id_1: str,
        old_state_1: Collection[EventBase],
        prev_event_id_2: str,
        old_state_2: Collection[EventBase],
    ) -> Generator["defer.Deferred[object]", Any, EventContext]:
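        """Store a state group for each prev event, then compute the event's context."""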
        sg1: int
        sg1 = yield defer.ensureDeferred(
            self.dummy_store.store_state_group(
                prev_event_id_1,
                event.room_id,
                None,
                None,
                {(e.type, e.state_key): e.event_id for e in old_state_1},
            )
        )
        self.dummy_store.register_event_id_state_group(prev_event_id_1, sg1)

        sg2: int
        sg2 = yield defer.ensureDeferred(
            self.dummy_store.store_state_group(
                prev_event_id_2,
                event.room_id,
                None,
                None,
                {(e.type, e.state_key): e.event_id for e in old_state_2},
            )
        )
        self.dummy_store.register_event_id_state_group(prev_event_id_2, sg2)

        result = yield defer.ensureDeferred(self.state.compute_event_context(event))
        return result

    def test_make_state_cache_entry(self) -> None:
        "Test that calculating a prev_group and delta is correct"

        new_state = {
            ("a", ""): "E",
            ("b", ""): "E",
            ("c", ""): "E",
            ("d", ""): "E",
        }

        # old_state_1 has fewer differences to new_state than old_state_2, but
        # the delta involves deleting a key, which isn't allowed in the deltas,
        # so we should pick old_state_2 as the prev_group.

        # `old_state_1` has two differences: `a` and `e`
        old_state_1 = {
            ("a", ""): "F",
            ("b", ""): "E",
            ("c", ""): "E",
            ("d", ""): "E",
            ("e", ""): "E",
        }

        # `old_state_2` has three differences: `a`, `c` and `d`
        old_state_2 = {
            ("a", ""): "F",
            ("b", ""): "E",
            ("c", ""): "F",
            ("d", ""): "F",
        }

        entry = _make_state_cache_entry(new_state, {1: old_state_1, 2: old_state_2})

        self.assertEqual(entry.prev_group, 2)

        # There are three changes from `old_state_2` to `new_state`
        self.assertEqual(
            entry.delta_ids, {("a", ""): "E", ("c", ""): "E", ("d", ""): "E"}
        )