2016-01-06 21:26:29 -07:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2018-02-23 03:33:55 -07:00
|
|
|
# Copyright 2018 New Vector Ltd
|
2015-12-01 11:41:32 -07:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2018-07-09 00:09:20 -06:00
|
|
|
import logging
|
2022-03-15 12:06:05 -06:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Dict,
|
|
|
|
FrozenSet,
|
|
|
|
Iterable,
|
|
|
|
List,
|
2023-02-10 07:22:16 -07:00
|
|
|
Mapping,
|
2022-03-15 12:06:05 -06:00
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
cast,
|
|
|
|
)
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2020-10-05 07:28:05 -06:00
|
|
|
from synapse.api.constants import AccountDataTypes
|
2023-01-13 07:57:43 -07:00
|
|
|
from synapse.replication.tcp.streams import AccountDataStream
|
2021-12-13 09:28:10 -07:00
|
|
|
from synapse.storage._base import db_to_json
|
|
|
|
from synapse.storage.database import (
|
|
|
|
DatabasePool,
|
|
|
|
LoggingDatabaseConnection,
|
|
|
|
LoggingTransaction,
|
|
|
|
)
|
|
|
|
from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
|
2022-01-24 06:37:00 -07:00
|
|
|
from synapse.storage.databases.main.push_rule import PushRulesWorkerStore
|
2021-01-18 08:47:59 -07:00
|
|
|
from synapse.storage.engines import PostgresEngine
|
2021-12-13 09:28:10 -07:00
|
|
|
from synapse.storage.util.id_generators import (
|
|
|
|
AbstractStreamIdGenerator,
|
|
|
|
MultiWriterIdGenerator,
|
|
|
|
StreamIdGenerator,
|
|
|
|
)
|
2023-09-18 07:55:04 -06:00
|
|
|
from synapse.types import JsonDict, JsonMapping
|
2020-08-07 06:02:55 -06:00
|
|
|
from synapse.util import json_encoder
|
2021-01-07 06:03:38 -07:00
|
|
|
from synapse.util.caches.descriptors import cached
|
2018-07-09 00:09:20 -06:00
|
|
|
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2021-10-22 11:15:41 -06:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2015-12-01 11:41:32 -07:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2022-01-24 06:37:00 -07:00
|
|
|
class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore):
|
2021-12-13 09:28:10 -07:00
|
|
|
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        # True iff this process is configured as an account_data stream
        # writer; mutating methods assert this before allocating stream IDs.
        self._can_write_to_account_data = (
            self._instance_name in hs.config.worker.writers.account_data
        )

        self._account_data_id_gen: AbstractStreamIdGenerator

        if isinstance(database.engine, PostgresEngine):
            # Postgres can support several concurrent writers sharing one
            # sequence, so use the multi-writer generator there.
            self._account_data_id_gen = MultiWriterIdGenerator(
                db_conn=db_conn,
                db=database,
                notifier=hs.get_replication_notifier(),
                stream_name="account_data",
                instance_name=self._instance_name,
                # All tables that take IDs from this stream, so the generator
                # can recover the current positions on startup.
                tables=[
                    ("room_account_data", "instance_name", "stream_id"),
                    ("room_tags_revisions", "instance_name", "stream_id"),
                    ("account_data", "instance_name", "stream_id"),
                ],
                sequence_name="account_data_sequence",
                writers=hs.config.worker.writers.account_data,
            )
        else:
            # Multiple writers are not supported for SQLite.
            #
            # We shouldn't be running in worker mode with SQLite, but its useful
            # to support it for unit tests.
            self._account_data_id_gen = StreamIdGenerator(
                db_conn,
                hs.get_replication_notifier(),
                "room_account_data",
                "stream_id",
                extra_tables=[("room_tags_revisions", "stream_id")],
                is_writer=self._instance_name in hs.config.worker.writers.account_data,
            )

        # In-memory cache tracking which users have had account data (global,
        # per-room, or tags) change since a given stream position, used to
        # short-circuit "what changed?" queries without hitting the DB.
        account_max = self.get_max_account_data_stream_id()
        self._account_data_stream_cache = StreamChangeCache(
            "AccountDataAndTagsChangeCache", account_max
        )

        self.db_pool.updates.register_background_update_handler(
            "delete_account_data_for_deactivated_users",
            self._delete_account_data_for_deactivated_users,
        )
|
|
|
|
|
2021-01-18 08:47:59 -07:00
|
|
|
def get_max_account_data_stream_id(self) -> int:
|
2018-02-16 05:08:42 -07:00
|
|
|
"""Get the current max stream ID for account data stream
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
int
|
|
|
|
"""
|
2021-01-18 08:47:59 -07:00
|
|
|
return self._account_data_id_gen.get_current_token()
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2016-05-03 09:01:24 -06:00
|
|
|
@cached()
|
2023-02-10 07:22:16 -07:00
|
|
|
async def get_global_account_data_for_user(
|
2020-09-01 09:04:17 -06:00
|
|
|
self, user_id: str
|
2023-09-18 07:55:04 -06:00
|
|
|
) -> Mapping[str, JsonMapping]:
|
2022-12-31 20:40:46 -07:00
|
|
|
"""
|
2023-02-10 07:22:16 -07:00
|
|
|
Get all the global client account_data for a user.
|
2022-12-31 20:40:46 -07:00
|
|
|
|
|
|
|
If experimental MSC3391 support is enabled, any entries with an empty
|
|
|
|
content body are excluded; as this means they have been deleted.
|
2015-12-01 11:41:32 -07:00
|
|
|
|
|
|
|
Args:
|
2020-09-01 09:04:17 -06:00
|
|
|
user_id: The user to get the account_data for.
|
2023-02-10 07:22:16 -07:00
|
|
|
|
2015-12-01 11:41:32 -07:00
|
|
|
Returns:
|
2023-02-10 07:22:16 -07:00
|
|
|
The global account_data.
|
2015-12-01 11:41:32 -07:00
|
|
|
"""
|
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
def get_global_account_data_for_user(
|
2021-12-13 09:28:10 -07:00
|
|
|
txn: LoggingTransaction,
|
2023-02-10 07:22:16 -07:00
|
|
|
) -> Dict[str, JsonDict]:
|
2022-12-31 20:40:46 -07:00
|
|
|
# The 'content != '{}' condition below prevents us from using
|
|
|
|
# `simple_select_list_txn` here, as it doesn't support conditions
|
|
|
|
# other than 'equals'.
|
|
|
|
sql = """
|
|
|
|
SELECT account_data_type, content FROM account_data
|
|
|
|
WHERE user_id = ?
|
|
|
|
"""
|
|
|
|
|
|
|
|
# If experimental MSC3391 support is enabled, then account data entries
|
|
|
|
# with an empty content are considered "deleted". So skip adding them to
|
|
|
|
# the results.
|
|
|
|
if self.hs.config.experimental.msc3391_enabled:
|
|
|
|
sql += " AND content != '{}'"
|
|
|
|
|
|
|
|
txn.execute(sql, (user_id,))
|
|
|
|
rows = self.db_pool.cursor_to_dict(txn)
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
return {
|
2020-07-16 09:32:19 -06:00
|
|
|
row["account_data_type"]: db_to_json(row["content"]) for row in rows
|
2015-12-01 11:41:32 -07:00
|
|
|
}
|
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
return await self.db_pool.runInteraction(
|
|
|
|
"get_global_account_data_for_user", get_global_account_data_for_user
|
|
|
|
)
|
|
|
|
|
|
|
|
@cached()
|
|
|
|
async def get_room_account_data_for_user(
|
|
|
|
self, user_id: str
|
2023-09-18 07:55:04 -06:00
|
|
|
) -> Mapping[str, Mapping[str, JsonMapping]]:
|
2023-02-10 07:22:16 -07:00
|
|
|
"""
|
|
|
|
Get all of the per-room client account_data for a user.
|
|
|
|
|
|
|
|
If experimental MSC3391 support is enabled, any entries with an empty
|
|
|
|
content body are excluded; as this means they have been deleted.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id: The user to get the account_data for.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
A dict mapping from room_id string to per-room account_data dicts.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def get_room_account_data_for_user_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> Dict[str, Dict[str, JsonDict]]:
|
2022-12-31 20:40:46 -07:00
|
|
|
# The 'content != '{}' condition below prevents us from using
|
|
|
|
# `simple_select_list_txn` here, as it doesn't support conditions
|
|
|
|
# other than 'equals'.
|
|
|
|
sql = """
|
|
|
|
SELECT room_id, account_data_type, content FROM room_account_data
|
|
|
|
WHERE user_id = ?
|
|
|
|
"""
|
|
|
|
|
|
|
|
# If experimental MSC3391 support is enabled, then account data entries
|
|
|
|
# with an empty content are considered "deleted". So skip adding them to
|
|
|
|
# the results.
|
|
|
|
if self.hs.config.experimental.msc3391_enabled:
|
|
|
|
sql += " AND content != '{}'"
|
|
|
|
|
|
|
|
txn.execute(sql, (user_id,))
|
|
|
|
rows = self.db_pool.cursor_to_dict(txn)
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2021-12-13 09:28:10 -07:00
|
|
|
by_room: Dict[str, Dict[str, JsonDict]] = {}
|
2015-12-01 11:41:32 -07:00
|
|
|
for row in rows:
|
|
|
|
room_data = by_room.setdefault(row["room_id"], {})
|
2022-12-31 20:40:46 -07:00
|
|
|
|
2020-07-16 09:32:19 -06:00
|
|
|
room_data[row["account_data_type"]] = db_to_json(row["content"])
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
return by_room
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2020-09-01 09:04:17 -06:00
|
|
|
return await self.db_pool.runInteraction(
|
2023-02-10 07:22:16 -07:00
|
|
|
"get_room_account_data_for_user_txn", get_room_account_data_for_user_txn
|
2015-12-01 11:41:32 -07:00
|
|
|
)
|
|
|
|
|
2022-01-21 01:38:36 -07:00
|
|
|
@cached(num_args=2, max_entries=5000, tree=True)
|
2020-08-12 07:29:06 -06:00
|
|
|
async def get_global_account_data_by_type_for_user(
|
2022-01-21 01:38:36 -07:00
|
|
|
self, user_id: str, data_type: str
|
2023-09-18 07:55:04 -06:00
|
|
|
) -> Optional[JsonMapping]:
|
2016-05-03 09:01:24 -06:00
|
|
|
"""
|
|
|
|
Returns:
|
2020-08-12 07:29:06 -06:00
|
|
|
The account data.
|
2016-05-03 09:01:24 -06:00
|
|
|
"""
|
2020-08-12 07:29:06 -06:00
|
|
|
result = await self.db_pool.simple_select_one_onecol(
|
2016-05-03 09:01:24 -06:00
|
|
|
table="account_data",
|
2019-04-03 03:07:29 -06:00
|
|
|
keyvalues={"user_id": user_id, "account_data_type": data_type},
|
2016-05-03 09:01:24 -06:00
|
|
|
retcol="content",
|
|
|
|
desc="get_global_account_data_by_type_for_user",
|
|
|
|
allow_none=True,
|
|
|
|
)
|
|
|
|
|
|
|
|
if result:
|
2020-07-16 09:32:19 -06:00
|
|
|
return db_to_json(result)
|
2016-05-03 09:01:24 -06:00
|
|
|
else:
|
2019-07-23 07:00:55 -06:00
|
|
|
return None
|
2016-05-03 09:01:24 -06:00
|
|
|
|
2023-02-28 10:11:26 -07:00
|
|
|
async def get_latest_stream_id_for_global_account_data_by_type_for_user(
|
|
|
|
self, user_id: str, data_type: str
|
|
|
|
) -> Optional[int]:
|
|
|
|
"""
|
|
|
|
Returns:
|
|
|
|
The stream ID of the account data,
|
|
|
|
or None if there is no such account data.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def get_latest_stream_id_for_global_account_data_by_type_for_user_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> Optional[int]:
|
|
|
|
sql = """
|
|
|
|
SELECT stream_id FROM account_data
|
|
|
|
WHERE user_id = ? AND account_data_type = ?
|
|
|
|
ORDER BY stream_id DESC
|
|
|
|
LIMIT 1
|
|
|
|
"""
|
|
|
|
txn.execute(sql, (user_id, data_type))
|
|
|
|
|
|
|
|
row = txn.fetchone()
|
|
|
|
if row:
|
|
|
|
return row[0]
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
|
|
|
return await self.db_pool.runInteraction(
|
|
|
|
"get_latest_stream_id_for_global_account_data_by_type_for_user",
|
|
|
|
get_latest_stream_id_for_global_account_data_by_type_for_user_txn,
|
|
|
|
)
|
|
|
|
|
2022-01-24 06:37:00 -07:00
|
|
|
@cached(num_args=2, tree=True)
|
2020-09-01 09:04:17 -06:00
|
|
|
async def get_account_data_for_room(
|
|
|
|
self, user_id: str, room_id: str
|
2023-09-18 07:55:04 -06:00
|
|
|
) -> Mapping[str, JsonMapping]:
|
2015-12-01 11:41:32 -07:00
|
|
|
"""Get all the client account_data for a user for a room.
|
|
|
|
|
|
|
|
Args:
|
2020-09-01 09:04:17 -06:00
|
|
|
user_id: The user to get the account_data for.
|
|
|
|
room_id: The room to get the account_data for.
|
2015-12-01 11:41:32 -07:00
|
|
|
Returns:
|
2020-09-01 09:04:17 -06:00
|
|
|
A dict of the room account_data
|
2015-12-01 11:41:32 -07:00
|
|
|
"""
|
2019-04-03 03:07:29 -06:00
|
|
|
|
2021-12-13 09:28:10 -07:00
|
|
|
def get_account_data_for_room_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> Dict[str, JsonDict]:
|
2020-08-05 14:38:57 -06:00
|
|
|
rows = self.db_pool.simple_select_list_txn(
|
2019-04-03 03:07:29 -06:00
|
|
|
txn,
|
|
|
|
"room_account_data",
|
|
|
|
{"user_id": user_id, "room_id": room_id},
|
|
|
|
["account_data_type", "content"],
|
2015-12-01 11:41:32 -07:00
|
|
|
)
|
|
|
|
|
|
|
|
return {
|
2020-07-16 09:32:19 -06:00
|
|
|
row["account_data_type"]: db_to_json(row["content"]) for row in rows
|
2015-12-01 11:41:32 -07:00
|
|
|
}
|
|
|
|
|
2020-09-01 09:04:17 -06:00
|
|
|
return await self.db_pool.runInteraction(
|
2015-12-01 11:41:32 -07:00
|
|
|
"get_account_data_for_room", get_account_data_for_room_txn
|
|
|
|
)
|
|
|
|
|
2022-01-21 01:01:37 -07:00
|
|
|
@cached(num_args=3, max_entries=5000, tree=True)
|
2020-09-01 09:04:17 -06:00
|
|
|
async def get_account_data_for_room_and_type(
|
|
|
|
self, user_id: str, room_id: str, account_data_type: str
|
2023-09-18 07:55:04 -06:00
|
|
|
) -> Optional[JsonMapping]:
|
2018-03-01 10:00:35 -07:00
|
|
|
"""Get the client account_data of given type for a user for a room.
|
2018-03-01 08:53:04 -07:00
|
|
|
|
|
|
|
Args:
|
2020-09-01 09:04:17 -06:00
|
|
|
user_id: The user to get the account_data for.
|
|
|
|
room_id: The room to get the account_data for.
|
|
|
|
account_data_type: The account data type to get.
|
2018-03-01 08:53:04 -07:00
|
|
|
Returns:
|
2020-09-01 09:04:17 -06:00
|
|
|
The room account_data for that type, or None if there isn't any set.
|
2018-03-01 08:53:04 -07:00
|
|
|
"""
|
2019-04-03 03:07:29 -06:00
|
|
|
|
2021-12-13 09:28:10 -07:00
|
|
|
def get_account_data_for_room_and_type_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> Optional[JsonDict]:
|
2020-08-05 14:38:57 -06:00
|
|
|
content_json = self.db_pool.simple_select_one_onecol_txn(
|
2018-03-01 08:53:04 -07:00
|
|
|
txn,
|
|
|
|
table="room_account_data",
|
|
|
|
keyvalues={
|
|
|
|
"user_id": user_id,
|
|
|
|
"room_id": room_id,
|
|
|
|
"account_data_type": account_data_type,
|
|
|
|
},
|
|
|
|
retcol="content",
|
2019-04-03 03:07:29 -06:00
|
|
|
allow_none=True,
|
2018-03-01 08:53:04 -07:00
|
|
|
)
|
|
|
|
|
2020-07-16 09:32:19 -06:00
|
|
|
return db_to_json(content_json) if content_json else None
|
2018-03-01 08:53:04 -07:00
|
|
|
|
2020-09-01 09:04:17 -06:00
|
|
|
return await self.db_pool.runInteraction(
|
2019-04-03 03:07:29 -06:00
|
|
|
"get_account_data_for_room_and_type", get_account_data_for_room_and_type_txn
|
2018-03-01 08:53:04 -07:00
|
|
|
)
|
|
|
|
|
2020-05-15 12:03:25 -06:00
|
|
|
async def get_updated_global_account_data(
|
|
|
|
self, last_id: int, current_id: int, limit: int
|
|
|
|
) -> List[Tuple[int, str, str]]:
|
|
|
|
"""Get the global account_data that has changed, for the account_data stream
|
|
|
|
|
2016-03-01 07:49:41 -07:00
|
|
|
Args:
|
2020-05-15 12:03:25 -06:00
|
|
|
last_id: the last stream_id from the previous batch.
|
|
|
|
current_id: the maximum stream_id to return up to
|
|
|
|
limit: the maximum number of rows to return
|
|
|
|
|
2016-03-01 07:49:41 -07:00
|
|
|
Returns:
|
2020-05-15 12:03:25 -06:00
|
|
|
A list of tuples of stream_id int, user_id string,
|
|
|
|
and type string.
|
2016-03-01 07:49:41 -07:00
|
|
|
"""
|
2020-05-15 12:03:25 -06:00
|
|
|
if last_id == current_id:
|
|
|
|
return []
|
2016-06-08 04:33:30 -06:00
|
|
|
|
2021-12-13 09:28:10 -07:00
|
|
|
def get_updated_global_account_data_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> List[Tuple[int, str, str]]:
|
2016-03-01 07:49:41 -07:00
|
|
|
sql = (
|
2019-11-08 04:42:55 -07:00
|
|
|
"SELECT stream_id, user_id, account_data_type"
|
2016-03-01 07:49:41 -07:00
|
|
|
" FROM account_data WHERE ? < stream_id AND stream_id <= ?"
|
|
|
|
" ORDER BY stream_id ASC LIMIT ?"
|
|
|
|
)
|
2020-05-15 12:03:25 -06:00
|
|
|
txn.execute(sql, (last_id, current_id, limit))
|
2021-12-13 09:28:10 -07:00
|
|
|
return cast(List[Tuple[int, str, str]], txn.fetchall())
|
2020-05-15 12:03:25 -06:00
|
|
|
|
2020-08-05 14:38:57 -06:00
|
|
|
return await self.db_pool.runInteraction(
|
2020-05-15 12:03:25 -06:00
|
|
|
"get_updated_global_account_data", get_updated_global_account_data_txn
|
|
|
|
)
|
|
|
|
|
|
|
|
async def get_updated_room_account_data(
|
|
|
|
self, last_id: int, current_id: int, limit: int
|
|
|
|
) -> List[Tuple[int, str, str, str]]:
|
|
|
|
"""Get the global account_data that has changed, for the account_data stream
|
2016-03-01 07:49:41 -07:00
|
|
|
|
2020-05-15 12:03:25 -06:00
|
|
|
Args:
|
|
|
|
last_id: the last stream_id from the previous batch.
|
|
|
|
current_id: the maximum stream_id to return up to
|
|
|
|
limit: the maximum number of rows to return
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
A list of tuples of stream_id int, user_id string,
|
|
|
|
room_id string and type string.
|
|
|
|
"""
|
|
|
|
if last_id == current_id:
|
|
|
|
return []
|
|
|
|
|
2021-12-13 09:28:10 -07:00
|
|
|
def get_updated_room_account_data_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> List[Tuple[int, str, str, str]]:
|
2016-03-01 07:49:41 -07:00
|
|
|
sql = (
|
2019-11-08 04:42:55 -07:00
|
|
|
"SELECT stream_id, user_id, room_id, account_data_type"
|
2016-03-01 07:49:41 -07:00
|
|
|
" FROM room_account_data WHERE ? < stream_id AND stream_id <= ?"
|
|
|
|
" ORDER BY stream_id ASC LIMIT ?"
|
|
|
|
)
|
2020-05-15 12:03:25 -06:00
|
|
|
txn.execute(sql, (last_id, current_id, limit))
|
2021-12-13 09:28:10 -07:00
|
|
|
return cast(List[Tuple[int, str, str, str]], txn.fetchall())
|
2019-04-03 03:07:29 -06:00
|
|
|
|
2020-08-05 14:38:57 -06:00
|
|
|
return await self.db_pool.runInteraction(
|
2020-05-15 12:03:25 -06:00
|
|
|
"get_updated_room_account_data", get_updated_room_account_data_txn
|
2016-03-01 07:49:41 -07:00
|
|
|
)
|
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
async def get_updated_global_account_data_for_user(
|
2020-09-01 09:04:17 -06:00
|
|
|
self, user_id: str, stream_id: int
|
2023-09-18 07:55:04 -06:00
|
|
|
) -> Mapping[str, JsonMapping]:
|
2023-02-10 07:22:16 -07:00
|
|
|
"""Get all the global account_data that's changed for a user.
|
2015-12-01 11:41:32 -07:00
|
|
|
|
|
|
|
Args:
|
2020-09-01 09:04:17 -06:00
|
|
|
user_id: The user to get the account_data for.
|
|
|
|
stream_id: The point in the stream since which to get updates
|
2023-02-10 07:22:16 -07:00
|
|
|
|
2015-12-01 11:41:32 -07:00
|
|
|
Returns:
|
2023-02-10 07:22:16 -07:00
|
|
|
A dict of global account_data.
|
2015-12-01 11:41:32 -07:00
|
|
|
"""
|
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
def get_updated_global_account_data_for_user(
|
2021-12-13 09:28:10 -07:00
|
|
|
txn: LoggingTransaction,
|
2023-02-10 07:22:16 -07:00
|
|
|
) -> Dict[str, JsonDict]:
|
|
|
|
sql = """
|
|
|
|
SELECT account_data_type, content FROM account_data
|
|
|
|
WHERE user_id = ? AND stream_id > ?
|
|
|
|
"""
|
2015-12-01 11:41:32 -07:00
|
|
|
txn.execute(sql, (user_id, stream_id))
|
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
return {row[0]: db_to_json(row[1]) for row in txn}
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
changed = self._account_data_stream_cache.has_entity_changed(
|
|
|
|
user_id, int(stream_id)
|
|
|
|
)
|
|
|
|
if not changed:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
return await self.db_pool.runInteraction(
|
|
|
|
"get_updated_global_account_data_for_user",
|
|
|
|
get_updated_global_account_data_for_user,
|
|
|
|
)
|
|
|
|
|
|
|
|
async def get_updated_room_account_data_for_user(
|
|
|
|
self, user_id: str, stream_id: int
|
|
|
|
) -> Dict[str, Dict[str, JsonDict]]:
|
|
|
|
"""Get all the room account_data that's changed for a user.
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
Args:
|
|
|
|
user_id: The user to get the account_data for.
|
|
|
|
stream_id: The point in the stream since which to get updates
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
A dict mapping from room_id string to per room account_data dicts.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def get_updated_room_account_data_for_user_txn(
|
|
|
|
txn: LoggingTransaction,
|
|
|
|
) -> Dict[str, Dict[str, JsonDict]]:
|
|
|
|
sql = """
|
|
|
|
SELECT room_id, account_data_type, content FROM room_account_data
|
|
|
|
WHERE user_id = ? AND stream_id > ?
|
|
|
|
"""
|
2015-12-01 11:41:32 -07:00
|
|
|
txn.execute(sql, (user_id, stream_id))
|
|
|
|
|
2021-12-13 09:28:10 -07:00
|
|
|
account_data_by_room: Dict[str, Dict[str, JsonDict]] = {}
|
2017-03-23 11:53:49 -06:00
|
|
|
for row in txn:
|
2015-12-01 11:41:32 -07:00
|
|
|
room_account_data = account_data_by_room.setdefault(row[0], {})
|
2020-07-16 09:32:19 -06:00
|
|
|
room_account_data[row[1]] = db_to_json(row[2])
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2023-02-10 07:22:16 -07:00
|
|
|
return account_data_by_room
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2016-01-28 09:39:18 -07:00
|
|
|
changed = self._account_data_stream_cache.has_entity_changed(
|
2016-01-28 09:37:41 -07:00
|
|
|
user_id, int(stream_id)
|
|
|
|
)
|
|
|
|
if not changed:
|
2023-02-10 07:22:16 -07:00
|
|
|
return {}
|
2016-01-28 09:37:41 -07:00
|
|
|
|
2020-09-01 09:04:17 -06:00
|
|
|
return await self.db_pool.runInteraction(
|
2023-02-10 07:22:16 -07:00
|
|
|
"get_updated_room_account_data_for_user",
|
|
|
|
get_updated_room_account_data_for_user_txn,
|
2015-12-01 11:41:32 -07:00
|
|
|
)
|
|
|
|
|
2021-01-07 06:03:38 -07:00
|
|
|
@cached(max_entries=5000, iterable=True)
|
2022-03-15 12:06:05 -06:00
|
|
|
async def ignored_by(self, user_id: str) -> FrozenSet[str]:
|
2021-01-07 06:03:38 -07:00
|
|
|
"""
|
|
|
|
Get users which ignore the given user.
|
2018-02-16 05:08:42 -07:00
|
|
|
|
2021-01-07 06:03:38 -07:00
|
|
|
Params:
|
|
|
|
user_id: The user ID which might be ignored.
|
|
|
|
|
|
|
|
Return:
|
|
|
|
The user IDs which ignore the given user.
|
|
|
|
"""
|
2022-03-15 12:06:05 -06:00
|
|
|
return frozenset(
|
2021-01-07 06:03:38 -07:00
|
|
|
await self.db_pool.simple_select_onecol(
|
|
|
|
table="ignored_users",
|
|
|
|
keyvalues={"ignored_user_id": user_id},
|
|
|
|
retcol="ignorer_user_id",
|
|
|
|
desc="ignored_by",
|
|
|
|
)
|
|
|
|
)
|
2018-02-16 05:08:42 -07:00
|
|
|
|
2022-03-15 12:06:05 -06:00
|
|
|
@cached(max_entries=5000, iterable=True)
|
|
|
|
async def ignored_users(self, user_id: str) -> FrozenSet[str]:
|
|
|
|
"""
|
|
|
|
Get users which the given user ignores.
|
|
|
|
|
|
|
|
Params:
|
|
|
|
user_id: The user ID which is making the request.
|
|
|
|
|
|
|
|
Return:
|
|
|
|
The user IDs which are ignored by the given user.
|
|
|
|
"""
|
|
|
|
return frozenset(
|
|
|
|
await self.db_pool.simple_select_onecol(
|
|
|
|
table="ignored_users",
|
|
|
|
keyvalues={"ignorer_user_id": user_id},
|
|
|
|
retcol="ignored_user_id",
|
|
|
|
desc="ignored_users",
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2022-07-18 07:28:14 -06:00
|
|
|
    def process_replication_rows(
        self,
        stream_name: str,
        instance_name: str,
        token: int,
        rows: Iterable[Any],
    ) -> None:
        # Invalidate any caches that could now hold stale account data when
        # replication rows for the account data stream arrive.
        if stream_name == AccountDataStream.NAME:
            for row in rows:
                if not row.room_id:
                    # Falsy room_id indicates a global (non-room) change.
                    self.get_global_account_data_by_type_for_user.invalidate(
                        (row.user_id, row.data_type)
                    )
                # These per-user caches are invalidated unconditionally since
                # they cover both global and per-room account data.
                self.get_global_account_data_for_user.invalidate((row.user_id,))
                self.get_room_account_data_for_user.invalidate((row.user_id,))
                self.get_account_data_for_room.invalidate((row.user_id, row.room_id))
                self.get_account_data_for_room_and_type.invalidate(
                    (row.user_id, row.room_id, row.data_type)
                )
                # Record the change so `has_entity_changed` checks observe it.
                self._account_data_stream_cache.entity_has_changed(row.user_id, token)

        super().process_replication_rows(stream_name, instance_name, token, rows)
|
2018-02-16 05:08:42 -07:00
|
|
|
|
2023-01-04 04:49:26 -07:00
|
|
|
    def process_replication_position(
        self, stream_name: str, instance_name: str, token: int
    ) -> None:
        # Keep our ID generator in step with positions seen over replication
        # so `get_current_token` reflects other writers' progress.
        if stream_name == AccountDataStream.NAME:
            self._account_data_id_gen.advance(instance_name, token)
        super().process_replication_position(stream_name, instance_name, token)
|
|
|
|
|
2020-08-12 07:29:06 -06:00
|
|
|
    async def add_account_data_to_room(
        self, user_id: str, room_id: str, account_data_type: str, content: JsonDict
    ) -> int:
        """Add some account_data to a room for a user.

        Args:
            user_id: The user to add a tag for.
            room_id: The room to add a tag for.
            account_data_type: The type of account_data to add.
            content: A json object to associate with the tag.

        Returns:
            The maximum stream ID.
        """
        # Only configured account_data stream writers may mutate the tables.
        assert self._can_write_to_account_data

        content_json = json_encoder.encode(content)

        async with self._account_data_id_gen.get_next() as next_id:
            await self.db_pool.simple_upsert(
                desc="add_room_account_data",
                table="room_account_data",
                keyvalues={
                    "user_id": user_id,
                    "room_id": room_id,
                    "account_data_type": account_data_type,
                },
                values={"stream_id": next_id, "content": content_json},
            )

            # Record the change and drop caches that may now be stale; the
            # per-type cache can be prefilled since we know the new value.
            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
            self.get_room_account_data_for_user.invalidate((user_id,))
            self.get_account_data_for_room.invalidate((user_id, room_id))
            self.get_account_data_for_room_and_type.prefill(
                (user_id, room_id, account_data_type), content
            )

        return self._account_data_id_gen.get_current_token()
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2022-12-31 20:40:46 -07:00
|
|
|
async def remove_account_data_for_room(
|
|
|
|
self, user_id: str, room_id: str, account_data_type: str
|
2023-03-03 03:51:57 -07:00
|
|
|
) -> int:
|
2022-12-31 20:40:46 -07:00
|
|
|
"""Delete the room account data for the user of a given type.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id: The user to remove account_data for.
|
|
|
|
room_id: The room ID to scope the request to.
|
|
|
|
account_data_type: The account data type to delete.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The maximum stream position, or None if there was no matching room account
|
|
|
|
data to delete.
|
|
|
|
"""
|
|
|
|
assert self._can_write_to_account_data
|
|
|
|
|
|
|
|
def _remove_account_data_for_room_txn(
|
|
|
|
txn: LoggingTransaction, next_id: int
|
|
|
|
) -> bool:
|
|
|
|
"""
|
|
|
|
Args:
|
|
|
|
txn: The transaction object.
|
|
|
|
next_id: The stream_id to update any existing rows to.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
True if an entry in room_account_data had its content set to '{}',
|
|
|
|
otherwise False. This informs callers of whether there actually was an
|
|
|
|
existing room account data entry to delete, or if the call was a no-op.
|
|
|
|
"""
|
|
|
|
# We can't use `simple_update` as it doesn't have the ability to specify
|
|
|
|
# where clauses other than '=', which we need for `content != '{}'` below.
|
|
|
|
sql = """
|
|
|
|
UPDATE room_account_data
|
|
|
|
SET stream_id = ?, content = '{}'
|
|
|
|
WHERE user_id = ?
|
|
|
|
AND room_id = ?
|
|
|
|
AND account_data_type = ?
|
|
|
|
AND content != '{}'
|
|
|
|
"""
|
|
|
|
txn.execute(
|
|
|
|
sql,
|
|
|
|
(next_id, user_id, room_id, account_data_type),
|
|
|
|
)
|
|
|
|
# Return true if any rows were updated.
|
|
|
|
return txn.rowcount != 0
|
|
|
|
|
|
|
|
async with self._account_data_id_gen.get_next() as next_id:
|
|
|
|
row_updated = await self.db_pool.runInteraction(
|
|
|
|
"remove_account_data_for_room",
|
|
|
|
_remove_account_data_for_room_txn,
|
|
|
|
next_id,
|
|
|
|
)
|
|
|
|
|
2023-03-03 03:51:57 -07:00
|
|
|
if row_updated:
|
|
|
|
self._account_data_stream_cache.entity_has_changed(user_id, next_id)
|
|
|
|
self.get_room_account_data_for_user.invalidate((user_id,))
|
|
|
|
self.get_account_data_for_room.invalidate((user_id, room_id))
|
|
|
|
self.get_account_data_for_room_and_type.prefill(
|
|
|
|
(user_id, room_id, account_data_type), {}
|
|
|
|
)
|
2022-12-31 20:40:46 -07:00
|
|
|
|
|
|
|
return self._account_data_id_gen.get_current_token()
|
|
|
|
|
2020-08-12 07:29:06 -06:00
|
|
|
    async def add_account_data_for_user(
        self, user_id: str, account_data_type: str, content: JsonDict
    ) -> int:
        """Add some global account_data for a user.

        Args:
            user_id: The user to add a tag for.
            account_data_type: The type of account_data to add.
            content: A json object to associate with the tag.

        Returns:
            The maximum stream ID.
        """
        # Only configured account_data stream writers may mutate the tables.
        assert self._can_write_to_account_data

        async with self._account_data_id_gen.get_next() as next_id:
            # The upsert (and the ignored-users bookkeeping) happens in the
            # `_add_account_data_for_user` transaction helper.
            await self.db_pool.runInteraction(
                "add_user_account_data",
                self._add_account_data_for_user,
                next_id,
                user_id,
                account_data_type,
                content,
            )

            # Record the change and drop the caches that may now be stale.
            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
            self.get_global_account_data_for_user.invalidate((user_id,))
            self.get_global_account_data_by_type_for_user.invalidate(
                (user_id, account_data_type)
            )

        return self._account_data_id_gen.get_current_token()
|
2015-12-01 11:41:32 -07:00
|
|
|
|
2021-01-07 06:03:38 -07:00
|
|
|
def _add_account_data_for_user(
|
|
|
|
self,
|
2021-12-13 09:28:10 -07:00
|
|
|
txn: LoggingTransaction,
|
2021-01-07 06:03:38 -07:00
|
|
|
next_id: int,
|
|
|
|
user_id: str,
|
|
|
|
account_data_type: str,
|
|
|
|
content: JsonDict,
|
|
|
|
) -> None:
|
|
|
|
content_json = json_encoder.encode(content)
|
|
|
|
|
|
|
|
self.db_pool.simple_upsert_txn(
|
|
|
|
txn,
|
|
|
|
table="account_data",
|
|
|
|
keyvalues={"user_id": user_id, "account_data_type": account_data_type},
|
|
|
|
values={"stream_id": next_id, "content": content_json},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Ignored users get denormalized into a separate table as an optimisation.
|
|
|
|
if account_data_type != AccountDataTypes.IGNORED_USER_LIST:
|
|
|
|
return
|
|
|
|
|
|
|
|
# Insert / delete to sync the list of ignored users.
|
|
|
|
previously_ignored_users = set(
|
|
|
|
self.db_pool.simple_select_onecol_txn(
|
|
|
|
txn,
|
|
|
|
table="ignored_users",
|
|
|
|
keyvalues={"ignorer_user_id": user_id},
|
|
|
|
retcol="ignored_user_id",
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
# If the data is invalid, no one is ignored.
|
|
|
|
ignored_users_content = content.get("ignored_users", {})
|
|
|
|
if isinstance(ignored_users_content, dict):
|
|
|
|
currently_ignored_users = set(ignored_users_content)
|
|
|
|
else:
|
|
|
|
currently_ignored_users = set()
|
|
|
|
|
2022-03-15 12:06:05 -06:00
|
|
|
# If the data has not changed, nothing to do.
|
|
|
|
if previously_ignored_users == currently_ignored_users:
|
|
|
|
return
|
|
|
|
|
2021-01-07 06:03:38 -07:00
|
|
|
# Delete entries which are no longer ignored.
|
|
|
|
self.db_pool.simple_delete_many_txn(
|
|
|
|
txn,
|
|
|
|
table="ignored_users",
|
|
|
|
column="ignored_user_id",
|
2021-09-20 03:26:13 -06:00
|
|
|
values=previously_ignored_users - currently_ignored_users,
|
2021-01-07 06:03:38 -07:00
|
|
|
keyvalues={"ignorer_user_id": user_id},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Add entries which are newly ignored.
|
|
|
|
self.db_pool.simple_insert_many_txn(
|
|
|
|
txn,
|
|
|
|
table="ignored_users",
|
2022-01-13 17:44:18 -07:00
|
|
|
keys=("ignorer_user_id", "ignored_user_id"),
|
2021-01-07 06:03:38 -07:00
|
|
|
values=[
|
2022-01-13 17:44:18 -07:00
|
|
|
(user_id, u) for u in currently_ignored_users - previously_ignored_users
|
2021-01-07 06:03:38 -07:00
|
|
|
],
|
|
|
|
)
|
|
|
|
|
|
|
|
# Invalidate the cache for any ignored users which were added or removed.
|
|
|
|
for ignored_user_id in previously_ignored_users ^ currently_ignored_users:
|
|
|
|
self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,))
|
2022-03-15 12:06:05 -06:00
|
|
|
self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,))
|
2021-01-18 08:47:59 -07:00
|
|
|
|
2022-12-31 20:40:46 -07:00
|
|
|
    async def remove_account_data_for_user(
        self,
        user_id: str,
        account_data_type: str,
    ) -> int:
        """
        Delete a single piece of user account data by type.

        A "delete" is performed by updating a potentially existing row in the
        "account_data" database table for (user_id, account_data_type) and
        setting its content to "{}".

        Args:
            user_id: The user ID to modify the account data of.
            account_data_type: The type to remove.

        Returns:
            The maximum stream position. (This is returned regardless of
            whether any matching account data row existed to delete; caches
            and the stream cache are only touched when a row was updated.)
        """
        # Only processes configured to write account data may call this.
        assert self._can_write_to_account_data

        def _remove_account_data_for_user_txn(
            txn: LoggingTransaction, next_id: int
        ) -> bool:
            """
            Args:
                txn: The transaction object.
                next_id: The stream_id to update any existing rows to.

            Returns:
                True if an entry in account_data had its content set to '{}', otherwise
                False. This informs callers of whether there actually was an existing
                account data entry to delete, or if the call was a no-op.
            """
            # We can't use `simple_update` as it doesn't have the ability to specify
            # where clauses other than '=', which we need for `content != '{}'` below.
            sql = """
                UPDATE account_data
                    SET stream_id = ?, content = '{}'
                WHERE user_id = ?
                    AND account_data_type = ?
                    AND content != '{}'
            """
            txn.execute(sql, (next_id, user_id, account_data_type))
            if txn.rowcount == 0:
                # We didn't update any rows. This means that there was no matching
                # account data entry to delete in the first place.
                return False

            # Ignored users get denormalized into a separate table as an optimisation.
            if account_data_type == AccountDataTypes.IGNORED_USER_LIST:
                # If this method was called with the ignored users account data type, we
                # simply delete all ignored users.

                # First pull all the users that this user ignores.
                previously_ignored_users = set(
                    self.db_pool.simple_select_onecol_txn(
                        txn,
                        table="ignored_users",
                        keyvalues={"ignorer_user_id": user_id},
                        retcol="ignored_user_id",
                    )
                )

                # Then delete them from the database.
                self.db_pool.simple_delete_txn(
                    txn,
                    table="ignored_users",
                    keyvalues={"ignorer_user_id": user_id},
                )

                # Invalidate the cache for ignored users which were removed.
                for ignored_user_id in previously_ignored_users:
                    self._invalidate_cache_and_stream(
                        txn, self.ignored_by, (ignored_user_id,)
                    )

                # Invalidate for this user the cache tracking ignored users.
                self._invalidate_cache_and_stream(txn, self.ignored_users, (user_id,))

            return True

        async with self._account_data_id_gen.get_next() as next_id:
            row_updated = await self.db_pool.runInteraction(
                "remove_account_data_for_user",
                _remove_account_data_for_user_txn,
                next_id,
            )

            # Only touch caches / the stream cache when something actually
            # changed; prefill with {} since the row's content is now '{}'.
            if row_updated:
                self._account_data_stream_cache.entity_has_changed(user_id, next_id)
                self.get_global_account_data_for_user.invalidate((user_id,))
                self.get_global_account_data_by_type_for_user.prefill(
                    (user_id, account_data_type), {}
                )

        return self._account_data_id_gen.get_current_token()
|
|
|
|
|
2022-01-24 06:37:00 -07:00
|
|
|
async def purge_account_data_for_user(self, user_id: str) -> None:
|
|
|
|
"""
|
2022-02-02 04:37:18 -07:00
|
|
|
Removes ALL the account data for a user.
|
|
|
|
Intended to be used upon user deactivation.
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
Also purges the user from the ignored_users cache table
|
|
|
|
and the push_rules cache tables.
|
|
|
|
"""
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
await self.db_pool.runInteraction(
|
|
|
|
"purge_account_data_for_user_txn",
|
|
|
|
self._purge_account_data_for_user_txn,
|
|
|
|
user_id,
|
|
|
|
)
|
|
|
|
|
|
|
|
def _purge_account_data_for_user_txn(
|
|
|
|
self, txn: LoggingTransaction, user_id: str
|
|
|
|
) -> None:
|
2022-01-24 06:37:00 -07:00
|
|
|
"""
|
2022-02-02 04:37:18 -07:00
|
|
|
See `purge_account_data_for_user`.
|
|
|
|
"""
|
|
|
|
# Purge from the primary account_data tables.
|
|
|
|
self.db_pool.simple_delete_txn(
|
|
|
|
txn, table="account_data", keyvalues={"user_id": user_id}
|
|
|
|
)
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
self.db_pool.simple_delete_txn(
|
|
|
|
txn, table="room_account_data", keyvalues={"user_id": user_id}
|
|
|
|
)
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
# Purge from ignored_users where this user is the ignorer.
|
|
|
|
# N.B. We don't purge where this user is the ignoree, because that
|
|
|
|
# interferes with other users' account data.
|
|
|
|
# It's also not this user's data to delete!
|
|
|
|
self.db_pool.simple_delete_txn(
|
|
|
|
txn, table="ignored_users", keyvalues={"ignorer_user_id": user_id}
|
|
|
|
)
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
# Remove the push rules
|
|
|
|
self.db_pool.simple_delete_txn(
|
|
|
|
txn, table="push_rules", keyvalues={"user_name": user_id}
|
|
|
|
)
|
|
|
|
self.db_pool.simple_delete_txn(
|
|
|
|
txn, table="push_rules_enable", keyvalues={"user_name": user_id}
|
|
|
|
)
|
|
|
|
self.db_pool.simple_delete_txn(
|
|
|
|
txn, table="push_rules_stream", keyvalues={"user_id": user_id}
|
|
|
|
)
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
# Invalidate caches as appropriate
|
|
|
|
self._invalidate_cache_and_stream(
|
|
|
|
txn, self.get_account_data_for_room_and_type, (user_id,)
|
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(
|
2023-02-10 07:22:16 -07:00
|
|
|
txn, self.get_global_account_data_for_user, (user_id,)
|
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(
|
|
|
|
txn, self.get_room_account_data_for_user, (user_id,)
|
2022-02-02 04:37:18 -07:00
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(
|
|
|
|
txn, self.get_global_account_data_by_type_for_user, (user_id,)
|
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(
|
|
|
|
txn, self.get_account_data_for_room, (user_id,)
|
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(txn, self.get_push_rules_for_user, (user_id,))
|
|
|
|
# This user might be contained in the ignored_by cache for other users,
|
|
|
|
# so we have to invalidate it all.
|
|
|
|
self._invalidate_all_cache_and_stream(txn, self.ignored_by)
|
2022-01-24 06:37:00 -07:00
|
|
|
|
2022-02-02 04:37:18 -07:00
|
|
|
    async def _delete_account_data_for_deactivated_users(
        self, progress: dict, batch_size: int
    ) -> int:
        """
        Retroactively purges account data for users that have already been deactivated.
        Gets run as a background update caused by a schema delta.

        Args:
            progress: The stored progress of this background update; only the
                "last_user" key is read (name of the last user processed by
                the previous batch, empty string on the first run).
            batch_size: Maximum number of deactivated users to process per run.

        Returns:
            The number of users processed in this batch. The background update
            is marked finished once a batch returns fewer than `batch_size`.
        """

        last_user: str = progress.get("last_user", "")

        def _delete_account_data_for_deactivated_users_txn(
            txn: LoggingTransaction,
        ) -> int:
            # Page through deactivated users in name order, resuming just
            # after the last user handled by the previous batch.
            sql = """
                SELECT name FROM users
                WHERE deactivated = ? and name > ?
                ORDER BY name ASC
                LIMIT ?
            """

            txn.execute(sql, (1, last_user, batch_size))
            users = [row[0] for row in txn]

            # Reuse the per-user purge logic for each deactivated user.
            for user in users:
                self._purge_account_data_for_user_txn(txn, user_id=user)

            if users:
                # Record how far we got so the next batch resumes from here.
                self.db_pool.updates._background_update_progress_txn(
                    txn,
                    "delete_account_data_for_deactivated_users",
                    {"last_user": users[-1]},
                )

            return len(users)

        number_deleted = await self.db_pool.runInteraction(
            "_delete_account_data_for_deactivated_users",
            _delete_account_data_for_deactivated_users_txn,
        )

        # A short batch means there are no more deactivated users to process.
        if number_deleted < batch_size:
            await self.db_pool.updates._end_background_update(
                "delete_account_data_for_deactivated_users"
            )

        return number_deleted
|
|
|
|
|
2021-01-18 08:47:59 -07:00
|
|
|
|
|
|
|
class AccountDataStore(AccountDataWorkerStore):
    # No additional behaviour: all functionality lives on
    # AccountDataWorkerStore so it can be shared with worker processes.
    pass
|