# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import (
    TYPE_CHECKING,
    Collection,
    Iterable,
    List,
    Optional,
    Tuple,
    Union,
    cast,
)

import attr

from synapse.api.constants import Direction
from synapse.logging.opentracing import trace
from synapse.media._base import ThumbnailInfo
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import (
    DatabasePool,
    LoggingDatabaseConnection,
    LoggingTransaction,
)
from synapse.types import JsonDict, UserID

if TYPE_CHECKING:
    from synapse.server import HomeServer


BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2 = (
    "media_repository_drop_index_wo_method_2"
)


@attr.s(slots=True, frozen=True, auto_attribs=True)
class LocalMedia:
    media_id: str
    media_type: str
    media_length: Optional[int]
    upload_name: str
    created_ts: int
    url_cache: Optional[str]
    last_access_ts: int
    quarantined_by: Optional[str]
    safe_from_quarantine: bool
    user_id: Optional[str]


@attr.s(slots=True, frozen=True, auto_attribs=True)
class RemoteMedia:
    media_origin: str
    media_id: str
    media_type: str
    media_length: int
    upload_name: Optional[str]
    filesystem_id: str
    created_ts: int
    last_access_ts: int
    quarantined_by: Optional[str]


@attr.s(slots=True, frozen=True, auto_attribs=True)
class UrlCache:
    response_code: int
    expires_ts: int
    og: Union[str, bytes]


class MediaSortOrder(Enum):
    """
    Enum to define the sorting method used when returning media with
    get_local_media_by_user_paginate
    """

    MEDIA_ID = "media_id"
    UPLOAD_NAME = "upload_name"
    CREATED_TS = "created_ts"
    LAST_ACCESS_TS = "last_access_ts"
    MEDIA_LENGTH = "media_length"
    MEDIA_TYPE = "media_type"
    QUARANTINED_BY = "quarantined_by"
    SAFE_FROM_QUARANTINE = "safe_from_quarantine"


class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        self.db_pool.updates.register_background_index_update(
            update_name="local_media_repository_url_idx",
            index_name="local_media_repository_url_idx",
            table="local_media_repository",
            columns=["created_ts"],
            where_clause="url_cache IS NOT NULL",
        )

        # The following updates add the thumbnail method to the unique
        # constraint of the thumbnail tables. This fixes an issue where
        # thumbnails of the same resolution but different methods could
        # overwrite one another. This can happen with custom thumbnail
        # configs or with dynamic thumbnailing.
        self.db_pool.updates.register_background_index_update(
            update_name="local_media_repository_thumbnails_method_idx",
            index_name="local_media_repository_thumbn_media_id_width_height_method_key",
            table="local_media_repository_thumbnails",
            columns=[
                "media_id",
                "thumbnail_width",
                "thumbnail_height",
                "thumbnail_type",
                "thumbnail_method",
            ],
            unique=True,
        )

        self.db_pool.updates.register_background_index_update(
            update_name="remote_media_repository_thumbnails_method_idx",
            index_name="remote_media_repository_thumbn_media_origin_id_width_height_method_key",
            table="remote_media_cache_thumbnails",
            columns=[
                "media_origin",
                "media_id",
                "thumbnail_width",
                "thumbnail_height",
                "thumbnail_type",
                "thumbnail_method",
            ],
            unique=True,
        )

        self.db_pool.updates.register_background_update_handler(
            BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2,
            self._drop_media_index_without_method,
        )

        if hs.config.media.can_load_media_repo:
            self.unused_expiration_time: Optional[
                int
            ] = hs.config.media.unused_expiration_time
        else:
            self.unused_expiration_time = None

    async def _drop_media_index_without_method(
        self, progress: JsonDict, batch_size: int
    ) -> int:
        """Background update handler which removes the old constraints.

        Note that this is only run on postgres.
        """

        def f(txn: LoggingTransaction) -> None:
            txn.execute(
                "ALTER TABLE local_media_repository_thumbnails DROP CONSTRAINT IF EXISTS local_media_repository_thumbn_media_id_thumbnail_width_thum_key"
            )
            txn.execute(
                "ALTER TABLE remote_media_cache_thumbnails DROP CONSTRAINT IF EXISTS remote_media_cache_thumbnails_media_origin_media_id_thumbna_key"
            )

        await self.db_pool.runInteraction("drop_media_indices_without_method", f)
        await self.db_pool.updates._end_background_update(
            BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2
        )
        return 1


class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
    """Persistence for attachments and avatars"""

    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)
        self.server_name: str = hs.hostname

    async def get_local_media(self, media_id: str) -> Optional[LocalMedia]:
        """Get the metadata for a local piece of media

        Returns:
            None if the media_id doesn't exist.
        """
        row = await self.db_pool.simple_select_one(
            "local_media_repository",
            {"media_id": media_id},
            (
                "media_type",
                "media_length",
                "upload_name",
                "created_ts",
                "quarantined_by",
                "url_cache",
                "last_access_ts",
                "safe_from_quarantine",
                "user_id",
            ),
            allow_none=True,
            desc="get_local_media",
        )
        if row is None:
            return None
        return LocalMedia(
            media_id=media_id,
            media_type=row[0],
            media_length=row[1],
            upload_name=row[2],
            created_ts=row[3],
            quarantined_by=row[4],
            url_cache=row[5],
            last_access_ts=row[6],
            safe_from_quarantine=row[7],
            user_id=row[8],
        )

    async def get_local_media_by_user_paginate(
        self,
        start: int,
        limit: int,
        user_id: str,
        order_by: str = MediaSortOrder.CREATED_TS.value,
        direction: Direction = Direction.FORWARDS,
    ) -> Tuple[List[LocalMedia], int]:
        """Get a paginated list of metadata for local media items
        that the given user has uploaded.

        Args:
            start: offset in the list
            limit: maximum amount of media_ids to retrieve
            user_id: fully-qualified user id
            order_by: the sort order of the returned list
            direction: sort ascending or descending
        Returns:
            A paginated list of all metadata of user's media,
            plus the total count of all the user's media
        """

        def get_local_media_by_user_paginate_txn(
            txn: LoggingTransaction,
        ) -> Tuple[List[LocalMedia], int]:
            # Set ordering
            order_by_column = MediaSortOrder(order_by).value

            if direction == Direction.BACKWARDS:
                order = "DESC"
            else:
                order = "ASC"

            args: List[Union[str, int]] = [user_id]
            sql = """
                SELECT COUNT(*) as total_media
                FROM local_media_repository
                WHERE user_id = ?
            """
            txn.execute(sql, args)
            count = cast(Tuple[int], txn.fetchone())[0]

            sql = """
                SELECT
                    media_id,
                    media_type,
                    media_length,
                    upload_name,
                    created_ts,
                    url_cache,
                    last_access_ts,
                    quarantined_by,
                    safe_from_quarantine,
                    user_id
                FROM local_media_repository
                WHERE user_id = ?
                ORDER BY {order_by_column} {order}, media_id ASC
                LIMIT ? OFFSET ?
            """.format(
                order_by_column=order_by_column,
                order=order,
            )

            args += [limit, start]
            txn.execute(sql, args)
            media = [
                LocalMedia(
                    media_id=row[0],
                    media_type=row[1],
                    media_length=row[2],
                    upload_name=row[3],
                    created_ts=row[4],
                    url_cache=row[5],
                    last_access_ts=row[6],
                    quarantined_by=row[7],
                    safe_from_quarantine=bool(row[8]),
                    user_id=row[9],
                )
                for row in txn
            ]
            return media, count

        return await self.db_pool.runInteraction(
            "get_local_media_by_user_paginate_txn", get_local_media_by_user_paginate_txn
        )

    async def get_local_media_ids(
        self,
        before_ts: int,
        size_gt: int,
        keep_profiles: bool,
        include_quarantined_media: bool,
        include_protected_media: bool,
    ) -> List[str]:
        """
        Retrieve a list of media IDs from the local media store.

        Args:
            before_ts: Only retrieve IDs from media that was either last accessed
                (or if never accessed, created) before the given UNIX timestamp in ms.
            size_gt: Only retrieve IDs from media that has a size (in bytes) greater than
                the given integer.
            keep_profiles: If True, exclude media IDs from the results that are used in the
                following situations:
                    * global profile user avatar
                    * per-room profile user avatar
                    * room avatar
                    * a user's avatar in the user directory
            include_quarantined_media: If False, exclude media IDs from the results that have
                been marked as quarantined.
            include_protected_media: If False, exclude media IDs from the results that have
                been marked as protected from quarantine.

        Returns:
            A list of local media IDs.
        """

        # to find files that have never been accessed (last_access_ts IS NULL)
        # compare with `created_ts`
        sql = """
            SELECT media_id
            FROM local_media_repository AS lmr
            WHERE
                ( last_access_ts < ?
                OR ( created_ts < ? AND last_access_ts IS NULL ) )
                AND media_length > ?
        """

        if keep_profiles:
            sql_keep = """
                AND (
                    NOT EXISTS
                        (SELECT 1
                         FROM profiles
                         WHERE profiles.avatar_url = '{media_prefix}' || lmr.media_id)
                    AND NOT EXISTS
                        (SELECT 1
                         FROM room_memberships
                         WHERE room_memberships.avatar_url = '{media_prefix}' || lmr.media_id)
                    AND NOT EXISTS
                        (SELECT 1
                         FROM user_directory
                         WHERE user_directory.avatar_url = '{media_prefix}' || lmr.media_id)
                    AND NOT EXISTS
                        (SELECT 1
                         FROM room_stats_state
                         WHERE room_stats_state.avatar = '{media_prefix}' || lmr.media_id)
                )
            """.format(
                media_prefix="mxc://%s/" % (self.server_name,),
            )
            sql += sql_keep

        if include_quarantined_media is False:
            # Do not include media that has been quarantined
            sql += """
                AND quarantined_by IS NULL
            """

        if include_protected_media is False:
            # Do not include media that has been protected from quarantine
            sql += """
                AND NOT safe_from_quarantine
            """

        def _get_local_media_ids_txn(txn: LoggingTransaction) -> List[str]:
            txn.execute(sql, (before_ts, before_ts, size_gt))
            return [row[0] for row in txn]

        return await self.db_pool.runInteraction(
            "get_local_media_ids", _get_local_media_ids_txn
        )

    @trace
    async def store_local_media_id(
        self,
        media_id: str,
        time_now_ms: int,
        user_id: UserID,
    ) -> None:
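        """Create a row for a local media item for which no content has been
        uploaded yet: only the media ID, creation time and owner are recorded.
        The remaining metadata is filled in later by update_local_media.
        """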
        await self.db_pool.simple_insert(
            "local_media_repository",
            {
                "media_id": media_id,
                "created_ts": time_now_ms,
                "user_id": user_id.to_string(),
            },
            desc="store_local_media_id",
        )

    @trace
    async def store_local_media(
        self,
        media_id: str,
        media_type: str,
        time_now_ms: int,
        upload_name: Optional[str],
        media_length: int,
        user_id: UserID,
        url_cache: Optional[str] = None,
    ) -> None:
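        """Store the metadata (including content type and length) for a newly
        uploaded piece of local media.
        """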
        await self.db_pool.simple_insert(
            "local_media_repository",
            {
                "media_id": media_id,
                "media_type": media_type,
                "created_ts": time_now_ms,
                "upload_name": upload_name,
                "media_length": media_length,
                "user_id": user_id.to_string(),
                "url_cache": url_cache,
            },
            desc="store_local_media",
        )

    async def update_local_media(
        self,
        media_id: str,
        media_type: str,
        upload_name: Optional[str],
        media_length: int,
        user_id: UserID,
        url_cache: Optional[str] = None,
    ) -> None:
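        """Update the metadata of a local media item once its content is
        known, e.g. for a media ID previously reserved with
        store_local_media_id.
        """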
        await self.db_pool.simple_update_one(
            "local_media_repository",
            keyvalues={
                "user_id": user_id.to_string(),
                "media_id": media_id,
            },
            updatevalues={
                "media_type": media_type,
                "upload_name": upload_name,
                "media_length": media_length,
                "url_cache": url_cache,
            },
            desc="update_local_media",
        )

    async def mark_local_media_as_safe(self, media_id: str, safe: bool = True) -> None:
        """Mark a local media as safe or unsafe from quarantining."""
        await self.db_pool.simple_update_one(
            table="local_media_repository",
            keyvalues={"media_id": media_id},
            updatevalues={"safe_from_quarantine": safe},
            desc="mark_local_media_as_safe",
        )

    async def count_pending_media(self, user_id: UserID) -> Tuple[int, int]:
        """Count the number of pending media for a user. Pending media are
        rows created via store_local_media_id whose content has not yet been
        uploaded (media_length IS NULL).

        Returns:
            A tuple of two integers: the total pending media requests and the earliest
            expiration timestamp.
        """

        def get_pending_media_txn(txn: LoggingTransaction) -> Tuple[int, int]:
            sql = """
                SELECT COUNT(*), MIN(created_ts)
                FROM local_media_repository
                WHERE user_id = ?
                    AND created_ts > ?
                    AND media_length IS NULL
            """
            assert self.unused_expiration_time is not None
            txn.execute(
                sql,
                (
                    user_id.to_string(),
                    self._clock.time_msec() - self.unused_expiration_time,
                ),
            )
            row = txn.fetchone()
            if not row:
                return 0, 0
            return row[0], (row[1] + self.unused_expiration_time if row[1] else 0)

        return await self.db_pool.runInteraction(
            "get_pending_media", get_pending_media_txn
        )

    async def get_url_cache(self, url: str, ts: int) -> Optional[UrlCache]:
        """Get the cached response for a URL, as of the given timestamp.

        Returns:
            None if the URL isn't cached.
        """

        def get_url_cache_txn(txn: LoggingTransaction) -> Optional[UrlCache]:
            # get the most recently cached result (relative to the given ts)
            sql = """
                SELECT response_code, expires_ts, og
                FROM local_media_repository_url_cache
                WHERE url = ? AND download_ts <= ?
                ORDER BY download_ts DESC LIMIT 1
            """
            txn.execute(sql, (url, ts))
            row = txn.fetchone()

            if not row:
                # ...or if we've requested a timestamp older than the oldest
                # copy in the cache, return the oldest copy (if any)
                sql = """
                    SELECT response_code, expires_ts, og
                    FROM local_media_repository_url_cache
                    WHERE url = ? AND download_ts > ?
                    ORDER BY download_ts ASC LIMIT 1
                """
                txn.execute(sql, (url, ts))
                row = txn.fetchone()

            if not row:
                return None

            return UrlCache(response_code=row[0], expires_ts=row[1], og=row[2])

        return await self.db_pool.runInteraction("get_url_cache", get_url_cache_txn)

    async def store_url_cache(
        self,
        url: str,
        response_code: int,
        etag: Optional[str],
        expires_ts: int,
        og: str,
        media_id: str,
        download_ts: int,
    ) -> None:
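        """Cache the fetched URL preview, keyed on the URL and the time it was
        downloaded.
        """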
        await self.db_pool.simple_insert(
            "local_media_repository_url_cache",
            {
                "url": url,
                "response_code": response_code,
                "etag": etag,
                "expires_ts": expires_ts,
                "og": og,
                "media_id": media_id,
                "download_ts": download_ts,
            },
            desc="store_url_cache",
        )

    async def get_local_media_thumbnails(self, media_id: str) -> List[ThumbnailInfo]:
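        """Fetch the metadata of all thumbnails generated for a local media
        item.
        """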
        rows = cast(
            List[Tuple[int, int, str, str, int]],
            await self.db_pool.simple_select_list(
                "local_media_repository_thumbnails",
                {"media_id": media_id},
                (
                    "thumbnail_width",
                    "thumbnail_height",
                    "thumbnail_method",
                    "thumbnail_type",
                    "thumbnail_length",
                ),
                desc="get_local_media_thumbnails",
            ),
        )
        return [
            ThumbnailInfo(
                width=row[0], height=row[1], method=row[2], type=row[3], length=row[4]
            )
            for row in rows
        ]

    @trace
    async def store_local_thumbnail(
        self,
        media_id: str,
        thumbnail_width: int,
        thumbnail_height: int,
        thumbnail_type: str,
        thumbnail_method: str,
        thumbnail_length: int,
    ) -> None:
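        """Insert or update the metadata for a local thumbnail, so that a
        regenerated thumbnail replaces the existing row rather than tripping
        the unique constraint.
        """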
        await self.db_pool.simple_upsert(
            table="local_media_repository_thumbnails",
            keyvalues={
                "media_id": media_id,
                "thumbnail_width": thumbnail_width,
                "thumbnail_height": thumbnail_height,
                "thumbnail_method": thumbnail_method,
                "thumbnail_type": thumbnail_type,
            },
            values={"thumbnail_length": thumbnail_length},
            desc="store_local_thumbnail",
        )

    async def get_cached_remote_media(
        self, origin: str, media_id: str
    ) -> Optional[RemoteMedia]:
        row = await self.db_pool.simple_select_one(
            "remote_media_cache",
            {"media_origin": origin, "media_id": media_id},
            (
                "media_type",
                "media_length",
                "upload_name",
                "created_ts",
                "filesystem_id",
                "last_access_ts",
                "quarantined_by",
            ),
            allow_none=True,
            desc="get_cached_remote_media",
        )
        if row is None:
            return None
        return RemoteMedia(
            media_origin=origin,
            media_id=media_id,
            media_type=row[0],
            media_length=row[1],
            upload_name=row[2],
            created_ts=row[3],
            filesystem_id=row[4],
            last_access_ts=row[5],
            quarantined_by=row[6],
        )

    async def store_cached_remote_media(
        self,
        origin: str,
        media_id: str,
        media_type: str,
        media_length: int,
        time_now_ms: int,
        upload_name: Optional[str],
        filesystem_id: str,
    ) -> None:
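        """Record a remote media item that has just been downloaded into the
        local cache, with its last access time set to the download time.
        """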
        await self.db_pool.simple_insert(
            "remote_media_cache",
            {
                "media_origin": origin,
                "media_id": media_id,
                "media_type": media_type,
                "media_length": media_length,
                "created_ts": time_now_ms,
                "upload_name": upload_name,
                "filesystem_id": filesystem_id,
                "last_access_ts": time_now_ms,
            },
            desc="store_cached_remote_media",
        )

    async def update_cached_last_access_time(
        self,
        local_media: Iterable[str],
        remote_media: Iterable[Tuple[str, str]],
        time_ms: int,
    ) -> None:
        """Updates the last access time of the given media

        Args:
            local_media: Set of media_ids
            remote_media: Set of (server_name, media_id)
            time_ms: Current time in milliseconds
        """

        def update_cache_txn(txn: LoggingTransaction) -> None:
            sql = (
                "UPDATE remote_media_cache SET last_access_ts = ?"
                " WHERE media_origin = ? AND media_id = ?"
            )

            txn.execute_batch(
                sql,
                (
                    (time_ms, media_origin, media_id)
                    for media_origin, media_id in remote_media
                ),
            )

            sql = (
                "UPDATE local_media_repository SET last_access_ts = ?"
                " WHERE media_id = ?"
            )

            txn.execute_batch(sql, ((time_ms, media_id) for media_id in local_media))

        await self.db_pool.runInteraction(
            "update_cached_last_access_time", update_cache_txn
        )

    async def get_remote_media_thumbnails(
        self, origin: str, media_id: str
    ) -> List[ThumbnailInfo]:
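        """Fetch the metadata of all cached thumbnails for a remote media item."""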
        rows = cast(
            List[Tuple[int, int, str, str, int]],
            await self.db_pool.simple_select_list(
                "remote_media_cache_thumbnails",
                {"media_origin": origin, "media_id": media_id},
                (
                    "thumbnail_width",
                    "thumbnail_height",
                    "thumbnail_method",
                    "thumbnail_type",
                    "thumbnail_length",
                ),
                desc="get_remote_media_thumbnails",
            ),
        )
        return [
            ThumbnailInfo(
                width=row[0], height=row[1], method=row[2], type=row[3], length=row[4]
            )
            for row in rows
        ]

    @trace
    async def get_remote_media_thumbnail(
        self,
        origin: str,
        media_id: str,
        t_width: int,
        t_height: int,
        t_type: str,
    ) -> Optional[ThumbnailInfo]:
        """Fetch the thumbnail info of the given width, height and type."""

        row = await self.db_pool.simple_select_one(
            table="remote_media_cache_thumbnails",
            keyvalues={
                "media_origin": origin,
                "media_id": media_id,
                "thumbnail_width": t_width,
                "thumbnail_height": t_height,
                "thumbnail_type": t_type,
            },
            retcols=(
                "thumbnail_width",
                "thumbnail_height",
                "thumbnail_method",
                "thumbnail_type",
                "thumbnail_length",
            ),
            allow_none=True,
            desc="get_remote_media_thumbnail",
        )
        if row is None:
            return None
        return ThumbnailInfo(
            width=row[0], height=row[1], method=row[2], type=row[3], length=row[4]
        )

    @trace
    async def store_remote_media_thumbnail(
        self,
        origin: str,
        media_id: str,
        filesystem_id: str,
        thumbnail_width: int,
        thumbnail_height: int,
        thumbnail_type: str,
        thumbnail_method: str,
        thumbnail_length: int,
    ) -> None:
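        """Insert or update the metadata for a cached remote thumbnail. The
        filesystem ID is only written when the row is first created.
        """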
        await self.db_pool.simple_upsert(
            table="remote_media_cache_thumbnails",
            keyvalues={
                "media_origin": origin,
                "media_id": media_id,
                "thumbnail_width": thumbnail_width,
                "thumbnail_height": thumbnail_height,
                "thumbnail_method": thumbnail_method,
                "thumbnail_type": thumbnail_type,
            },
            values={"thumbnail_length": thumbnail_length},
            insertion_values={"filesystem_id": filesystem_id},
            desc="store_remote_media_thumbnail",
        )

    async def get_remote_media_ids(
        self, before_ts: int, include_quarantined_media: bool
    ) -> List[Tuple[str, str, str]]:
        """
        Retrieve a list of (server name, media ID, filesystem ID) tuples from
        the remote media cache.

        Args:
            before_ts: Only retrieve IDs from media that was either last accessed
                (or if never accessed, created) before the given UNIX timestamp in ms.
            include_quarantined_media: If False, exclude media IDs from the results that have
                been marked as quarantined.

        Returns:
            A list of tuples containing:
                * The server name of the homeserver where the media originates from,
                * The ID of the media.
                * The filesystem ID.
        """

        sql = """
            SELECT media_origin, media_id, filesystem_id
            FROM remote_media_cache
            WHERE last_access_ts < ?
        """

        if include_quarantined_media is False:
            # Only include media that has not been quarantined
            sql += """
            AND quarantined_by IS NULL
            """

        return cast(
            List[Tuple[str, str, str]],
            await self.db_pool.execute("get_remote_media_ids", sql, before_ts),
        )

    async def delete_remote_media(self, media_origin: str, media_id: str) -> None:
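        """Delete the cache rows (both the media entry and its thumbnails) for
        a remote media item. This only removes database rows, not the files
        themselves.
        """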
        def delete_remote_media_txn(txn: LoggingTransaction) -> None:
            self.db_pool.simple_delete_txn(
                txn,
                "remote_media_cache",
                keyvalues={"media_origin": media_origin, "media_id": media_id},
            )
            self.db_pool.simple_delete_txn(
                txn,
                "remote_media_cache_thumbnails",
                keyvalues={"media_origin": media_origin, "media_id": media_id},
            )

        await self.db_pool.runInteraction(
            "delete_remote_media", delete_remote_media_txn
        )

    async def get_expired_url_cache(self, now_ts: int) -> List[str]:
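        """Get up to 500 media IDs of URL cache entries whose expiry time is
        before now_ts, soonest-expiring first.
        """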
        sql = (
            "SELECT media_id FROM local_media_repository_url_cache"
            " WHERE expires_ts < ?"
            " ORDER BY expires_ts ASC"
            " LIMIT 500"
        )

        def _get_expired_url_cache_txn(txn: LoggingTransaction) -> List[str]:
            txn.execute(sql, (now_ts,))
            return [row[0] for row in txn]

        return await self.db_pool.runInteraction(
            "get_expired_url_cache", _get_expired_url_cache_txn
        )

    async def delete_url_cache(self, media_ids: Collection[str]) -> None:
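        """Delete the URL cache rows for the given media IDs."""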
        if len(media_ids) == 0:
            return

        sql = "DELETE FROM local_media_repository_url_cache WHERE media_id = ?"

        def _delete_url_cache_txn(txn: LoggingTransaction) -> None:
            txn.execute_batch(sql, [(media_id,) for media_id in media_ids])

        await self.db_pool.runInteraction("delete_url_cache", _delete_url_cache_txn)

    async def get_url_cache_media_before(self, before_ts: int) -> List[str]:
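        """Get up to 500 media IDs of URL cache media created before
        before_ts, oldest first.
        """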
        sql = (
            "SELECT media_id FROM local_media_repository"
            " WHERE created_ts < ? AND url_cache IS NOT NULL"
            " ORDER BY created_ts ASC"
            " LIMIT 500"
        )

        def _get_url_cache_media_before_txn(txn: LoggingTransaction) -> List[str]:
            txn.execute(sql, (before_ts,))
            return [row[0] for row in txn]

        return await self.db_pool.runInteraction(
            "get_url_cache_media_before", _get_url_cache_media_before_txn
        )

    async def delete_url_cache_media(self, media_ids: Collection[str]) -> None:
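        """Delete the local media rows (and their thumbnail rows) that back
        URL cache entries.
        """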
        if len(media_ids) == 0:
            return

        def _delete_url_cache_media_txn(txn: LoggingTransaction) -> None:
            sql = "DELETE FROM local_media_repository WHERE media_id = ?"

            txn.execute_batch(sql, [(media_id,) for media_id in media_ids])

            sql = "DELETE FROM local_media_repository_thumbnails WHERE media_id = ?"

            txn.execute_batch(sql, [(media_id,) for media_id in media_ids])

        await self.db_pool.runInteraction(
            "delete_url_cache_media", _delete_url_cache_media_txn
        )