2014-08-12 08:10:52 -06:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-06 21:26:29 -07:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2014-08-12 08:10:52 -06:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2014-08-12 20:14:34 -06:00
|
|
|
|
2018-07-09 00:09:20 -06:00
|
|
|
import collections
|
|
|
|
import logging
|
|
|
|
import re
|
|
|
|
|
|
|
|
from canonicaljson import json
|
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
|
|
|
from synapse.api.errors import StoreError
|
2018-03-01 04:39:45 -07:00
|
|
|
from synapse.storage._base import SQLBaseStore
|
2018-02-03 15:57:33 -07:00
|
|
|
from synapse.storage.search import SearchStore
|
2017-05-10 04:05:43 -06:00
|
|
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2015-03-20 09:05:44 -06:00
|
|
|
OpsLevel = collections.namedtuple(
|
|
|
|
"OpsLevel",
|
|
|
|
("ban_level", "kick_level", "redact_level",)
|
2014-11-20 10:26:36 -07:00
|
|
|
)
|
2014-09-01 09:15:34 -06:00
|
|
|
|
2017-05-10 04:05:43 -06:00
|
|
|
RatelimitOverride = collections.namedtuple(
|
|
|
|
"RatelimitOverride",
|
|
|
|
("messages_per_second", "burst_count",)
|
|
|
|
)
|
|
|
|
|
2014-09-01 09:15:34 -06:00
|
|
|
|
2018-03-01 04:39:45 -07:00
|
|
|
class RoomWorkerStore(SQLBaseStore):
|
2018-07-26 06:31:59 -06:00
|
|
|
def get_room(self, room_id):
|
|
|
|
"""Retrieve a room.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
room_id (str): The ID of the room to retrieve.
|
|
|
|
Returns:
|
|
|
|
A namedtuple containing the room information, or an empty list.
|
|
|
|
"""
|
|
|
|
return self._simple_select_one(
|
|
|
|
table="rooms",
|
|
|
|
keyvalues={"room_id": room_id},
|
|
|
|
retcols=("room_id", "is_public", "creator"),
|
|
|
|
desc="get_room",
|
|
|
|
allow_none=True,
|
|
|
|
)
|
|
|
|
|
2018-03-01 04:39:45 -07:00
|
|
|
def get_public_room_ids(self):
|
|
|
|
return self._simple_select_onecol(
|
|
|
|
table="rooms",
|
|
|
|
keyvalues={
|
|
|
|
"is_public": True,
|
|
|
|
},
|
|
|
|
retcol="room_id",
|
|
|
|
desc="get_public_room_ids",
|
|
|
|
)
|
|
|
|
|
|
|
|
@cached(num_args=2, max_entries=100)
|
|
|
|
def get_public_room_ids_at_stream_id(self, stream_id, network_tuple):
|
|
|
|
"""Get pulbic rooms for a particular list, or across all lists.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
stream_id (int)
|
|
|
|
network_tuple (ThirdPartyInstanceID): The list to use (None, None)
|
|
|
|
means the main list, None means all lsits.
|
|
|
|
"""
|
|
|
|
return self.runInteraction(
|
|
|
|
"get_public_room_ids_at_stream_id",
|
|
|
|
self.get_public_room_ids_at_stream_id_txn,
|
|
|
|
stream_id, network_tuple=network_tuple
|
|
|
|
)
|
|
|
|
|
|
|
|
def get_public_room_ids_at_stream_id_txn(self, txn, stream_id,
|
|
|
|
network_tuple):
|
|
|
|
return {
|
|
|
|
rm
|
|
|
|
for rm, vis in self.get_published_at_stream_id_txn(
|
|
|
|
txn, stream_id, network_tuple=network_tuple
|
|
|
|
).items()
|
|
|
|
if vis
|
|
|
|
}
|
|
|
|
|
|
|
|
def get_published_at_stream_id_txn(self, txn, stream_id, network_tuple):
|
|
|
|
if network_tuple:
|
|
|
|
# We want to get from a particular list. No aggregation required.
|
|
|
|
|
|
|
|
sql = ("""
|
|
|
|
SELECT room_id, visibility FROM public_room_list_stream
|
|
|
|
INNER JOIN (
|
|
|
|
SELECT room_id, max(stream_id) AS stream_id
|
|
|
|
FROM public_room_list_stream
|
|
|
|
WHERE stream_id <= ? %s
|
|
|
|
GROUP BY room_id
|
|
|
|
) grouped USING (room_id, stream_id)
|
|
|
|
""")
|
|
|
|
|
|
|
|
if network_tuple.appservice_id is not None:
|
|
|
|
txn.execute(
|
|
|
|
sql % ("AND appservice_id = ? AND network_id = ?",),
|
|
|
|
(stream_id, network_tuple.appservice_id, network_tuple.network_id,)
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
txn.execute(
|
|
|
|
sql % ("AND appservice_id IS NULL",),
|
|
|
|
(stream_id,)
|
|
|
|
)
|
|
|
|
return dict(txn)
|
|
|
|
else:
|
|
|
|
# We want to get from all lists, so we need to aggregate the results
|
|
|
|
|
|
|
|
logger.info("Executing full list")
|
|
|
|
|
|
|
|
sql = ("""
|
|
|
|
SELECT room_id, visibility
|
|
|
|
FROM public_room_list_stream
|
|
|
|
INNER JOIN (
|
|
|
|
SELECT
|
|
|
|
room_id, max(stream_id) AS stream_id, appservice_id,
|
|
|
|
network_id
|
|
|
|
FROM public_room_list_stream
|
|
|
|
WHERE stream_id <= ?
|
|
|
|
GROUP BY room_id, appservice_id, network_id
|
|
|
|
) grouped USING (room_id, stream_id)
|
|
|
|
""")
|
|
|
|
|
|
|
|
txn.execute(
|
|
|
|
sql,
|
|
|
|
(stream_id,)
|
|
|
|
)
|
|
|
|
|
|
|
|
results = {}
|
|
|
|
# A room is visible if its visible on any list.
|
|
|
|
for room_id, visibility in txn:
|
|
|
|
results[room_id] = bool(visibility) or results.get(room_id, False)
|
|
|
|
|
|
|
|
return results
|
|
|
|
|
|
|
|
def get_public_room_changes(self, prev_stream_id, new_stream_id,
|
|
|
|
network_tuple):
|
|
|
|
def get_public_room_changes_txn(txn):
|
|
|
|
then_rooms = self.get_public_room_ids_at_stream_id_txn(
|
|
|
|
txn, prev_stream_id, network_tuple
|
|
|
|
)
|
|
|
|
|
|
|
|
now_rooms_dict = self.get_published_at_stream_id_txn(
|
|
|
|
txn, new_stream_id, network_tuple
|
|
|
|
)
|
|
|
|
|
|
|
|
now_rooms_visible = set(
|
|
|
|
rm for rm, vis in now_rooms_dict.items() if vis
|
|
|
|
)
|
|
|
|
now_rooms_not_visible = set(
|
|
|
|
rm for rm, vis in now_rooms_dict.items() if not vis
|
|
|
|
)
|
|
|
|
|
|
|
|
newly_visible = now_rooms_visible - then_rooms
|
|
|
|
newly_unpublished = now_rooms_not_visible & then_rooms
|
|
|
|
|
|
|
|
return newly_visible, newly_unpublished
|
|
|
|
|
|
|
|
return self.runInteraction(
|
|
|
|
"get_public_room_changes", get_public_room_changes_txn
|
|
|
|
)
|
|
|
|
|
2018-03-01 08:20:54 -07:00
|
|
|
@cached(max_entries=10000)
|
|
|
|
def is_room_blocked(self, room_id):
|
|
|
|
return self._simple_select_one_onecol(
|
|
|
|
table="blocked_rooms",
|
|
|
|
keyvalues={
|
|
|
|
"room_id": room_id,
|
|
|
|
},
|
|
|
|
retcol="1",
|
|
|
|
allow_none=True,
|
|
|
|
desc="is_room_blocked",
|
|
|
|
)
|
|
|
|
|
2018-03-01 04:39:45 -07:00
|
|
|
|
|
|
|
class RoomStore(RoomWorkerStore, SearchStore):
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def store_room(self, room_id, room_creator_user_id, is_public):
|
|
|
|
"""Stores a room.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
room_id (str): The desired room ID, can be None.
|
|
|
|
room_creator_user_id (str): The user ID of the room creator.
|
|
|
|
is_public (bool): True to indicate that this room should appear in
|
|
|
|
public room lists.
|
|
|
|
Raises:
|
|
|
|
StoreError if the room could not be stored.
|
|
|
|
"""
|
|
|
|
try:
|
2016-09-14 10:01:02 -06:00
|
|
|
def store_room_txn(txn, next_id):
|
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
"rooms",
|
|
|
|
{
|
|
|
|
"room_id": room_id,
|
|
|
|
"creator": room_creator_user_id,
|
|
|
|
"is_public": is_public,
|
|
|
|
},
|
|
|
|
)
|
|
|
|
if is_public:
|
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
table="public_room_list_stream",
|
|
|
|
values={
|
|
|
|
"stream_id": next_id,
|
|
|
|
"room_id": room_id,
|
|
|
|
"visibility": is_public,
|
|
|
|
}
|
|
|
|
)
|
|
|
|
with self._public_room_id_gen.get_next() as next_id:
|
|
|
|
yield self.runInteraction(
|
|
|
|
"store_room_txn",
|
|
|
|
store_room_txn, next_id,
|
|
|
|
)
|
2014-08-12 08:10:52 -06:00
|
|
|
except Exception as e:
|
|
|
|
logger.error("store_room with room_id=%s failed: %s", room_id, e)
|
|
|
|
raise StoreError(500, "Problem creating room.")
|
|
|
|
|
2016-09-14 10:01:02 -06:00
|
|
|
@defer.inlineCallbacks
|
2016-03-21 08:03:20 -06:00
|
|
|
def set_room_is_public(self, room_id, is_public):
|
2016-09-14 10:01:02 -06:00
|
|
|
def set_room_is_public_txn(txn, next_id):
|
|
|
|
self._simple_update_one_txn(
|
|
|
|
txn,
|
|
|
|
table="rooms",
|
|
|
|
keyvalues={"room_id": room_id},
|
|
|
|
updatevalues={"is_public": is_public},
|
|
|
|
)
|
|
|
|
|
|
|
|
entries = self._simple_select_list_txn(
|
|
|
|
txn,
|
|
|
|
table="public_room_list_stream",
|
2016-12-06 03:43:48 -07:00
|
|
|
keyvalues={
|
|
|
|
"room_id": room_id,
|
|
|
|
"appservice_id": None,
|
|
|
|
"network_id": None,
|
|
|
|
},
|
2016-09-14 10:01:02 -06:00
|
|
|
retcols=("stream_id", "visibility"),
|
|
|
|
)
|
|
|
|
|
|
|
|
entries.sort(key=lambda r: r["stream_id"])
|
|
|
|
|
|
|
|
add_to_stream = True
|
|
|
|
if entries:
|
|
|
|
add_to_stream = bool(entries[-1]["visibility"]) != is_public
|
|
|
|
|
|
|
|
if add_to_stream:
|
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
table="public_room_list_stream",
|
|
|
|
values={
|
|
|
|
"stream_id": next_id,
|
|
|
|
"room_id": room_id,
|
|
|
|
"visibility": is_public,
|
2016-12-06 03:43:48 -07:00
|
|
|
"appservice_id": None,
|
|
|
|
"network_id": None,
|
2016-09-14 10:01:02 -06:00
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
with self._public_room_id_gen.get_next() as next_id:
|
|
|
|
yield self.runInteraction(
|
|
|
|
"set_room_is_public",
|
|
|
|
set_room_is_public_txn, next_id,
|
|
|
|
)
|
2016-12-12 07:28:15 -07:00
|
|
|
self.hs.get_notifier().on_new_replication_data()
|
2016-03-21 08:03:20 -06:00
|
|
|
|
2016-12-06 03:43:48 -07:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def set_room_is_public_appservice(self, room_id, appservice_id, network_id,
|
|
|
|
is_public):
|
|
|
|
"""Edit the appservice/network specific public room list.
|
2016-12-12 04:00:27 -07:00
|
|
|
|
|
|
|
Each appservice can have a number of published room lists associated
|
|
|
|
with them, keyed off of an appservice defined `network_id`, which
|
|
|
|
basically represents a single instance of a bridge to a third party
|
|
|
|
network.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
room_id (str)
|
|
|
|
appservice_id (str)
|
|
|
|
network_id (str)
|
|
|
|
is_public (bool): Whether to publish or unpublish the room from the
|
|
|
|
list.
|
2016-12-06 03:43:48 -07:00
|
|
|
"""
|
|
|
|
def set_room_is_public_appservice_txn(txn, next_id):
|
|
|
|
if is_public:
|
|
|
|
try:
|
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
table="appservice_room_list",
|
|
|
|
values={
|
|
|
|
"appservice_id": appservice_id,
|
2016-12-12 07:28:15 -07:00
|
|
|
"network_id": network_id,
|
2016-12-06 03:43:48 -07:00
|
|
|
"room_id": room_id
|
|
|
|
},
|
|
|
|
)
|
|
|
|
except self.database_engine.module.IntegrityError:
|
|
|
|
# We've already inserted, nothing to do.
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
self._simple_delete_txn(
|
|
|
|
txn,
|
|
|
|
table="appservice_room_list",
|
|
|
|
keyvalues={
|
|
|
|
"appservice_id": appservice_id,
|
|
|
|
"network_id": network_id,
|
|
|
|
"room_id": room_id
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
entries = self._simple_select_list_txn(
|
|
|
|
txn,
|
|
|
|
table="public_room_list_stream",
|
|
|
|
keyvalues={
|
|
|
|
"room_id": room_id,
|
|
|
|
"appservice_id": appservice_id,
|
|
|
|
"network_id": network_id,
|
|
|
|
},
|
|
|
|
retcols=("stream_id", "visibility"),
|
|
|
|
)
|
|
|
|
|
|
|
|
entries.sort(key=lambda r: r["stream_id"])
|
|
|
|
|
|
|
|
add_to_stream = True
|
|
|
|
if entries:
|
|
|
|
add_to_stream = bool(entries[-1]["visibility"]) != is_public
|
|
|
|
|
|
|
|
if add_to_stream:
|
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
table="public_room_list_stream",
|
|
|
|
values={
|
|
|
|
"stream_id": next_id,
|
|
|
|
"room_id": room_id,
|
|
|
|
"visibility": is_public,
|
|
|
|
"appservice_id": appservice_id,
|
|
|
|
"network_id": network_id,
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
with self._public_room_id_gen.get_next() as next_id:
|
|
|
|
yield self.runInteraction(
|
|
|
|
"set_room_is_public_appservice",
|
|
|
|
set_room_is_public_appservice_txn, next_id,
|
|
|
|
)
|
2016-12-12 07:28:15 -07:00
|
|
|
self.hs.get_notifier().on_new_replication_data()
|
2016-12-06 03:43:48 -07:00
|
|
|
|
2016-02-03 06:23:32 -07:00
|
|
|
def get_room_count(self):
|
|
|
|
"""Retrieve a list of all rooms
|
2014-08-12 08:10:52 -06:00
|
|
|
"""
|
2014-08-15 03:26:35 -06:00
|
|
|
|
2015-02-11 11:56:13 -07:00
|
|
|
def f(txn):
|
2016-02-03 06:23:32 -07:00
|
|
|
sql = "SELECT count(*) FROM rooms"
|
|
|
|
txn.execute(sql)
|
|
|
|
row = txn.fetchone()
|
|
|
|
return row[0] or 0
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2016-02-03 06:23:32 -07:00
|
|
|
return self.runInteraction(
|
2015-02-11 11:56:13 -07:00
|
|
|
"get_rooms", f
|
|
|
|
)
|
2014-08-15 03:26:35 -06:00
|
|
|
|
2014-08-26 07:31:48 -06:00
|
|
|
def _store_room_topic_txn(self, txn, event):
|
2014-12-19 08:16:48 -07:00
|
|
|
if hasattr(event, "content") and "topic" in event.content:
|
2014-11-10 11:24:43 -07:00
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
"topics",
|
|
|
|
{
|
|
|
|
"event_id": event.event_id,
|
|
|
|
"room_id": event.room_id,
|
2014-12-19 08:16:48 -07:00
|
|
|
"topic": event.content["topic"],
|
2015-03-20 09:59:18 -06:00
|
|
|
},
|
2014-11-10 11:24:43 -07:00
|
|
|
)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2018-02-03 15:57:33 -07:00
|
|
|
self.store_event_search_txn(
|
|
|
|
txn, event, "content.topic", event.content["topic"],
|
2015-10-12 03:49:53 -06:00
|
|
|
)
|
|
|
|
|
2014-08-26 07:31:48 -06:00
|
|
|
def _store_room_name_txn(self, txn, event):
|
2014-12-19 08:16:48 -07:00
|
|
|
if hasattr(event, "content") and "name" in event.content:
|
2014-11-10 11:24:43 -07:00
|
|
|
self._simple_insert_txn(
|
|
|
|
txn,
|
|
|
|
"room_names",
|
|
|
|
{
|
|
|
|
"event_id": event.event_id,
|
|
|
|
"room_id": event.room_id,
|
2014-12-19 08:16:48 -07:00
|
|
|
"name": event.content["name"],
|
2014-11-10 11:24:43 -07:00
|
|
|
}
|
|
|
|
)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2018-02-03 15:57:33 -07:00
|
|
|
self.store_event_search_txn(
|
|
|
|
txn, event, "content.name", event.content["name"],
|
2015-10-12 03:49:53 -06:00
|
|
|
)
|
|
|
|
|
|
|
|
def _store_room_message_txn(self, txn, event):
|
|
|
|
if hasattr(event, "content") and "body" in event.content:
|
2018-02-03 15:57:33 -07:00
|
|
|
self.store_event_search_txn(
|
|
|
|
txn, event, "content.body", event.content["body"],
|
2015-10-12 03:49:53 -06:00
|
|
|
)
|
|
|
|
|
2015-11-05 07:32:26 -07:00
|
|
|
def _store_history_visibility_txn(self, txn, event):
|
2015-11-10 10:10:27 -07:00
|
|
|
self._store_content_index_txn(txn, event, "history_visibility")
|
|
|
|
|
|
|
|
def _store_guest_access_txn(self, txn, event):
|
|
|
|
self._store_content_index_txn(txn, event, "guest_access")
|
|
|
|
|
|
|
|
def _store_content_index_txn(self, txn, event, key):
|
|
|
|
if hasattr(event, "content") and key in event.content:
|
2015-11-05 07:32:26 -07:00
|
|
|
sql = (
|
2015-11-10 10:10:27 -07:00
|
|
|
"INSERT INTO %(key)s"
|
|
|
|
" (event_id, room_id, %(key)s)"
|
|
|
|
" VALUES (?, ?, ?)" % {"key": key}
|
2015-11-05 07:32:26 -07:00
|
|
|
)
|
|
|
|
txn.execute(sql, (
|
|
|
|
event.event_id,
|
|
|
|
event.room_id,
|
2015-11-10 10:10:27 -07:00
|
|
|
event.content[key]
|
2015-11-05 07:32:26 -07:00
|
|
|
))
|
|
|
|
|
2016-05-04 08:19:12 -06:00
|
|
|
def add_event_report(self, room_id, event_id, user_id, reason, content,
|
|
|
|
received_ts):
|
|
|
|
next_id = self._event_reports_id_gen.get_next()
|
2016-05-04 04:28:10 -06:00
|
|
|
return self._simple_insert(
|
|
|
|
table="event_reports",
|
|
|
|
values={
|
2016-05-04 08:19:12 -06:00
|
|
|
"id": next_id,
|
|
|
|
"received_ts": received_ts,
|
2016-05-04 04:28:10 -06:00
|
|
|
"room_id": room_id,
|
|
|
|
"event_id": event_id,
|
|
|
|
"user_id": user_id,
|
|
|
|
"reason": reason,
|
|
|
|
"content": json.dumps(content),
|
|
|
|
},
|
|
|
|
desc="add_event_report"
|
|
|
|
)
|
2016-09-15 04:27:04 -06:00
|
|
|
|
|
|
|
def get_current_public_room_stream_id(self):
|
|
|
|
return self._public_room_id_gen.get_current_token()
|
|
|
|
|
2016-09-15 04:47:23 -06:00
|
|
|
def get_all_new_public_rooms(self, prev_id, current_id, limit):
|
|
|
|
def get_all_new_public_rooms(txn):
|
|
|
|
sql = ("""
|
2016-12-06 03:43:48 -07:00
|
|
|
SELECT stream_id, room_id, visibility, appservice_id, network_id
|
|
|
|
FROM public_room_list_stream
|
2016-09-15 04:47:23 -06:00
|
|
|
WHERE stream_id > ? AND stream_id <= ?
|
|
|
|
ORDER BY stream_id ASC
|
|
|
|
LIMIT ?
|
|
|
|
""")
|
|
|
|
|
|
|
|
txn.execute(sql, (prev_id, current_id, limit,))
|
|
|
|
return txn.fetchall()
|
|
|
|
|
2016-09-23 08:31:47 -06:00
|
|
|
if prev_id == current_id:
|
|
|
|
return defer.succeed([])
|
|
|
|
|
2016-09-15 04:47:23 -06:00
|
|
|
return self.runInteraction(
|
|
|
|
"get_all_new_public_rooms", get_all_new_public_rooms
|
|
|
|
)
|
2017-05-10 04:05:43 -06:00
|
|
|
|
|
|
|
@cachedInlineCallbacks(max_entries=10000)
|
|
|
|
def get_ratelimit_for_user(self, user_id):
|
|
|
|
"""Check if there are any overrides for ratelimiting for the given
|
|
|
|
user
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
RatelimitOverride if there is an override, else None. If the contents
|
|
|
|
of RatelimitOverride are None or 0 then ratelimitng has been
|
|
|
|
disabled for that user entirely.
|
|
|
|
"""
|
|
|
|
row = yield self._simple_select_one(
|
|
|
|
table="ratelimit_override",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
retcols=("messages_per_second", "burst_count"),
|
|
|
|
allow_none=True,
|
|
|
|
desc="get_ratelimit_for_user",
|
|
|
|
)
|
|
|
|
|
|
|
|
if row:
|
|
|
|
defer.returnValue(RatelimitOverride(
|
|
|
|
messages_per_second=row["messages_per_second"],
|
|
|
|
burst_count=row["burst_count"],
|
|
|
|
))
|
|
|
|
else:
|
|
|
|
defer.returnValue(None)
|
2017-06-19 05:36:28 -06:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def block_room(self, room_id, user_id):
|
|
|
|
yield self._simple_insert(
|
|
|
|
table="blocked_rooms",
|
|
|
|
values={
|
|
|
|
"room_id": room_id,
|
|
|
|
"user_id": user_id,
|
|
|
|
},
|
|
|
|
desc="block_room",
|
|
|
|
)
|
2018-03-01 08:20:54 -07:00
|
|
|
yield self.runInteraction(
|
|
|
|
"block_room_invalidation",
|
|
|
|
self._invalidate_cache_and_stream,
|
|
|
|
self.is_room_blocked, (room_id,),
|
|
|
|
)
|
2017-06-19 10:39:21 -06:00
|
|
|
|
2018-01-20 22:25:23 -07:00
|
|
|
def get_media_mxcs_in_room(self, room_id):
|
2018-01-31 08:07:41 -07:00
|
|
|
"""Retrieves all the local and remote media MXC URIs in a given room
|
|
|
|
|
|
|
|
Args:
|
|
|
|
room_id (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The local and remote media as a lists of tuples where the key is
|
|
|
|
the hostname and the value is the media ID.
|
|
|
|
"""
|
|
|
|
def _get_media_mxcs_in_room_txn(txn):
|
2018-01-31 15:30:38 -07:00
|
|
|
local_mxcs, remote_mxcs = self._get_media_mxcs_in_room_txn(txn, room_id)
|
2018-01-20 22:25:23 -07:00
|
|
|
local_media_mxcs = []
|
|
|
|
remote_media_mxcs = []
|
|
|
|
|
|
|
|
# Convert the IDs to MXC URIs
|
2018-01-31 15:30:38 -07:00
|
|
|
for media_id in local_mxcs:
|
2018-04-26 04:31:22 -06:00
|
|
|
local_media_mxcs.append("mxc://%s/%s" % (self.hs.hostname, media_id))
|
2018-01-31 15:30:38 -07:00
|
|
|
for hostname, media_id in remote_mxcs:
|
2018-01-20 22:25:23 -07:00
|
|
|
remote_media_mxcs.append("mxc://%s/%s" % (hostname, media_id))
|
|
|
|
|
|
|
|
return local_media_mxcs, remote_media_mxcs
|
2018-01-31 08:07:41 -07:00
|
|
|
return self.runInteraction("get_media_ids_in_room", _get_media_mxcs_in_room_txn)
|
2018-01-20 22:25:23 -07:00
|
|
|
|
2017-06-19 10:39:21 -06:00
|
|
|
def quarantine_media_ids_in_room(self, room_id, quarantined_by):
|
|
|
|
"""For a room loops through all events with media and quarantines
|
|
|
|
the associated media
|
|
|
|
"""
|
2018-01-31 08:07:41 -07:00
|
|
|
def _quarantine_media_in_room_txn(txn):
|
2018-01-31 15:30:38 -07:00
|
|
|
local_mxcs, remote_mxcs = self._get_media_mxcs_in_room_txn(txn, room_id)
|
2018-01-20 22:25:23 -07:00
|
|
|
total_media_quarantined = 0
|
2017-06-19 10:39:21 -06:00
|
|
|
|
2018-01-20 22:25:23 -07:00
|
|
|
# Now update all the tables to set the quarantined_by flag
|
2017-06-19 10:39:21 -06:00
|
|
|
|
2018-01-20 22:25:23 -07:00
|
|
|
txn.executemany("""
|
|
|
|
UPDATE local_media_repository
|
|
|
|
SET quarantined_by = ?
|
|
|
|
WHERE media_id = ?
|
2018-01-31 15:30:38 -07:00
|
|
|
""", ((quarantined_by, media_id) for media_id in local_mxcs))
|
2017-06-19 10:39:21 -06:00
|
|
|
|
2018-01-20 22:25:23 -07:00
|
|
|
txn.executemany(
|
2017-06-19 10:39:21 -06:00
|
|
|
"""
|
2018-01-20 22:25:23 -07:00
|
|
|
UPDATE remote_media_cache
|
2017-06-19 10:39:21 -06:00
|
|
|
SET quarantined_by = ?
|
2018-02-01 18:05:47 -07:00
|
|
|
WHERE media_origin = ? AND media_id = ?
|
2018-01-20 22:25:23 -07:00
|
|
|
""",
|
|
|
|
(
|
|
|
|
(quarantined_by, origin, media_id)
|
2018-01-31 15:30:38 -07:00
|
|
|
for origin, media_id in remote_mxcs
|
2017-06-19 10:39:21 -06:00
|
|
|
)
|
2018-01-20 22:25:23 -07:00
|
|
|
)
|
2017-06-19 10:39:21 -06:00
|
|
|
|
2018-01-31 15:30:38 -07:00
|
|
|
total_media_quarantined += len(local_mxcs)
|
|
|
|
total_media_quarantined += len(remote_mxcs)
|
2017-06-19 10:39:21 -06:00
|
|
|
|
|
|
|
return total_media_quarantined
|
|
|
|
|
2018-01-31 15:30:38 -07:00
|
|
|
return self.runInteraction(
|
|
|
|
"quarantine_media_in_room",
|
|
|
|
_quarantine_media_in_room_txn,
|
|
|
|
)
|
2018-01-20 22:25:23 -07:00
|
|
|
|
2018-01-31 08:07:41 -07:00
|
|
|
def _get_media_mxcs_in_room_txn(self, txn, room_id):
|
|
|
|
"""Retrieves all the local and remote media MXC URIs in a given room
|
|
|
|
|
|
|
|
Args:
|
|
|
|
txn (cursor)
|
|
|
|
room_id (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The local and remote media as a lists of tuples where the key is
|
|
|
|
the hostname and the value is the media ID.
|
|
|
|
"""
|
2018-01-20 22:25:23 -07:00
|
|
|
mxc_re = re.compile("^mxc://([^/]+)/([^/#?]+)")
|
|
|
|
|
|
|
|
next_token = self.get_current_events_token() + 1
|
|
|
|
local_media_mxcs = []
|
|
|
|
remote_media_mxcs = []
|
|
|
|
|
|
|
|
while next_token:
|
|
|
|
sql = """
|
2018-03-29 16:05:33 -06:00
|
|
|
SELECT stream_ordering, json FROM events
|
2018-04-25 08:11:18 -06:00
|
|
|
JOIN event_json USING (room_id, event_id)
|
2018-01-20 22:25:23 -07:00
|
|
|
WHERE room_id = ?
|
|
|
|
AND stream_ordering < ?
|
|
|
|
AND contains_url = ? AND outlier = ?
|
|
|
|
ORDER BY stream_ordering DESC
|
|
|
|
LIMIT ?
|
|
|
|
"""
|
|
|
|
txn.execute(sql, (room_id, next_token, True, False, 100))
|
|
|
|
|
|
|
|
next_token = None
|
|
|
|
for stream_ordering, content_json in txn:
|
|
|
|
next_token = stream_ordering
|
2018-03-29 16:05:33 -06:00
|
|
|
event_json = json.loads(content_json)
|
|
|
|
content = event_json["content"]
|
2018-01-20 22:25:23 -07:00
|
|
|
content_url = content.get("url")
|
|
|
|
thumbnail_url = content.get("info", {}).get("thumbnail_url")
|
|
|
|
|
|
|
|
for url in (content_url, thumbnail_url):
|
|
|
|
if not url:
|
|
|
|
continue
|
|
|
|
matches = mxc_re.match(url)
|
|
|
|
if matches:
|
|
|
|
hostname = matches.group(1)
|
|
|
|
media_id = matches.group(2)
|
2018-04-26 04:31:22 -06:00
|
|
|
if hostname == self.hs.hostname:
|
2018-01-20 22:25:23 -07:00
|
|
|
local_media_mxcs.append(media_id)
|
|
|
|
else:
|
|
|
|
remote_media_mxcs.append((hostname, media_id))
|
|
|
|
|
|
|
|
return local_media_mxcs, remote_media_mxcs
|