# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import re
from typing import (
    Any,
    Dict,
    List,
    Mapping,
    Optional,
    Pattern,
    Sequence,
    Set,
    Tuple,
    Union,
)

from matrix_common.regex import glob_to_regex, to_word_pattern

from synapse.events import EventBase
from synapse.types import UserID
from synapse.util.caches.lrucache import LruCache

logger = logging.getLogger(__name__)


GLOB_REGEX = re.compile(r"\\\[(\\\!|)(.*)\\\]")
IS_GLOB = re.compile(r"[\?\*\[\]]")
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")


def _room_member_count(
    ev: EventBase, condition: Mapping[str, Any], room_member_count: int
) -> bool:
    return _test_ineq_condition(condition, room_member_count)


def _sender_notification_permission(
    ev: EventBase,
    condition: Mapping[str, Any],
    sender_power_level: int,
    power_levels: Dict[str, Union[int, Dict[str, int]]],
) -> bool:
    notif_level_key = condition.get("key")
    if notif_level_key is None:
        return False

    notif_levels = power_levels.get("notifications", {})
    assert isinstance(notif_levels, dict)
    room_notif_level = notif_levels.get(notif_level_key, 50)

    return sender_power_level >= room_notif_level


def _test_ineq_condition(condition: Mapping[str, Any], number: int) -> bool:
    if "is" not in condition:
        return False
    m = INEQUALITY_EXPR.match(condition["is"])
    if not m:
        return False
    ineq = m.group(1)
    rhs = m.group(2)
    if not rhs.isdigit():
        return False
    rhs_int = int(rhs)

    if ineq == "" or ineq == "==":
        return number == rhs_int
    elif ineq == "<":
        return number < rhs_int
    elif ineq == ">":
        return number > rhs_int
    elif ineq == ">=":
        return number >= rhs_int
    elif ineq == "<=":
        return number <= rhs_int
    else:
        return False
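
# A quick illustration (not part of the evaluator itself) of how "is" values
# are interpreted, assuming the helper above behaves as written:
#
#     _test_ineq_condition({"is": "2"}, 2)     -> True   (bare number means ==)
#     _test_ineq_condition({"is": ">=10"}, 3)  -> False
#     _test_ineq_condition({"is": "<100"}, 3)  -> True
#     _test_ineq_condition({"is": "lots"}, 3)  -> False  (unparseable, no match)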


def tweaks_for_actions(actions: List[Union[str, Dict]]) -> Dict[str, Any]:
    """
    Converts a list of actions into a `tweaks` dict (which can then be passed to
    the push gateway).

    This function ignores all actions other than `set_tweak` actions, and treats
    absent `value`s as `True`, which agrees with the only spec-defined treatment
    of absent `value`s (namely, for `highlight` tweaks).

    Args:
        actions: list of actions
            e.g. [
                {"set_tweak": "a", "value": "AAA"},
                {"set_tweak": "b", "value": "BBB"},
                {"set_tweak": "highlight"},
                "notify"
            ]

    Returns:
        dictionary of tweaks for those actions
            e.g. {"a": "AAA", "b": "BBB", "highlight": True}
    """
    tweaks = {}
    for a in actions:
        if not isinstance(a, dict):
            continue
        if "set_tweak" in a:
            # value is allowed to be absent in which case the value assumed
            # should be True.
            tweaks[a["set_tweak"]] = a.get("value", True)
    return tweaks
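
# For example (illustrative only), following the logic above:
#
#     tweaks_for_actions(
#         [{"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}, "notify"]
#     )
#     # -> {"sound": "default", "highlight": True}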


class PushRuleEvaluatorForEvent:
    def __init__(
        self,
        event: EventBase,
        room_member_count: int,
        sender_power_level: int,
        power_levels: Dict[str, Union[int, Dict[str, int]]],
        relations: Dict[str, Set[Tuple[str, str]]],
        relations_match_enabled: bool,
    ):
        self._event = event
        self._room_member_count = room_member_count
        self._sender_power_level = sender_power_level
        self._power_levels = power_levels
        self._relations = relations
        self._relations_match_enabled = relations_match_enabled

        # Maps strings of e.g. 'content.body' -> event["content"]["body"]
        self._value_cache = _flatten_dict(event)

        # Maps cache keys to final values.
        self._condition_cache: Dict[str, bool] = {}

    def check_conditions(
        self, conditions: Sequence[Mapping], uid: str, display_name: Optional[str]
    ) -> bool:
        """
        Returns true if a user's conditions/user ID/display name match the event.

        Args:
            conditions: The user's conditions to match.
            uid: The user's MXID.
            display_name: The display name, or None if there is not one.

        Returns:
            True if all conditions match the event, False otherwise.
        """
        for cond in conditions:
            _cache_key = cond.get("_cache_key", None)
            if _cache_key:
                res = self._condition_cache.get(_cache_key, None)
                if res is False:
                    return False
                elif res is True:
                    continue

            res = self.matches(cond, uid, display_name)
            if _cache_key:
                self._condition_cache[_cache_key] = bool(res)

            if not res:
                return False

        return True
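
    # A rough sketch (names are illustrative, not part of this module) of how
    # the evaluator is typically driven: build it once per event, then check
    # each push rule's conditions against it.
    #
    #     evaluator = PushRuleEvaluatorForEvent(
    #         event,                      # a synapse.events.EventBase
    #         room_member_count=2,
    #         sender_power_level=50,
    #         power_levels=power_levels_content,
    #         relations={},
    #         relations_match_enabled=False,
    #     )
    #     if evaluator.check_conditions(rule_conditions, user_id, display_name):
    #         ...  # apply the rule's actions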

    def matches(
        self, condition: Mapping[str, Any], user_id: str, display_name: Optional[str]
    ) -> bool:
        """
        Returns true if a user's condition/user ID/display name match the event.

        Args:
            condition: The user's condition to match.
            user_id: The user's MXID.
            display_name: The display name, or None if there is not one.

        Returns:
            True if the condition matches the event, False otherwise.
        """
        if condition["kind"] == "event_match":
            return self._event_match(condition, user_id)
        elif condition["kind"] == "contains_display_name":
            return self._contains_display_name(display_name)
        elif condition["kind"] == "room_member_count":
            return _room_member_count(self._event, condition, self._room_member_count)
        elif condition["kind"] == "sender_notification_permission":
            return _sender_notification_permission(
                self._event, condition, self._sender_power_level, self._power_levels
            )
        elif (
            condition["kind"] == "org.matrix.msc3772.relation_match"
            and self._relations_match_enabled
        ):
            return self._relation_match(condition, user_id)
        else:
            # XXX This looks incorrect -- we have reached an unknown condition
            #     kind and are unconditionally returning that it matches. Note
            #     that it seems possible to provide a condition to the /pushrules
            #     endpoint with an unknown kind, see _rule_tuple_from_request_object.
            return True
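
    # Illustrative condition shapes dispatched above (values are examples only;
    # the exact fields come from the Matrix push rules spec):
    #
    #     {"kind": "event_match", "key": "content.body", "pattern": "lunch"}
    #     {"kind": "contains_display_name"}
    #     {"kind": "room_member_count", "is": "2"}
    #     {"kind": "sender_notification_permission", "key": "room"}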

    def _event_match(self, condition: Mapping, user_id: str) -> bool:
        """
        Check an "event_match" push rule condition.

        Args:
            condition: The "event_match" push rule condition to match.
            user_id: The user's MXID.

        Returns:
            True if the condition matches the event, False otherwise.
        """
        pattern = condition.get("pattern", None)

        if not pattern:
            pattern_type = condition.get("pattern_type", None)
            if pattern_type == "user_id":
                pattern = user_id
            elif pattern_type == "user_localpart":
                pattern = UserID.from_string(user_id).localpart

        if not pattern:
            logger.warning("event_match condition with no pattern")
            return False

        # XXX: optimisation: cache our pattern regexps
        if condition["key"] == "content.body":
            body = self._event.content.get("body", None)
            if not body or not isinstance(body, str):
                return False

            return _glob_matches(pattern, body, word_boundary=True)
        else:
            haystack = self._value_cache.get(condition["key"], None)
            if haystack is None:
                return False

            return _glob_matches(pattern, haystack)

    def _contains_display_name(self, display_name: Optional[str]) -> bool:
        """
        Check a "contains_display_name" push rule condition.

        Args:
            display_name: The display name, or None if there is not one.

        Returns:
            True if the display name is found in the event body, False otherwise.
        """
        if not display_name:
            return False

        body = self._event.content.get("body", None)
        if not body or not isinstance(body, str):
            return False

        # Similar to _glob_matches, but do not treat display_name as a glob.
        r = regex_cache.get((display_name, False, True), None)
        if not r:
            r1 = re.escape(display_name)
            r1 = to_word_pattern(r1)
            r = re.compile(r1, flags=re.IGNORECASE)
            regex_cache[(display_name, False, True)] = r

        return bool(r.search(body))
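
    # For example (illustrative only), a display name of "Bob" would match a
    # body of "hi bob!" but not "bobsleigh": the escaped name is wrapped in a
    # word-boundary pattern and matched case-insensitively.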

    def _relation_match(self, condition: Mapping, user_id: str) -> bool:
        """
        Check a "relation_match" push rule condition.

        Args:
            condition: The "relation_match" push rule condition to match.
            user_id: The user's MXID.

        Returns:
            True if the condition matches the event, False otherwise.
        """
        rel_type = condition.get("rel_type")
        if not rel_type:
            logger.warning("relation_match condition missing rel_type")
            return False

        sender_pattern = condition.get("sender")
        if sender_pattern is None:
            sender_type = condition.get("sender_type")
            if sender_type == "user_id":
                sender_pattern = user_id
        type_pattern = condition.get("type")

        # If any of the relations match, return True.
        for sender, event_type in self._relations.get(rel_type, ()):
            if sender_pattern and not _glob_matches(sender_pattern, sender):
                continue
            if type_pattern and not _glob_matches(type_pattern, event_type):
                continue
            # All values must have matched.
            return True

        # No relations matched.
        return False
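
    # An illustrative (experimental, MSC3772) condition shape handled above;
    # the rel_type value is only an example:
    #
    #     {
    #         "kind": "org.matrix.msc3772.relation_match",
    #         "rel_type": "m.thread",
    #         "sender_type": "user_id",
    #     }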


# Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches
regex_cache: LruCache[Tuple[str, bool, bool], Pattern] = LruCache(
    50000, "regex_push_cache"
)


def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
    """Tests if value matches glob.

    Args:
        glob: Glob pattern to match against.
        value: String to test against glob.
        word_boundary: Whether to match against word boundaries or entire
            string. Defaults to False.
    """

    try:
        r = regex_cache.get((glob, True, word_boundary), None)
        if not r:
            r = glob_to_regex(glob, word_boundary=word_boundary)
            regex_cache[(glob, True, word_boundary)] = r
        return bool(r.search(value))
    except re.error:
        logger.warning("Failed to parse glob to regex: %r", glob)
        return False
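
# Illustrative behaviour (assuming the usual glob semantics provided by
# matrix_common.regex.glob_to_regex: "*" matches any run of characters and
# "?" matches a single character):
#
#     _glob_matches("cak*", "cake")                              # True
#     _glob_matches("cake", "lovely cake", word_boundary=True)   # True
#     _glob_matches("cake", "pancakes", word_boundary=True)      # False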


def _flatten_dict(
    d: Union[EventBase, Mapping[str, Any]],
    prefix: Optional[List[str]] = None,
    result: Optional[Dict[str, str]] = None,
) -> Dict[str, str]:
    if prefix is None:
        prefix = []
    if result is None:
        result = {}
    for key, value in d.items():
        if isinstance(value, str):
            result[".".join(prefix + [key])] = value.lower()
        elif isinstance(value, Mapping):
            _flatten_dict(value, prefix=(prefix + [key]), result=result)

    return result
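
# Illustrative example of the flattening performed above (note that string
# values are lower-cased):
#
#     _flatten_dict({"content": {"body": "Hello World", "msgtype": "m.text"}})
#     # -> {"content.body": "hello world", "content.msgtype": "m.text"}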