# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import defer

import baserules

import logging
import simplejson as json
import re

from synapse.types import UserID
from synapse.util.caches.lrucache import LruCache

logger = logging.getLogger(__name__)


GLOB_REGEX = re.compile(r'\\\[(\\\!|)(.*)\\\]')
IS_GLOB = re.compile(r'[\?\*\[\]]')
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")

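# IS_GLOB detects whether a pattern contains any glob metacharacters
# (?, *, [ or ]). GLOB_REGEX rewrites the re.escape()d form of [abc], [a-z]
# and [!a-z] ranges back into real regex character classes. INEQUALITY_EXPR
# parses the 'is' field of a room_member_count condition, e.g.
# INEQUALITY_EXPR.match(">=2") yields groups ('>=', '2').

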
@defer.inlineCallbacks
def evaluator_for_user_id_and_profile_tag(user_id, profile_tag, room_id, store):
    rawrules = yield store.get_push_rules_for_user(user_id)
    enabled_map = yield store.get_push_rules_enabled_for_user(user_id)
    our_member_event = yield store.get_current_state(
        room_id=room_id,
        event_type='m.room.member',
        state_key=user_id,
    )

    defer.returnValue(PushRuleEvaluator(
        user_id, profile_tag, rawrules, enabled_map,
        room_id, our_member_event, store
    ))

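# Example usage (illustrative sketch only; the real caller lives in the
# pusher code, which is not shown here):
#
#     evaluator = yield evaluator_for_user_id_and_profile_tag(
#         user_id, profile_tag, event['room_id'], store
#     )
#     actions = yield evaluator.actions_for_event(event)

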
def _room_member_count(ev, condition, room_member_count):
    if 'is' not in condition:
        return False
    m = INEQUALITY_EXPR.match(condition['is'])
    if not m:
        return False
    ineq = m.group(1)
    rhs = m.group(2)
    if not rhs.isdigit():
        return False
    rhs = int(rhs)

    if ineq == '' or ineq == '==':
        return room_member_count == rhs
    elif ineq == '<':
        return room_member_count < rhs
    elif ineq == '>':
        return room_member_count > rhs
    elif ineq == '>=':
        return room_member_count >= rhs
    elif ineq == '<=':
        return room_member_count <= rhs
    else:
        return False

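# For example, _room_member_count() returns True for
#     condition = {'kind': 'room_member_count', 'is': '>=2'}
# in a room with five members, while a bare number such as 'is': '2' means
# "exactly two members".

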
class PushRuleEvaluator:
    DEFAULT_ACTIONS = []

    def __init__(self, user_id, profile_tag, raw_rules, enabled_map, room_id,
                 our_member_event, store):
        self.user_id = user_id
        self.profile_tag = profile_tag
        self.room_id = room_id
        self.our_member_event = our_member_event
        self.store = store

        rules = []
        for raw_rule in raw_rules:
            rule = dict(raw_rule)
            rule['conditions'] = json.loads(raw_rule['conditions'])
            rule['actions'] = json.loads(raw_rule['actions'])
            rules.append(rule)

        self.rules = baserules.list_with_base_rules(rules)

        self.enabled_map = enabled_map

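    # The 'conditions' and 'actions' columns of each raw rule arrive as
    # JSON-encoded strings (hence the json.loads calls above). A decoded
    # condition list looks roughly like (illustrative example):
    #     [{"kind": "event_match", "key": "content.body", "pattern": "lunch"}]
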
    @staticmethod
    def tweaks_for_actions(actions):
        tweaks = {}
        for a in actions:
            if not isinstance(a, dict):
                continue
            if 'set_tweak' in a and 'value' in a:
                tweaks[a['set_tweak']] = a['value']
        return tweaks

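    # For example, tweaks_for_actions(
    #     ["notify", {"set_tweak": "sound", "value": "default"}]
    # ) returns {"sound": "default"}.
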
    @defer.inlineCallbacks
    def actions_for_event(self, ev):
        """
        This should take into account notification settings that the user
        has configured both globally and per-room when we have the ability
        to do such things.
        """
        if ev['user_id'] == self.user_id:
            # let's assume you probably know about messages you sent yourself
            defer.returnValue([])

        room_id = ev['room_id']

        # get *our* member event for display name matching
        my_display_name = None

        if self.our_member_event:
            my_display_name = self.our_member_event[0].content.get("displayname")

        room_members = yield self.store.get_users_in_room(room_id)
        room_member_count = len(room_members)

        evaluator = PushRuleEvaluatorForEvent(ev, room_member_count)

        for r in self.rules:
            enabled = self.enabled_map.get(r['rule_id'], None)
            if enabled is not None and not enabled:
                continue

            if not r.get("enabled", True):
                continue

            conditions = r['conditions']
            actions = r['actions']

            # ignore rules with no actions (we have an explicit 'dont_notify')
            if len(actions) == 0:
                logger.warn(
                    "Ignoring rule id %s with no actions for user %s",
                    r['rule_id'], self.user_id
                )
                continue

            matches = True
            for c in conditions:
                matches = evaluator.matches(
                    c, self.user_id, my_display_name, self.profile_tag
                )
                if not matches:
                    break

            logger.debug(
                "Rule %s %s",
                r['rule_id'], "matches" if matches else "doesn't match"
            )

            if matches:
                logger.debug(
                    "%s matches for user %s, event %s",
                    r['rule_id'], self.user_id, ev['event_id']
                )

                # filter out dont_notify as we treat an empty actions list
                # as dont_notify, and this doesn't take up a row in our database
                actions = [x for x in actions if x != 'dont_notify']

                defer.returnValue(actions)

        logger.debug(
            "No rules match for user %s, event %s",
            self.user_id, ev['event_id']
        )
        defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS)


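# Rule evaluation above is first-match-wins: the rules (the user's own rules
# merged with the server defaults by baserules.list_with_base_rules) are
# walked in order, a rule matches only if every one of its conditions
# matches, and the first enabled matching rule supplies the actions, with
# any 'dont_notify' entries stripped out.

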
class PushRuleEvaluatorForEvent(object):
    def __init__(self, event, room_member_count):
        self._event = event
        self._room_member_count = room_member_count

        # Maps strings of e.g. 'content.body' -> event["content"]["body"]
        self._value_cache = _flatten_dict(event)

    def matches(self, condition, user_id, display_name, profile_tag):
        if condition['kind'] == 'event_match':
            return self._event_match(condition, user_id)
        elif condition['kind'] == 'device':
            if 'profile_tag' not in condition:
                return True
            return condition['profile_tag'] == profile_tag
        elif condition['kind'] == 'contains_display_name':
            return self._contains_display_name(display_name)
        elif condition['kind'] == 'room_member_count':
            return _room_member_count(
                self._event, condition, self._room_member_count
            )
        else:
            return True

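    # Unknown condition kinds fall through to True above, so a rule with an
    # unrecognised condition is still treated as matching. An event_match
    # condition looks like (illustrative example):
    #     {"kind": "event_match", "key": "content.body", "pattern": "lunch"}
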
    def _event_match(self, condition, user_id):
        pattern = condition.get('pattern', None)

        if not pattern:
            pattern_type = condition.get('pattern_type', None)
            if pattern_type == "user_id":
                pattern = user_id
            elif pattern_type == "user_localpart":
                pattern = UserID.from_string(user_id).localpart

        if not pattern:
            logger.warn("event_match condition with no pattern")
            return False

        # XXX: optimisation: cache our pattern regexps
        if condition['key'] == 'content.body':
            body = self._event["content"].get("body", None)
            if not body:
                return False

            return _glob_matches(pattern, body, word_boundary=True)
        else:
            haystack = self._get_value(condition['key'])
            if haystack is None:
                return False

            return _glob_matches(pattern, haystack)

    def _contains_display_name(self, display_name):
        if not display_name:
            return False

        body = self._event["content"].get("body", None)
        if not body:
            return False

        return _glob_matches(display_name, body, word_boundary=True)

    def _get_value(self, dotted_key):
        return self._value_cache.get(dotted_key, None)


def _glob_matches(glob, value, word_boundary=False):
    """Tests if value matches glob.

    Args:
        glob (string)
        value (string): String to test against glob.
        word_boundary (bool): Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        bool
    """
    try:
        if IS_GLOB.search(glob):
            r = re.escape(glob)

            r = r.replace(r'\*', '.*?')
            r = r.replace(r'\?', '.')

            # handle [abc], [a-z] and [!a-z] style ranges.
            r = GLOB_REGEX.sub(
                lambda x: (
                    '[%s%s]' % (
                        x.group(1) and '^' or '',
                        x.group(2).replace(r'\\\-', '-')
                    )
                ),
                r,
            )
            if word_boundary:
                r = r"\b%s\b" % (r,)
                r = _compile_regex(r)

                return r.search(value)
            else:
                r = r + "$"
                r = _compile_regex(r)

                return r.match(value)
        elif word_boundary:
            r = re.escape(glob)
            r = r"\b%s\b" % (r,)
            r = _compile_regex(r)

            return r.search(value)
        else:
            return value.lower() == glob.lower()
    except re.error:
        logger.warn("Failed to parse glob to regex: %r", glob)
        return False


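# For example, _glob_matches("cake*", "cake or death") and
# _glob_matches("ca?e", "Cafe") are truthy, _glob_matches("cake", "Like cake?",
# word_boundary=True) matches on the word boundary, and all matching is
# case-insensitive.

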
def _flatten_dict(d, prefix=None, result=None):
    # Avoid mutable default arguments: a shared default dict would leak
    # flattened values from one event into the cache built for the next.
    if prefix is None:
        prefix = []
    if result is None:
        result = {}
    for key, value in d.items():
        if isinstance(value, basestring):
            result[".".join(prefix + [key])] = value.lower()
        elif hasattr(value, "items"):
            _flatten_dict(value, prefix=(prefix + [key]), result=result)

    return result


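# For example, _flatten_dict({"content": {"body": "Hi!"}}) returns
# {"content.body": "hi!"}; string values are lower-cased as they are
# flattened.

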
regex_cache = LruCache(5000)


def _compile_regex(regex_str):
    r = regex_cache.get(regex_str, None)
    if r:
        return r

    r = re.compile(regex_str, flags=re.IGNORECASE)
    regex_cache[regex_str] = r
    return r


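# _compile_regex memoises compiled patterns in an LruCache so that evaluating
# the same push rules against many events does not recompile the same regex
# each time, while keeping memory bounded (at most 5000 cached patterns).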