Merge branch 'develop' of github.com:matrix-org/synapse into state-chache
commit eae0842bc1
@@ -157,15 +157,19 @@ class TransactionQueue(object):
         else:
             logger.info("TX [%s] is ready for retry", destination)
 
-        logger.info("TX [%s] _attempt_new_transaction", destination)
-
         if destination in self.pending_transactions:
             # XXX: pending_transactions can get stuck on by a never-ending
             # request at which point pending_pdus_by_dest just keeps growing.
             # we need application-layer timeouts of some flavour of these
             # requests
+            logger.info(
+                "TX [%s] Transaction already in progress",
+                destination
+            )
             return
 
+        logger.info("TX [%s] _attempt_new_transaction", destination)
+
         # list of (pending_pdu, deferred, order)
         pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
         pending_edus = self.pending_edus_by_dest.pop(destination, [])
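For context on the guard that the new log lines sit inside: the queue keeps at most one in-flight transaction per destination and bails out early if one is already running. A rough standalone sketch of that pattern (hypothetical names, not Synapse's actual class):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DestinationQueueSketch(object):
    """Illustrative only: allow one in-flight transaction per destination."""

    def __init__(self):
        self.pending_transactions = {}  # destination -> in-flight marker

    def attempt_new_transaction(self, destination):
        if destination in self.pending_transactions:
            # Same idea as the added logging above: note the skip and return.
            logger.info("TX [%s] Transaction already in progress", destination)
            return
        logger.info("TX [%s] _attempt_new_transaction", destination)
        self.pending_transactions[destination] = 1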
@@ -176,6 +180,7 @@ class TransactionQueue(object):
             destination, len(pending_pdus))
 
         if not pending_pdus and not pending_edus and not pending_failures:
+            logger.info("TX [%s] Nothing to send", destination)
             return
 
         logger.debug(
@@ -658,7 +658,9 @@ class PresenceHandler(BaseHandler):
 
         observers = set(self._remote_recvmap.get(user, set()))
         if observers:
-            logger.debug(" | %d interested local observers %r", len(observers), observers)
+            logger.debug(
+                " | %d interested local observers %r", len(observers), observers
+            )
 
         rm_handler = self.homeserver.get_handlers().room_member_handler
         room_ids = yield rm_handler.get_rooms_for_user(user)
@@ -105,17 +105,20 @@ class RegistrationHandler(BaseHandler):
         # do it here.
         try:
             auth_user = UserID.from_string(user_id)
-            identicon_resource = self.hs.get_resource_for_media_repository().getChildWithDefault("identicon", None)
-            upload_resource = self.hs.get_resource_for_media_repository().getChildWithDefault("upload", None)
+            media_repository = self.hs.get_resource_for_media_repository()
+            identicon_resource = media_repository.getChildWithDefault("identicon", None)
+            upload_resource = media_repository.getChildWithDefault("upload", None)
             identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
             content_uri = yield upload_resource.create_content(
                 "image/png", None, identicon_bytes, len(identicon_bytes), auth_user
             )
             profile_handler = self.hs.get_handlers().profile_handler
-            profile_handler.set_avatar_url(auth_user, auth_user, ("%s#auto" % content_uri))
+            profile_handler.set_avatar_url(
+                auth_user, auth_user, ("%s#auto" % (content_uri,))
+            )
         except NotImplementedError:
             pass # make tests pass without messing around creating default avatars
 
         defer.returnValue((user_id, token))
 
     @defer.inlineCallbacks
@@ -140,7 +140,7 @@ class Pusher(object):
                    lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
                                           re.sub(r'\\\-', '-', x.group(2)))), r)
         return r
 
     def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
         if condition['kind'] == 'event_match':
             if 'pattern' not in condition:
@@ -170,8 +170,10 @@ class Pusher(object):
                 return False
             if not display_name:
                 return False
-            return re.search("\b%s\b" % re.escape(display_name),
-                             ev['content']['body'], flags=re.IGNORECASE) is not None
+            return re.search(
+                "\b%s\b" % re.escape(display_name), ev['content']['body'],
+                flags=re.IGNORECASE
+            ) is not None
 
         elif condition['kind'] == 'room_member_count':
             if 'is' not in condition:
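The reformatted re.search call above checks whether the event body mentions the display name as a whole word. A standalone sketch of that kind of check (note that the word-boundary escape only survives in a raw string, r"\b"; in a plain Python string "\b" is the backspace character):

import re


def body_mentions_display_name(body, display_name):
    # Whole-word, case-insensitive match on the display name.
    return re.search(
        r"\b%s\b" % re.escape(display_name), body, flags=re.IGNORECASE
    ) is not None


print(body_mentions_display_name("hey Bob, are you around?", "bob"))  # True
print(body_mentions_display_name("the bobsleigh team", "bob"))        # False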
@@ -1,5 +1,6 @@
 from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
 
+
 def list_with_base_rules(rawrules, user_name):
     ruleslist = []
 
@@ -9,9 +10,9 @@ def list_with_base_rules(rawrules, user_name):
         if r['priority_class'] < current_prio_class:
             while r['priority_class'] < current_prio_class:
                 ruleslist.extend(make_base_rules(
                     user_name,
-                    PRIORITY_CLASS_INVERSE_MAP[current_prio_class])
-                )
+                    PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
+                ))
                 current_prio_class -= 1
 
         ruleslist.append(r)
@@ -19,8 +20,8 @@ def list_with_base_rules(rawrules, user_name):
     while current_prio_class > 0:
         ruleslist.extend(make_base_rules(
             user_name,
-            PRIORITY_CLASS_INVERSE_MAP[current_prio_class])
-        )
+            PRIORITY_CLASS_INVERSE_MAP[current_prio_class]
+        ))
         current_prio_class -= 1
 
     return ruleslist
@@ -1,8 +1,8 @@
 PRIORITY_CLASS_MAP = {
     'underride': 1,
     'sender': 2,
     'room': 3,
     'content': 4,
     'override': 5,
 }
 PRIORITY_CLASS_INVERSE_MAP = {v: k for k, v in PRIORITY_CLASS_MAP.items()}
@@ -19,10 +19,11 @@ REQUIREMENTS = {
     "pydenticon": ["pydenticon"],
 }
 
+
 def github_link(project, version, egg):
     return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
 
-DEPENDENCY_LINKS=[
+DEPENDENCY_LINKS = [
     github_link(
         project="matrix-org/syutil",
         version="v0.0.2",
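For reference, the github_link helper in the hunk above only interpolates its arguments into a tarball URL; a quick usage example (the egg name here is illustrative):

def github_link(project, version, egg):
    return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)


print(github_link(project="matrix-org/syutil", version="v0.0.2", egg="syutil-0.0.2"))
# https://github.com/matrix-org/syutil/tarball/v0.0.2/#egg=syutil-0.0.2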
@@ -101,6 +102,7 @@ def check_requirements():
                 % (dependency, file_path, version, required_version)
             )
 
+
 def list_requirements():
     result = []
     linked = []
@@ -111,7 +113,7 @@ def list_requirements():
     for requirement in REQUIREMENTS:
         is_linked = False
         for link in linked:
-            if requirement.replace('-','_').startswith(link):
+            if requirement.replace('-', '_').startswith(link):
                 is_linked = True
         if not is_linked:
             result.append(requirement)
@@ -15,12 +15,17 @@
 
 from twisted.internet import defer
 
-from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError, NotFoundError, \
-    StoreError
+from synapse.api.errors import (
+    SynapseError, Codes, UnrecognizedRequestError, NotFoundError, StoreError
+)
 from .base import ClientV1RestServlet, client_path_pattern
-from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException
+from synapse.storage.push_rule import (
+    InconsistentRuleException, RuleNotFoundException
+)
 import synapse.push.baserules as baserules
-from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
+from synapse.push.rulekinds import (
+    PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
+)
 
 import json
 
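The import changes above are stylistic: a backslash-continued import and a parenthesised one are equivalent, with the parenthesised form being the one PEP 8 prefers. A minimal illustration using stdlib names rather than Synapse's modules:

# Backslash continuation (the old style being removed):
from os.path import join, dirname, basename, \
    exists

# Parenthesised form (the new style), easier to extend and reflow:
from os.path import (
    join, dirname, basename, exists
)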
@@ -105,7 +110,9 @@ class PushRuleRestServlet(ClientV1RestServlet):
         # we build up the full structure and then decide which bits of it
         # to send which means doing unnecessary work sometimes but is
         # is probably not going to make a whole lot of difference
-        rawrules = yield self.hs.get_datastore().get_push_rules_for_user_name(user.to_string())
+        rawrules = yield self.hs.get_datastore().get_push_rules_for_user_name(
+            user.to_string()
+        )
 
         for r in rawrules:
             r["conditions"] = json.loads(r["conditions"])
@@ -383,6 +390,7 @@ def _namespaced_rule_id_from_spec(spec):
 def _rule_id_from_namespaced(in_rule_id):
     return in_rule_id.split('/')[-1]
 
+
 class InvalidRuleException(Exception):
     pass
 
@@ -34,8 +34,8 @@ class PusherRestServlet(ClientV1RestServlet):
         pusher_pool = self.hs.get_pusherpool()
 
         if ('pushkey' in content and 'app_id' in content
                 and 'kind' in content and
                 content['kind'] is None):
             yield pusher_pool.remove_pusher(
                 content['app_id'], content['pushkey']
             )
@@ -38,9 +38,10 @@ class UploadResource(BaseMediaResource):
     def render_OPTIONS(self, request):
         respond_with_json(request, 200, {}, send_cors=True)
         return NOT_DONE_YET
 
     @defer.inlineCallbacks
-    def create_content(self, media_type, upload_name, content, content_length, auth_user):
+    def create_content(self, media_type, upload_name, content, content_length,
+                       auth_user):
         media_id = random_string(24)
 
         fname = self.filepaths.local_media_filepath(media_id)
@@ -65,7 +66,7 @@ class UploadResource(BaseMediaResource):
         }
 
         yield self._generate_local_thumbnails(media_id, media_info)
 
         defer.returnValue("mxc://%s/%s" % (self.server_name, media_id))
 
     @defer.inlineCallbacks
@@ -84,7 +84,7 @@ class PerformanceCounters(object):
 
     def update(self, key, start_time, end_time=None):
         if end_time is None:
-            end_time = time.time() * 1000;
+            end_time = time.time() * 1000
         duration = end_time - start_time
         count, cum_time = self.current_counters.get(key, (0, 0))
         count += 1
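The semicolon removals in this and the following two hunks are pure cleanup; the surrounding logic times operations in milliseconds and accumulates a (count, cumulative ms) pair per key. A rough standalone sketch of that bookkeeping (illustrative, not Synapse's actual class):

import time


class PerformanceCountersSketch(object):
    """Illustrative per-key (count, cumulative milliseconds) accumulator."""

    def __init__(self):
        self.current_counters = {}

    def update(self, key, start_time, end_time=None):
        if end_time is None:
            end_time = time.time() * 1000  # wall-clock milliseconds
        duration = end_time - start_time
        count, cum_time = self.current_counters.get(key, (0, 0))
        self.current_counters[key] = (count + 1, cum_time + duration)
        return end_time


counters = PerformanceCountersSketch()
start = time.time() * 1000
counters.update("get_event", start)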
@@ -588,7 +588,7 @@ class SQLBaseStore(object):
             "LIMIT 1 "
         )
 
-        start_time = time.time() * 1000;
+        start_time = time.time() * 1000
 
         txn.execute(sql, (event_id,))
 
@@ -613,7 +613,7 @@ class SQLBaseStore(object):
     def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                                 check_redacted=True, get_prev_content=False):
 
-        start_time = time.time() * 1000;
+        start_time = time.time() * 1000
         update_counter = self._get_event_counters.update
 
         d = json.loads(js)
@@ -91,7 +91,9 @@ class PushRuleStore(SQLBaseStore):
         txn.execute(sql, (user_name, relative_to_rule))
         res = txn.fetchall()
         if not res:
-            raise RuleNotFoundException("before/after rule not found: %s" % (relative_to_rule))
+            raise RuleNotFoundException(
+                "before/after rule not found: %s" % (relative_to_rule,)
+            )
         priority_class, base_rule_priority = res[0]
 
         if 'priority_class' in kwargs and kwargs['priority_class'] != priority_class:
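The trailing comma added in the last hunk is the meaningful part: ("x") is just a parenthesised string, while ("x",) is a one-element tuple, which is the safe right-hand side for %-formatting when the substituted value could itself be a tuple. For example (the rule id here is made up):

rule_id = "some.rule.id"

print("before/after rule not found: %s" % (rule_id))   # works, but not a tuple
print("before/after rule not found: %s" % (rule_id,))  # one-element tuple

pair = ("before", "after")
print("before/after rule not found: %s" % (pair,))     # OK: prints the tuple
# "before/after rule not found: %s" % (pair)           # TypeError: not all
#                                                      # arguments converted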