Consistently use six's iteritems and wrap lazy keys/values in list() if they're not meant to be lazy (#3307)
parent 872cf43516
commit c936a52a9e
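Note on the pattern applied throughout this diff: on Python 2, dict.keys()/values()/items() return fresh lists and iteritems()/itervalues() return iterators, while on Python 3 the iter* methods are gone and keys()/values()/items() return lazy views. six.iteritems/itervalues select the lazy form on both interpreters, and list() makes the eager copy explicit wherever a real list is required. A minimal sketch (illustrative names, not code from this repo):

from six import iteritems, itervalues

counts = {"a": 1, "b": 2}

# Lazy iteration on both Python 2 and 3: no intermediate list is built.
for key, value in iteritems(counts):
    print(key, value)

total = sum(itervalues(counts))

# Eager copy: on Python 3, counts.values() is a view that tracks the dict,
# so wrap it in list() whenever a stable snapshot is needed.
snapshot = list(counts.values())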
@@ -15,6 +15,8 @@
 
 import logging
 
+from six import itervalues
+
 import pymacaroons
 from twisted.internet import defer
 
@@ -66,7 +68,7 @@ class Auth(object):
         )
         auth_events = yield self.store.get_events(auth_events_ids)
         auth_events = {
-            (e.type, e.state_key): e for e in auth_events.values()
+            (e.type, e.state_key): e for e in itervalues(auth_events)
         }
         self.check(event, auth_events=auth_events, do_sig_check=do_sig_check)
 
@@ -411,7 +411,7 @@ class Filter(object):
         return room_ids
 
     def filter(self, events):
-        return filter(self.check, events)
+        return list(filter(self.check, events))
 
     def limit(self):
         return self.filter_json.get("limit", 10)
@@ -471,14 +471,14 @@ def _check_power_levels(event, auth_events):
     ]
 
     old_list = current_state.content.get("users", {})
-    for user in set(old_list.keys() + user_list.keys()):
+    for user in set(list(old_list) + list(user_list)):
         levels_to_check.append(
             (user, "users")
         )
 
     old_list = current_state.content.get("events", {})
     new_list = event.content.get("events", {})
-    for ev_id in set(old_list.keys() + new_list.keys()):
+    for ev_id in set(list(old_list) + list(new_list)):
         levels_to_check.append(
             (ev_id, "events")
         )
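The hunk above fixes a genuine Python 3 breakage, not just laziness: dict views do not support `+`, so concatenating `.keys()` results raises TypeError. A small illustration with hypothetical dicts:

old_list = {"@a:example.com": 100}
user_list = {"@b:example.com": 50}

# Python 2: dict.keys() returns lists, so `+` concatenates them.
# Python 3: dict_keys has no `+`, so this raises TypeError:
#   users = set(old_list.keys() + user_list.keys())

# Portable form, as used in the diff; iterating a dict yields its keys.
users = set(list(old_list) + list(user_list))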
@@ -146,7 +146,7 @@ class EventBase(object):
         return field in self._event_dict
 
     def items(self):
-        return self._event_dict.items()
+        return list(self._event_dict.items())
 
 
 class FrozenEvent(EventBase):
@@ -391,7 +391,7 @@ class FederationClient(FederationBase):
         """
         if return_local:
             seen_events = yield self.store.get_events(event_ids, allow_rejected=True)
-            signed_events = seen_events.values()
+            signed_events = list(seen_events.values())
         else:
             seen_events = yield self.store.have_seen_events(event_ids)
             signed_events = []
@@ -589,7 +589,7 @@ class FederationClient(FederationBase):
         }
 
         valid_pdus = yield self._check_sigs_and_hash_and_fetch(
-            destination, pdus.values(),
+            destination, list(pdus.values()),
             outlier=True,
         )
 
@@ -197,7 +197,7 @@ class FederationRemoteSendQueue(object):
 
         # We only want to send presence for our own users, so lets always just
         # filter here just in case.
-        local_states = filter(lambda s: self.is_mine_id(s.user_id), states)
+        local_states = list(filter(lambda s: self.is_mine_id(s.user_id), states))
 
         self.presence_map.update({state.user_id: state for state in local_states})
         self.presence_changed[pos] = [state.user_id for state in local_states]
@@ -35,6 +35,8 @@ from synapse.metrics import (
 
 from prometheus_client import Counter
 
+from six import itervalues
+
 import logging
 
 
@@ -234,7 +236,7 @@ class TransactionQueue(object):
             yield logcontext.make_deferred_yieldable(defer.gatherResults(
                 [
                     logcontext.run_in_background(handle_room_events, evs)
-                    for evs in events_by_room.itervalues()
+                    for evs in itervalues(events_by_room)
                 ],
                 consumeErrors=True
             ))
@@ -325,7 +327,7 @@ class TransactionQueue(object):
                 if not states_map:
                     break
 
-                yield self._process_presence_inner(states_map.values())
+                yield self._process_presence_inner(list(states_map.values()))
         except Exception:
             logger.exception("Error sending presence states to servers")
         finally:
@@ -114,14 +114,14 @@ class BaseHandler(object):
         if guest_access != "can_join":
             if context:
                 current_state = yield self.store.get_events(
-                    context.current_state_ids.values()
+                    list(context.current_state_ids.values())
                 )
             else:
                 current_state = yield self.state_handler.get_current_state(
                     event.room_id
                 )
 
-            current_state = current_state.values()
+            current_state = list(current_state.values())
 
             logger.info("maybe_kick_guest_users %r", current_state)
             yield self.kick_guest_users(current_state)
@@ -15,6 +15,8 @@
 
 from twisted.internet import defer
 
+from six import itervalues
+
 import synapse
 from synapse.api.constants import EventTypes
 from synapse.util.metrics import Measure
@@ -119,7 +121,7 @@ class ApplicationServicesHandler(object):
 
                 yield make_deferred_yieldable(defer.gatherResults([
                     run_in_background(handle_room_events, evs)
-                    for evs in events_by_room.itervalues()
+                    for evs in itervalues(events_by_room)
                 ], consumeErrors=True))
 
                 yield self.store.set_appservice_last_pos(upper_bound)
@@ -249,7 +249,7 @@ class AuthHandler(BaseHandler):
             errordict = e.error_dict()
 
         for f in flows:
-            if len(set(f) - set(creds.keys())) == 0:
+            if len(set(f) - set(creds)) == 0:
                 # it's very useful to know what args are stored, but this can
                 # include the password in the case of registering, so only log
                 # the keys (confusingly, clientdict may contain a password
@@ -257,12 +257,12 @@ class AuthHandler(BaseHandler):
                 # and is not sensitive).
                 logger.info(
                     "Auth completed with creds: %r. Client dict has keys: %r",
-                    creds, clientdict.keys()
+                    creds, list(clientdict)
                 )
                 defer.returnValue((creds, clientdict, session['id']))
 
         ret = self._auth_dict_for_flows(flows, session)
-        ret['completed'] = creds.keys()
+        ret['completed'] = list(creds)
         ret.update(errordict)
         raise InteractiveAuthIncompleteError(
             ret,
@@ -114,7 +114,7 @@ class DeviceHandler(BaseHandler):
             user_id, device_id=None
         )
 
-        devices = device_map.values()
+        devices = list(device_map.values())
         for device in devices:
             _update_device_from_client_ips(device, ips)
 
@@ -187,7 +187,7 @@ class DeviceHandler(BaseHandler):
             defer.Deferred:
         """
         device_map = yield self.store.get_devices_by_user(user_id)
-        device_ids = device_map.keys()
+        device_ids = list(device_map)
         if except_device_id is not None:
             device_ids = [d for d in device_ids if d != except_device_id]
         yield self.delete_devices(user_id, device_ids)
@@ -52,7 +52,6 @@ from synapse.util.retryutils import NotRetryingDestination
 
 from synapse.util.distributor import user_joined_room
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -480,8 +479,8 @@ class FederationHandler(BaseHandler):
             # to get all state ids that we're interested in.
             event_map = yield self.store.get_events([
                 e_id
-                for key_to_eid in event_to_state_ids.itervalues()
-                for key, e_id in key_to_eid.iteritems()
+                for key_to_eid in list(event_to_state_ids.values())
+                for key, e_id in key_to_eid.items()
                 if key[0] != EventTypes.Member or check_match(key[1])
             ])
 
@@ -1149,13 +1148,13 @@ class FederationHandler(BaseHandler):
             user = UserID.from_string(event.state_key)
             yield user_joined_room(self.distributor, user, event.room_id)
 
-        state_ids = context.prev_state_ids.values()
+        state_ids = list(context.prev_state_ids.values())
         auth_chain = yield self.store.get_auth_chain(state_ids)
 
-        state = yield self.store.get_events(context.prev_state_ids.values())
+        state = yield self.store.get_events(list(context.prev_state_ids.values()))
 
         defer.returnValue({
-            "state": state.values(),
+            "state": list(state.values()),
             "auth_chain": auth_chain,
         })
 
@@ -1405,7 +1404,7 @@ class FederationHandler(BaseHandler):
                 else:
                     del results[(event.type, event.state_key)]
 
-            res = results.values()
+            res = list(results.values())
             for event in res:
                 # We sign these again because there was a bug where we
                 # incorrectly signed things the first time round
@@ -1446,7 +1445,7 @@ class FederationHandler(BaseHandler):
                 else:
                     results.pop((event.type, event.state_key), None)
 
-            defer.returnValue(results.values())
+            defer.returnValue(list(results.values()))
         else:
             defer.returnValue([])
 
@@ -1915,7 +1914,7 @@ class FederationHandler(BaseHandler):
         })
 
         new_state = self.state_handler.resolve_events(
-            [local_view.values(), remote_view.values()],
+            [list(local_view.values()), list(remote_view.values())],
             event
         )
 
@@ -325,7 +325,7 @@ class PresenceHandler(object):
 
             if to_notify:
                 notified_presence_counter.inc(len(to_notify))
-                yield self._persist_and_notify(to_notify.values())
+                yield self._persist_and_notify(list(to_notify.values()))
 
             self.unpersisted_users_changes |= set(s.user_id for s in new_states)
             self.unpersisted_users_changes -= set(to_notify.keys())
@@ -687,7 +687,7 @@ class PresenceHandler(object):
         """
 
         updates = yield self.current_state_for_users(target_user_ids)
-        updates = updates.values()
+        updates = list(updates.values())
 
         for user_id in set(target_user_ids) - set(u.user_id for u in updates):
             updates.append(UserPresenceState.default(user_id))
@@ -753,11 +753,11 @@ class PresenceHandler(object):
             self._push_to_remotes([state])
         else:
             user_ids = yield self.store.get_users_in_room(room_id)
-            user_ids = filter(self.is_mine_id, user_ids)
+            user_ids = list(filter(self.is_mine_id, user_ids))
 
             states = yield self.current_state_for_users(user_ids)
 
-            self._push_to_remotes(states.values())
+            self._push_to_remotes(list(states.values()))
 
     @defer.inlineCallbacks
     def get_presence_list(self, observer_user, accepted=None):
@@ -1051,7 +1051,7 @@ class PresenceEventSource(object):
             updates = yield presence.current_state_for_users(user_ids_changed)
 
             if include_offline:
-                defer.returnValue((updates.values(), max_token))
+                defer.returnValue((list(updates.values()), max_token))
             else:
                 defer.returnValue(([
                     s for s in itervalues(updates)
@@ -1112,7 +1112,7 @@ def handle_timeouts(user_states, is_mine_fn, syncing_user_ids, now):
         if new_state:
             changes[state.user_id] = new_state
 
-    return changes.values()
+    return list(changes.values())
 
 
 def handle_timeout(state, is_mine, syncing_user_ids, now):
@@ -455,7 +455,7 @@ class RoomContextHandler(BaseHandler):
         state = yield self.store.get_state_for_events(
             [last_event_id], None
         )
-        results["state"] = state[last_event_id].values()
+        results["state"] = list(state[last_event_id].values())
 
         results["start"] = now_token.copy_and_replace(
             "room_key", results["start"]
@@ -15,6 +15,7 @@
 
 from twisted.internet import defer
 
+from six import iteritems
 from six.moves import range
 
 from ._base import BaseHandler
@@ -307,7 +308,7 @@ class RoomListHandler(BaseHandler):
         )
 
         event_map = yield self.store.get_events([
-            event_id for key, event_id in current_state_ids.iteritems()
+            event_id for key, event_id in iteritems(current_state_ids)
             if key[0] in (
                 EventTypes.JoinRules,
                 EventTypes.Name,
@@ -348,7 +348,7 @@ class SearchHandler(BaseHandler):
             rooms = set(e.room_id for e in allowed_events)
             for room_id in rooms:
                 state = yield self.state_handler.get_current_state(room_id)
-                state_results[room_id] = state.values()
+                state_results[room_id] = list(state.values())
 
             state_results.values()
 
@@ -541,11 +541,11 @@ class SyncHandler(object):
 
         state = {}
         if state_ids:
-            state = yield self.store.get_events(state_ids.values())
+            state = yield self.store.get_events(list(state_ids.values()))
 
         defer.returnValue({
             (e.type, e.state_key): e
-            for e in sync_config.filter_collection.filter_room_state(state.values())
+            for e in sync_config.filter_collection.filter_room_state(list(state.values()))
         })
 
     @defer.inlineCallbacks
@@ -894,7 +894,7 @@ class SyncHandler(object):
             presence.extend(states)
 
         # Deduplicate the presence entries so that there's at most one per user
-        presence = {p.user_id: p for p in presence}.values()
+        presence = list({p.user_id: p for p in presence}.values())
 
         presence = sync_config.filter_collection.filter_presence(
             presence
@@ -39,7 +39,7 @@ def list_with_base_rules(rawrules):
     rawrules = [r for r in rawrules if r['priority_class'] >= 0]
 
     # shove the server default rules for each kind onto the end of each
-    current_prio_class = PRIORITY_CLASS_INVERSE_MAP.keys()[-1]
+    current_prio_class = list(PRIORITY_CLASS_INVERSE_MAP)[-1]
 
     ruleslist.extend(make_base_prepend_rules(
         PRIORITY_CLASS_INVERSE_MAP[current_prio_class], modified_base_rules
@@ -229,7 +229,8 @@ class Mailer(object):
         if room_vars['notifs'] and 'messages' in room_vars['notifs'][-1]:
             prev_messages = room_vars['notifs'][-1]['messages']
             for message in notifvars['messages']:
-                pm = filter(lambda pm: pm['id'] == message['id'], prev_messages)
+                pm = list(filter(lambda pm: pm['id'] == message['id'],
+                                 prev_messages))
                 if pm:
                     if not message["is_historical"]:
                         pm[0]["is_historical"] = False
@@ -113,7 +113,7 @@ def calculate_room_name(store, room_state_ids, user_id, fallback_to_members=True
     # so find out who is in the room that isn't the user.
     if "m.room.member" in room_state_bytype_ids:
         member_events = yield store.get_events(
-            room_state_bytype_ids["m.room.member"].values()
+            list(room_state_bytype_ids["m.room.member"].values())
         )
         all_members = [
             ev for ev in member_events.values()
@@ -104,7 +104,7 @@ class HttpTransactionCache(object):
 
     def _cleanup(self):
        now = self.clock.time_msec()
-        for key in self.transactions.keys():
+        for key in list(self.transactions):
            ts = self.transactions[key][1]
            if now > (ts + CLEANUP_PERIOD_MS):  # after cleanup period
                del self.transactions[key]
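The `_cleanup` change above also guards against mutation during iteration: on Python 3, deleting from a dict while looping over its live `keys()` view raises RuntimeError, so the keys are copied into a list first. A minimal sketch of the failure mode:

transactions = {"txn1": 1000, "txn2": 2000}

# RuntimeError on Python 3: "dictionary changed size during iteration".
#   for key in transactions.keys():
#       del transactions[key]

# Safe on both interpreters: iterate over a snapshot of the keys.
for key in list(transactions):
    del transactions[key]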
@@ -132,7 +132,8 @@ class StateHandler(object):
             defer.returnValue(event)
             return
 
-        state_map = yield self.store.get_events(state.values(), get_prev_content=False)
+        state_map = yield self.store.get_events(list(state.values()),
+                                                get_prev_content=False)
         state = {
             key: state_map[e_id] for key, e_id in iteritems(state) if e_id in state_map
         }
@@ -40,6 +40,9 @@ import synapse.metrics
 from synapse.events import EventBase  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
 
+from six.moves import range
+from six import itervalues, iteritems
+
 from prometheus_client import Counter
 
 logger = logging.getLogger(__name__)
@@ -245,7 +248,7 @@ class EventsStore(EventsWorkerStore):
             partitioned.setdefault(event.room_id, []).append((event, ctx))
 
         deferreds = []
-        for room_id, evs_ctxs in partitioned.iteritems():
+        for room_id, evs_ctxs in iteritems(partitioned):
             d = self._event_persist_queue.add_to_queue(
                 room_id, evs_ctxs,
                 backfilled=backfilled,
@@ -330,7 +333,7 @@ class EventsStore(EventsWorkerStore):
 
         chunks = [
             events_and_contexts[x:x + 100]
-            for x in xrange(0, len(events_and_contexts), 100)
+            for x in range(0, len(events_and_contexts), 100)
         ]
 
         for chunk in chunks:
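The chunking hunks swap `xrange` (removed in Python 3) for `range`; the import added above pulls it from `six.moves`, which maps to `xrange` on Python 2 and the built-in lazy `range` on Python 3, so chunking stays constant-memory on both. Illustrative values only:

from six.moves import range  # xrange on Py2, built-in range on Py3

events = ["event%d" % i for i in range(250)]

# Split into chunks of at most 100, as the storage code does.
chunks = [events[x:x + 100] for x in range(0, len(events), 100)]
assert [len(c) for c in chunks] == [100, 100, 50]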
@@ -364,7 +367,7 @@ class EventsStore(EventsWorkerStore):
                     (event, context)
                 )
 
-        for room_id, ev_ctx_rm in events_by_room.iteritems():
+        for room_id, ev_ctx_rm in iteritems(events_by_room):
             # Work out new extremities by recursively adding and removing
             # the new events.
             latest_event_ids = yield self.get_latest_event_ids_in_room(
@@ -459,12 +462,12 @@ class EventsStore(EventsWorkerStore):
 
                 event_counter.labels(event.type, origin_type, origin_entity).inc()
 
-            for room_id, new_state in current_state_for_room.iteritems():
+            for room_id, new_state in iteritems(current_state_for_room):
                 self.get_current_state_ids.prefill(
                     (room_id, ), new_state
                 )
 
-            for room_id, latest_event_ids in new_forward_extremeties.iteritems():
+            for room_id, latest_event_ids in iteritems(new_forward_extremeties):
                 self.get_latest_event_ids_in_room.prefill(
                     (room_id,), list(latest_event_ids)
                 )
@@ -641,20 +644,20 @@ class EventsStore(EventsWorkerStore):
         """
         existing_state = yield self.get_current_state_ids(room_id)
 
-        existing_events = set(existing_state.itervalues())
-        new_events = set(ev_id for ev_id in current_state.itervalues())
+        existing_events = set(itervalues(existing_state))
+        new_events = set(ev_id for ev_id in itervalues(current_state))
         changed_events = existing_events ^ new_events
 
         if not changed_events:
             return
 
         to_delete = {
-            key: ev_id for key, ev_id in existing_state.iteritems()
+            key: ev_id for key, ev_id in iteritems(existing_state)
             if ev_id in changed_events
         }
         events_to_insert = (new_events - existing_events)
         to_insert = {
-            key: ev_id for key, ev_id in current_state.iteritems()
+            key: ev_id for key, ev_id in iteritems(current_state)
             if ev_id in events_to_insert
         }
 
@@ -757,11 +760,11 @@ class EventsStore(EventsWorkerStore):
         )
 
     def _update_current_state_txn(self, txn, state_delta_by_room, max_stream_order):
-        for room_id, current_state_tuple in state_delta_by_room.iteritems():
+        for room_id, current_state_tuple in iteritems(state_delta_by_room):
             to_delete, to_insert = current_state_tuple
             txn.executemany(
                 "DELETE FROM current_state_events WHERE event_id = ?",
-                [(ev_id,) for ev_id in to_delete.itervalues()],
+                [(ev_id,) for ev_id in itervalues(to_delete)],
             )
 
             self._simple_insert_many_txn(
|
@ -774,7 +777,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
"type": key[0],
|
"type": key[0],
|
||||||
"state_key": key[1],
|
"state_key": key[1],
|
||||||
}
|
}
|
||||||
for key, ev_id in to_insert.iteritems()
|
for key, ev_id in iteritems(to_insert)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -793,7 +796,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
"event_id": ev_id,
|
"event_id": ev_id,
|
||||||
"prev_event_id": to_delete.get(key, None),
|
"prev_event_id": to_delete.get(key, None),
|
||||||
}
|
}
|
||||||
for key, ev_id in state_deltas.iteritems()
|
for key, ev_id in iteritems(state_deltas)
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -836,7 +839,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
|
|
||||||
def _update_forward_extremities_txn(self, txn, new_forward_extremities,
|
def _update_forward_extremities_txn(self, txn, new_forward_extremities,
|
||||||
max_stream_order):
|
max_stream_order):
|
||||||
for room_id, new_extrem in new_forward_extremities.iteritems():
|
for room_id, new_extrem in iteritems(new_forward_extremities):
|
||||||
self._simple_delete_txn(
|
self._simple_delete_txn(
|
||||||
txn,
|
txn,
|
||||||
table="event_forward_extremities",
|
table="event_forward_extremities",
|
||||||
|
@ -854,7 +857,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
"event_id": ev_id,
|
"event_id": ev_id,
|
||||||
"room_id": room_id,
|
"room_id": room_id,
|
||||||
}
|
}
|
||||||
for room_id, new_extrem in new_forward_extremities.iteritems()
|
for room_id, new_extrem in iteritems(new_forward_extremities)
|
||||||
for ev_id in new_extrem
|
for ev_id in new_extrem
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
@ -871,7 +874,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
"event_id": event_id,
|
"event_id": event_id,
|
||||||
"stream_ordering": max_stream_order,
|
"stream_ordering": max_stream_order,
|
||||||
}
|
}
|
||||||
for room_id, new_extrem in new_forward_extremities.iteritems()
|
for room_id, new_extrem in iteritems(new_forward_extremities)
|
||||||
for event_id in new_extrem
|
for event_id in new_extrem
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
@ -899,7 +902,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
new_events_and_contexts[event.event_id] = (event, context)
|
new_events_and_contexts[event.event_id] = (event, context)
|
||||||
else:
|
else:
|
||||||
new_events_and_contexts[event.event_id] = (event, context)
|
new_events_and_contexts[event.event_id] = (event, context)
|
||||||
return new_events_and_contexts.values()
|
return list(new_events_and_contexts.values())
|
||||||
|
|
||||||
def _update_room_depths_txn(self, txn, events_and_contexts, backfilled):
|
def _update_room_depths_txn(self, txn, events_and_contexts, backfilled):
|
||||||
"""Update min_depth for each room
|
"""Update min_depth for each room
|
||||||
|
@ -925,7 +928,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
event.depth, depth_updates.get(event.room_id, event.depth)
|
event.depth, depth_updates.get(event.room_id, event.depth)
|
||||||
)
|
)
|
||||||
|
|
||||||
for room_id, depth in depth_updates.iteritems():
|
for room_id, depth in iteritems(depth_updates):
|
||||||
self._update_min_depth_for_room_txn(txn, room_id, depth)
|
self._update_min_depth_for_room_txn(txn, room_id, depth)
|
||||||
|
|
||||||
def _update_outliers_txn(self, txn, events_and_contexts):
|
def _update_outliers_txn(self, txn, events_and_contexts):
|
||||||
|
@ -1309,7 +1312,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
" WHERE e.event_id IN (%s)"
|
" WHERE e.event_id IN (%s)"
|
||||||
) % (",".join(["?"] * len(ev_map)),)
|
) % (",".join(["?"] * len(ev_map)),)
|
||||||
|
|
||||||
txn.execute(sql, ev_map.keys())
|
txn.execute(sql, list(ev_map))
|
||||||
rows = self.cursor_to_dict(txn)
|
rows = self.cursor_to_dict(txn)
|
||||||
for row in rows:
|
for row in rows:
|
||||||
event = ev_map[row["event_id"]]
|
event = ev_map[row["event_id"]]
|
||||||
|
@ -1572,7 +1575,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
|
|
||||||
chunks = [
|
chunks = [
|
||||||
event_ids[i:i + 100]
|
event_ids[i:i + 100]
|
||||||
for i in xrange(0, len(event_ids), 100)
|
for i in range(0, len(event_ids), 100)
|
||||||
]
|
]
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
ev_rows = self._simple_select_many_txn(
|
ev_rows = self._simple_select_many_txn(
|
||||||
|
@ -1986,7 +1989,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
logger.info("[purge] finding state groups which depend on redundant"
|
logger.info("[purge] finding state groups which depend on redundant"
|
||||||
" state groups")
|
" state groups")
|
||||||
remaining_state_groups = []
|
remaining_state_groups = []
|
||||||
for i in xrange(0, len(state_rows), 100):
|
for i in range(0, len(state_rows), 100):
|
||||||
chunk = [sg for sg, in state_rows[i:i + 100]]
|
chunk = [sg for sg, in state_rows[i:i + 100]]
|
||||||
# look for state groups whose prev_state_group is one we are about
|
# look for state groups whose prev_state_group is one we are about
|
||||||
# to delete
|
# to delete
|
||||||
|
@ -2042,7 +2045,7 @@ class EventsStore(EventsWorkerStore):
|
||||||
"state_key": key[1],
|
"state_key": key[1],
|
||||||
"event_id": state_id,
|
"event_id": state_id,
|
||||||
}
|
}
|
||||||
for key, state_id in curr_state.iteritems()
|
for key, state_id in iteritems(curr_state)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -16,6 +16,7 @@
|
||||||
from ._base import SQLBaseStore
|
from ._base import SQLBaseStore
|
||||||
from synapse.api.constants import PresenceState
|
from synapse.api.constants import PresenceState
|
||||||
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
|
||||||
|
from synapse.util import batch_iter
|
||||||
|
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
@ -115,11 +116,7 @@ class PresenceStore(SQLBaseStore):
|
||||||
" AND user_id IN (%s)"
|
" AND user_id IN (%s)"
|
||||||
)
|
)
|
||||||
|
|
||||||
batches = (
|
for states in batch_iter(presence_states, 50):
|
||||||
presence_states[i:i + 50]
|
|
||||||
for i in xrange(0, len(presence_states), 50)
|
|
||||||
)
|
|
||||||
for states in batches:
|
|
||||||
args = [stream_id]
|
args = [stream_id]
|
||||||
args.extend(s.user_id for s in states)
|
args.extend(s.user_id for s in states)
|
||||||
txn.execute(
|
txn.execute(
|
||||||
|
|
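The presence-store hunk replaces hand-rolled slicing with `synapse.util.batch_iter`. Its exact source is not part of this diff; a sketch with the same observable behaviour (yield successive batches of at most `size` items) might look like:

from itertools import islice

def batch_iter(iterable, size):
    # Yield tuples of at most `size` items until the iterable is exhausted.
    # A sketch only; the real helper lives in synapse.util.
    sourceiter = iter(iterable)
    return iter(lambda: tuple(islice(sourceiter, size)), ())

for states in batch_iter(range(125), 50):
    print(len(states))  # 50, 50, 25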
|
@ -448,7 +448,7 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
"search_msgs", self.cursor_to_dict, sql, *args
|
"search_msgs", self.cursor_to_dict, sql, *args
|
||||||
)
|
)
|
||||||
|
|
||||||
results = filter(lambda row: row["room_id"] in room_ids, results)
|
results = list(filter(lambda row: row["room_id"] in room_ids, results))
|
||||||
|
|
||||||
events = yield self._get_events([r["event_id"] for r in results])
|
events = yield self._get_events([r["event_id"] for r in results])
|
||||||
|
|
||||||
|
@ -603,7 +603,7 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
"search_rooms", self.cursor_to_dict, sql, *args
|
"search_rooms", self.cursor_to_dict, sql, *args
|
||||||
)
|
)
|
||||||
|
|
||||||
results = filter(lambda row: row["room_id"] in room_ids, results)
|
results = list(filter(lambda row: row["room_id"] in room_ids, results))
|
||||||
|
|
||||||
events = yield self._get_events([r["event_id"] for r in results])
|
events = yield self._get_events([r["event_id"] for r in results])
|
||||||
|
|
||||||
|
|
|
@ -16,6 +16,9 @@
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
from six import iteritems, itervalues
|
||||||
|
from six.moves import range
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.storage.background_updates import BackgroundUpdateStore
|
from synapse.storage.background_updates import BackgroundUpdateStore
|
||||||
|
@ -134,7 +137,7 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
event_ids,
|
event_ids,
|
||||||
)
|
)
|
||||||
|
|
||||||
groups = set(event_to_groups.itervalues())
|
groups = set(itervalues(event_to_groups))
|
||||||
group_to_state = yield self._get_state_for_groups(groups)
|
group_to_state = yield self._get_state_for_groups(groups)
|
||||||
|
|
||||||
defer.returnValue(group_to_state)
|
defer.returnValue(group_to_state)
|
||||||
|
@ -166,18 +169,18 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
|
|
||||||
state_event_map = yield self.get_events(
|
state_event_map = yield self.get_events(
|
||||||
[
|
[
|
||||||
ev_id for group_ids in group_to_ids.itervalues()
|
ev_id for group_ids in itervalues(group_to_ids)
|
||||||
for ev_id in group_ids.itervalues()
|
for ev_id in itervalues(group_ids)
|
||||||
],
|
],
|
||||||
get_prev_content=False
|
get_prev_content=False
|
||||||
)
|
)
|
||||||
|
|
||||||
defer.returnValue({
|
defer.returnValue({
|
||||||
group: [
|
group: [
|
||||||
state_event_map[v] for v in event_id_map.itervalues()
|
state_event_map[v] for v in itervalues(event_id_map)
|
||||||
if v in state_event_map
|
if v in state_event_map
|
||||||
]
|
]
|
||||||
for group, event_id_map in group_to_ids.iteritems()
|
for group, event_id_map in iteritems(group_to_ids)
|
||||||
})
|
})
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
|
@ -186,7 +189,7 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
"""
|
"""
|
||||||
results = {}
|
results = {}
|
||||||
|
|
||||||
chunks = [groups[i:i + 100] for i in xrange(0, len(groups), 100)]
|
chunks = [groups[i:i + 100] for i in range(0, len(groups), 100)]
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
res = yield self.runInteraction(
|
res = yield self.runInteraction(
|
||||||
"_get_state_groups_from_groups",
|
"_get_state_groups_from_groups",
|
||||||
|
@ -347,21 +350,21 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
event_ids,
|
event_ids,
|
||||||
)
|
)
|
||||||
|
|
||||||
groups = set(event_to_groups.itervalues())
|
groups = set(itervalues(event_to_groups))
|
||||||
group_to_state = yield self._get_state_for_groups(groups, types)
|
group_to_state = yield self._get_state_for_groups(groups, types)
|
||||||
|
|
||||||
state_event_map = yield self.get_events(
|
state_event_map = yield self.get_events(
|
||||||
[ev_id for sd in group_to_state.itervalues() for ev_id in sd.itervalues()],
|
[ev_id for sd in itervalues(group_to_state) for ev_id in itervalues(sd)],
|
||||||
get_prev_content=False
|
get_prev_content=False
|
||||||
)
|
)
|
||||||
|
|
||||||
event_to_state = {
|
event_to_state = {
|
||||||
event_id: {
|
event_id: {
|
||||||
k: state_event_map[v]
|
k: state_event_map[v]
|
||||||
for k, v in group_to_state[group].iteritems()
|
for k, v in iteritems(group_to_state[group])
|
||||||
if v in state_event_map
|
if v in state_event_map
|
||||||
}
|
}
|
||||||
for event_id, group in event_to_groups.iteritems()
|
for event_id, group in iteritems(event_to_groups)
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue({event: event_to_state[event] for event in event_ids})
|
defer.returnValue({event: event_to_state[event] for event in event_ids})
|
||||||
|
@ -384,12 +387,12 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
event_ids,
|
event_ids,
|
||||||
)
|
)
|
||||||
|
|
||||||
groups = set(event_to_groups.itervalues())
|
groups = set(itervalues(event_to_groups))
|
||||||
group_to_state = yield self._get_state_for_groups(groups, types)
|
group_to_state = yield self._get_state_for_groups(groups, types)
|
||||||
|
|
||||||
event_to_state = {
|
event_to_state = {
|
||||||
event_id: group_to_state[group]
|
event_id: group_to_state[group]
|
||||||
for event_id, group in event_to_groups.iteritems()
|
for event_id, group in iteritems(event_to_groups)
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue({event: event_to_state[event] for event in event_ids})
|
defer.returnValue({event: event_to_state[event] for event in event_ids})
|
||||||
|
@ -503,7 +506,7 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
got_all = is_all or not missing_types
|
got_all = is_all or not missing_types
|
||||||
|
|
||||||
return {
|
return {
|
||||||
k: v for k, v in state_dict_ids.iteritems()
|
k: v for k, v in iteritems(state_dict_ids)
|
||||||
if include(k[0], k[1])
|
if include(k[0], k[1])
|
||||||
}, missing_types, got_all
|
}, missing_types, got_all
|
||||||
|
|
||||||
|
@ -562,12 +565,12 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
|
|
||||||
# Now we want to update the cache with all the things we fetched
|
# Now we want to update the cache with all the things we fetched
|
||||||
# from the database.
|
# from the database.
|
||||||
for group, group_state_dict in group_to_state_dict.iteritems():
|
for group, group_state_dict in iteritems(group_to_state_dict):
|
||||||
state_dict = results[group]
|
state_dict = results[group]
|
||||||
|
|
||||||
state_dict.update(
|
state_dict.update(
|
||||||
((intern_string(k[0]), intern_string(k[1])), to_ascii(v))
|
((intern_string(k[0]), intern_string(k[1])), to_ascii(v))
|
||||||
for k, v in group_state_dict.iteritems()
|
for k, v in iteritems(group_state_dict)
|
||||||
)
|
)
|
||||||
|
|
||||||
self._state_group_cache.update(
|
self._state_group_cache.update(
|
||||||
|
@ -654,7 +657,7 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
"state_key": key[1],
|
"state_key": key[1],
|
||||||
"event_id": state_id,
|
"event_id": state_id,
|
||||||
}
|
}
|
||||||
for key, state_id in delta_ids.iteritems()
|
for key, state_id in iteritems(delta_ids)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
@ -669,7 +672,7 @@ class StateGroupWorkerStore(SQLBaseStore):
|
||||||
"state_key": key[1],
|
"state_key": key[1],
|
||||||
"event_id": state_id,
|
"event_id": state_id,
|
||||||
}
|
}
|
||||||
for key, state_id in current_state_ids.iteritems()
|
for key, state_id in iteritems(current_state_ids)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -794,11 +797,11 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore):
|
||||||
"state_group": state_group_id,
|
"state_group": state_group_id,
|
||||||
"event_id": event_id,
|
"event_id": event_id,
|
||||||
}
|
}
|
||||||
for event_id, state_group_id in state_groups.iteritems()
|
for event_id, state_group_id in iteritems(state_groups)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
for event_id, state_group_id in state_groups.iteritems():
|
for event_id, state_group_id in iteritems(state_groups):
|
||||||
txn.call_after(
|
txn.call_after(
|
||||||
self._get_state_group_for_event.prefill,
|
self._get_state_group_for_event.prefill,
|
||||||
(event_id,), state_group_id
|
(event_id,), state_group_id
|
||||||
|
@ -826,7 +829,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore):
|
||||||
|
|
||||||
def reindex_txn(txn):
|
def reindex_txn(txn):
|
||||||
new_last_state_group = last_state_group
|
new_last_state_group = last_state_group
|
||||||
for count in xrange(batch_size):
|
for count in range(batch_size):
|
||||||
txn.execute(
|
txn.execute(
|
||||||
"SELECT id, room_id FROM state_groups"
|
"SELECT id, room_id FROM state_groups"
|
||||||
" WHERE ? < id AND id <= ?"
|
" WHERE ? < id AND id <= ?"
|
||||||
|
@ -884,7 +887,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore):
|
||||||
# of keys
|
# of keys
|
||||||
|
|
||||||
delta_state = {
|
delta_state = {
|
||||||
key: value for key, value in curr_state.iteritems()
|
key: value for key, value in iteritems(curr_state)
|
||||||
if prev_state.get(key, None) != value
|
if prev_state.get(key, None) != value
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -924,7 +927,7 @@ class StateStore(StateGroupWorkerStore, BackgroundUpdateStore):
|
||||||
"state_key": key[1],
|
"state_key": key[1],
|
||||||
"event_id": state_id,
|
"event_id": state_id,
|
||||||
}
|
}
|
||||||
for key, state_id in delta_state.iteritems()
|
for key, state_id in iteritems(delta_state)
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -22,6 +22,8 @@ from synapse.api.constants import EventTypes, JoinRules
|
||||||
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
|
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
|
||||||
from synapse.types import get_domain_from_id, get_localpart_from_id
|
from synapse.types import get_domain_from_id, get_localpart_from_id
|
||||||
|
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -100,7 +102,7 @@ class UserDirectoryStore(SQLBaseStore):
|
||||||
user_id, get_localpart_from_id(user_id), get_domain_from_id(user_id),
|
user_id, get_localpart_from_id(user_id), get_domain_from_id(user_id),
|
||||||
profile.display_name,
|
profile.display_name,
|
||||||
)
|
)
|
||||||
for user_id, profile in users_with_profile.iteritems()
|
for user_id, profile in iteritems(users_with_profile)
|
||||||
)
|
)
|
||||||
elif isinstance(self.database_engine, Sqlite3Engine):
|
elif isinstance(self.database_engine, Sqlite3Engine):
|
||||||
sql = """
|
sql = """
|
||||||
|
@@ -112,7 +114,7 @@ class UserDirectoryStore(SQLBaseStore):
                     user_id,
                     "%s %s" % (user_id, p.display_name,) if p.display_name else user_id
                 )
-                for user_id, p in users_with_profile.iteritems()
+                for user_id, p in iteritems(users_with_profile)
             )
         else:
             # This should be unreachable.
@@ -130,7 +132,7 @@ class UserDirectoryStore(SQLBaseStore):
                     "display_name": profile.display_name,
                     "avatar_url": profile.avatar_url,
                 }
-                for user_id, profile in users_with_profile.iteritems()
+                for user_id, profile in iteritems(users_with_profile)
             ]
         )
         for user_id in users_with_profile:
@@ -569,7 +569,7 @@ class CacheListDescriptor(_CacheDescriptorBase):
             return results
 
         return logcontext.make_deferred_yieldable(defer.gatherResults(
-            cached_defers.values(),
+            list(cached_defers.values()),
             consumeErrors=True,
         ).addCallback(update_results_dict).addErrback(
             unwrapFirstError
@@ -1,3 +1,5 @@
+from six import itervalues
+
 SENTINEL = object()
 
 
@@ -49,7 +51,7 @@ class TreeCache(object):
         if popped is SENTINEL:
             return default
 
-        node_and_keys = zip(nodes, key)
+        node_and_keys = list(zip(nodes, key))
         node_and_keys.reverse()
         node_and_keys.append((self.root, None))
 
@@ -76,7 +78,7 @@ def iterate_tree_cache_entry(d):
     can contain dicts.
     """
     if isinstance(d, dict):
-        for value_d in d.itervalues():
+        for value_d in itervalues(d):
             for value in iterate_tree_cache_entry(value_d):
                 yield value
     else: