Merge branch 'hotfixes-v0.10.0-r2' of github.com:matrix-org/synapse
commit 16026e60c5
@@ -1,3 +1,12 @@
+Changes in synapse v0.10.0-r2 (2015-09-16)
+==========================================
+
+* Fix bug where we always fetched remote server signing keys instead of using
+  ones in our cache.
+* Fix adding threepids to an existing account.
+* Fix bug with inviting over federation where the remote server was already in
+  the room. (PR #281, SYN-392)
+
 Changes in synapse v0.10.0-r1 (2015-09-08)
 ==========================================
 
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.10.0-r1"
+__version__ = "0.10.0-r2"
@@ -162,7 +162,9 @@ class Keyring(object):
         def remove_deferreds(res, server_name, group_id):
             server_to_gids[server_name].discard(group_id)
             if not server_to_gids[server_name]:
-                server_to_deferred.pop(server_name).callback(None)
+                d = server_to_deferred.pop(server_name, None)
+                if d:
+                    d.callback(None)
             return res
 
         for g_id, deferred in deferreds.items():
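The guarded pop matters because remove_deferreds can presumably run more than once for the same server, and after the first run the server's entry is already gone. A minimal sketch, using a plain dict in place of the Keyring's server_to_deferred state:

server_to_deferred = {"example.org": "<deferred>"}

# Old behaviour: dict.pop(key) raises KeyError once the entry has been removed.
server_to_deferred.pop("example.org")
try:
    server_to_deferred.pop("example.org")
except KeyError:
    print("second pop without a default raises KeyError")

# New behaviour: pop(key, None) returns None instead, so the callback is only
# fired when there really is a pending deferred left to resolve.
d = server_to_deferred.pop("example.org", None)
if d:
    print("resolving", d)
else:
    print("nothing left to resolve")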
@@ -200,8 +202,15 @@ class Keyring(object):
             else:
                 break
 
-        for server_name, deferred in server_to_deferred:
-            self.key_downloads[server_name] = ObservableDeferred(deferred)
+        for server_name, deferred in server_to_deferred.items():
+            d = ObservableDeferred(deferred)
+            self.key_downloads[server_name] = d
+
+            def rm(r, server_name):
+                self.key_downloads.pop(server_name, None)
+                return r
+
+            d.addBoth(rm, server_name)
 
     def get_server_verify_keys(self, group_id_to_group, group_id_to_deferred):
         """Takes a dict of KeyGroups and tries to find at least one key for
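Two things change here. Iterating a dict directly yields only its keys, so the old loop could never have unpacked (server_name, deferred) pairs; and the new rm callback, attached with addBoth, clears the cached entry from key_downloads whether the lookup succeeds or fails. A minimal sketch of the iteration point, in plain Python with placeholder values instead of real deferreds:

server_to_deferred = {"example.org": "<deferred-1>", "matrix.org": "<deferred-2>"}

# Iterating the dict itself yields only keys, so the old two-name unpack fails:
try:
    for server_name, deferred in server_to_deferred:
        pass
except ValueError as exc:
    print("old loop:", exc)

# .items() yields (key, value) pairs, which is what the loop body needs:
for server_name, deferred in server_to_deferred.items():
    print("new loop:", server_name, deferred)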
@@ -220,9 +229,8 @@ class Keyring(object):
         merged_results = {}
 
         missing_keys = {
-            group.server_name: key_id
+            group.server_name: set(group.key_ids)
             for group in group_id_to_group.values()
-            for key_id in group.key_ids
         }
 
         for fn in key_fetch_fns:
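The old comprehension iterated every key id but keyed the dict on the server name, so each server ended up mapped to whichever key id happened to come last and all of its other key ids were silently dropped. A minimal sketch with made-up data (the KeyGroup namedtuple below is a stand-in for the real KeyGroup):

from collections import namedtuple

KeyGroup = namedtuple("KeyGroup", ["server_name", "key_ids"])
group_id_to_group = {1: KeyGroup("example.org", ["ed25519:a", "ed25519:b"])}

# Old comprehension: the later key id overwrites the earlier one.
old = {g.server_name: key_id
       for g in group_id_to_group.values() for key_id in g.key_ids}
assert old == {"example.org": "ed25519:b"}

# New comprehension: every key id for the server is kept, as a set.
new = {g.server_name: set(g.key_ids) for g in group_id_to_group.values()}
assert new == {"example.org": {"ed25519:a", "ed25519:b"}}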
@@ -279,16 +287,15 @@ class Keyring(object):
     def get_keys_from_store(self, server_name_and_key_ids):
         res = yield defer.gatherResults(
             [
-                self.store.get_server_verify_keys(server_name, key_ids)
+                self.store.get_server_verify_keys(
+                    server_name, key_ids
+                ).addCallback(lambda ks, server: (server, ks), server_name)
                 for server_name, key_ids in server_name_and_key_ids
            ],
             consumeErrors=True,
         ).addErrback(unwrapFirstError)
 
-        defer.returnValue(dict(zip(
-            [server_name for server_name, _ in server_name_and_key_ids],
-            res
-        )))
+        defer.returnValue(dict(res))
 
     @defer.inlineCallbacks
     def get_keys_from_perspectives(self, server_name_and_key_ids):
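Each store lookup now tags its own result with the server it was for (Twisted's addCallback passes the Deferred's result first and any extra positional arguments after it), so the final dict is built directly from (server_name, keys) pairs instead of being zipped back against the request list. A minimal sketch of that pairing, with a hypothetical fetch function in place of the store and no Twisted machinery:

def fetch_keys(server_name, key_ids):
    # hypothetical stand-in for store.get_server_verify_keys(...)
    return {key_id: "<verify key from %s>" % server_name for key_id in key_ids}

requests = [("example.org", ["ed25519:a"]), ("matrix.org", ["ed25519:x"])]

# Each result carries the server it belongs to, mirroring the
# addCallback(lambda ks, server: (server, ks), server_name) step above.
res = [(server_name, fetch_keys(server_name, key_ids))
       for server_name, key_ids in requests]

keys_by_server = dict(res)
assert keys_by_server["matrix.org"] == {"ed25519:x": "<verify key from matrix.org>"}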
@@ -96,6 +96,7 @@ class ThreepidRestServlet(RestServlet):
         self.hs = hs
         self.identity_handler = hs.get_handlers().identity_handler
         self.auth = hs.get_auth()
+        self.auth_handler = hs.get_handlers().auth_handler
 
     @defer.inlineCallbacks
     def on_GET(self, request):
@@ -303,6 +303,15 @@ class EventFederationStore(SQLBaseStore):
             ],
         )
 
+        self._update_extremeties(txn, events)
+
+    def _update_extremeties(self, txn, events):
+        """Updates the event_*_extremities tables based on the new/updated
+        events being persisted.
+
+        This is called for new events *and* for events that were outliers, but
+        are now being persisted as non-outliers.
+        """
         events_by_room = {}
         for ev in events:
             events_by_room.setdefault(ev.room_id, []).append(ev)
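The new helper starts by bucketing the events being persisted by room, so each room's extremities can be recomputed in one pass. A standalone sketch of that grouping step, using a minimal stand-in event type rather than real Synapse events:

from collections import namedtuple

Event = namedtuple("Event", ["event_id", "room_id"])
events = [Event("$a", "!room1"), Event("$b", "!room1"), Event("$c", "!room2")]

events_by_room = {}
for ev in events:
    events_by_room.setdefault(ev.room_id, []).append(ev)

assert [e.event_id for e in events_by_room["!room1"]] == ["$a", "$b"]
assert [e.event_id for e in events_by_room["!room2"]] == ["$c"]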
@@ -275,6 +275,8 @@ class EventsStore(SQLBaseStore):
                 (False, event.event_id,)
             )
 
+            self._update_extremeties(txn, [event])
+
         events_and_contexts = filter(
             lambda ec: ec[0] not in to_remove,
             events_and_contexts