# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.http.server import (
    respond_with_json_bytes, wrap_json_request_handler,
)
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.api.errors import SynapseError, Codes
from synapse.crypto.keyring import KeyLookupError

from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer

from io import BytesIO

import logging

logger = logging.getLogger(__name__)


class RemoteKey(Resource):
    """HTTP resource for retrieving the TLS certificate and NACL signature
    verification keys for a collection of servers. Checks that the reported
    X.509 TLS certificate matches the one used in the HTTPS connection. Checks
    that the NACL signature for the remote server is valid. Returns a dict of
    JSON signed by both the remote server and by this server.

    Supports individual GET APIs and a bulk query POST API.

    Requests:

    GET /_matrix/key/v2/query/remote.server.example.com HTTP/1.1

    GET /_matrix/key/v2/query/remote.server.example.com/a.key.id HTTP/1.1

    POST /_matrix/key/v2/query HTTP/1.1
    Content-Type: application/json
    {
        "server_keys": {
            "remote.server.example.com": {
                "a.key.id": {
                    "minimum_valid_until_ts": 1234567890123
                }
            }
        }
    }

    Response:

    HTTP/1.1 200 OK
    Content-Type: application/json
    {
        "server_keys": [
            {
                "server_name": "remote.server.example.com",
                "valid_until_ts": # posix timestamp
                "verify_keys": {
                    "a.key.id": { # The identifier for a key.
                        "key": "" # base64 encoded verification key.
                    }
                },
                "old_verify_keys": {
                    "an.old.key.id": { # The identifier for an old key.
                        "key": "", # base64 encoded key.
                        "expired_ts": 0, # when the key stopped being used.
                    }
                },
                "tls_fingerprints": [
                    { "sha256": # fingerprint }
                ],
                "signatures": {
                    "remote.server.example.com": {...},
                    "this.server.example.com": {...}
                }
            }
        ]
    }
    """

    isLeaf = True

    def __init__(self, hs):
        self.keyring = hs.get_keyring()
        self.store = hs.get_datastore()
        self.clock = hs.get_clock()
        self.federation_domain_whitelist = hs.config.federation_domain_whitelist

    def render_GET(self, request):
        self.async_render_GET(request)
        return NOT_DONE_YET

    @wrap_json_request_handler
    @defer.inlineCallbacks
    def async_render_GET(self, request):
        if len(request.postpath) == 1:
            server, = request.postpath
            query = {server: {}}
        elif len(request.postpath) == 2:
            server, key_id = request.postpath
            minimum_valid_until_ts = parse_integer(
                request, "minimum_valid_until_ts"
            )
            arguments = {}
            if minimum_valid_until_ts is not None:
                arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
            query = {server: {key_id: arguments}}
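            # e.g. (illustrative values)
            #   GET /_matrix/key/v2/query/remote.server.example.com/a.key.id
            #       ?minimum_valid_until_ts=1234567890123
            # yields:
            #   query = {"remote.server.example.com":
            #            {"a.key.id": {"minimum_valid_until_ts": 1234567890123}}}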
        else:
            raise SynapseError(
                404, "Not found %r" % request.postpath, Codes.NOT_FOUND
            )
        yield self.query_keys(request, query, query_remote_on_cache_miss=True)

    def render_POST(self, request):
        self.async_render_POST(request)
        return NOT_DONE_YET

    @wrap_json_request_handler
    @defer.inlineCallbacks
    def async_render_POST(self, request):
        content = parse_json_object_from_request(request)

        query = content["server_keys"]

        yield self.query_keys(request, query, query_remote_on_cache_miss=True)

    @defer.inlineCallbacks
    def query_keys(self, request, query, query_remote_on_cache_miss=False):
        logger.info("Handling query for keys %r", query)
        store_queries = []
        for server_name, key_ids in query.items():
            if (
                self.federation_domain_whitelist is not None and
                server_name not in self.federation_domain_whitelist
            ):
                logger.debug("Federation denied with %s", server_name)
                continue

            if not key_ids:
                key_ids = (None,)
            for key_id in key_ids:
                store_queries.append((server_name, key_id, None))
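
        # A key_id of None stands for "any key for this server" (see the
        # (None,) fallback above); the trailing None appears to leave the
        # from_server field of the store lookup unconstrained. For example,
        # a bare {"remote.server.example.com": {}} query becomes the single
        # store query ("remote.server.example.com", None, None).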
        cached = yield self.store.get_server_keys_json(store_queries)
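        # `cached` maps each (server_name, key_id, from_server) triple from
        # store_queries to a list of rows; judging by the usage below, each
        # row carries "ts_added_ms", "ts_valid_until_ms" and the verbatim
        # signed "key_json" bytes.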

        json_results = set()

        time_now_ms = self.clock.time_msec()

        cache_misses = dict()
        for (server_name, key_id, from_server), results in cached.items():
            results = [
                (result["ts_added_ms"], result) for result in results
            ]

            if not results and key_id is not None:
                cache_misses.setdefault(server_name, set()).add(key_id)
                continue

            if key_id is not None:
                ts_added_ms, most_recent_result = max(results)
                ts_valid_until_ms = most_recent_result["ts_valid_until_ms"]
                req_key = query.get(server_name, {}).get(key_id, {})
                req_valid_until = req_key.get("minimum_valid_until_ts")
                miss = False
                if req_valid_until is not None:
                    if ts_valid_until_ms < req_valid_until:
                        logger.debug(
                            "Cached response for %r/%r is older than requested"
                            ": valid_until (%r) < minimum_valid_until (%r)",
                            server_name, key_id,
                            ts_valid_until_ms, req_valid_until
                        )
                        miss = True
                    else:
                        logger.debug(
                            "Cached response for %r/%r is newer than requested"
                            ": valid_until (%r) >= minimum_valid_until (%r)",
                            server_name, key_id,
                            ts_valid_until_ms, req_valid_until
                        )
                elif (ts_added_ms + ts_valid_until_ms) / 2 < time_now_ms:
                    logger.debug(
                        "Cached response for %r/%r is too old"
                        ": (added (%r) + valid_until (%r)) / 2 < now (%r)",
                        server_name, key_id,
                        ts_added_ms, ts_valid_until_ms, time_now_ms
                    )
                    # We are more than half way through the lifetime of the
                    # response. We should fetch a fresh copy.
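                    # (Worked example: a response added at t=1000ms and
                    # valid until t=5000ms is considered stale once now
                    # exceeds (1000 + 5000) / 2 = 3000ms.)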
                    miss = True
                else:
                    logger.debug(
                        "Cached response for %r/%r is still valid"
                        ": (added (%r) + valid_until (%r)) / 2 >= now (%r)",
                        server_name, key_id,
                        ts_added_ms, ts_valid_until_ms, time_now_ms
                    )

                if miss:
                    cache_misses.setdefault(server_name, set()).add(key_id)
                json_results.add(bytes(most_recent_result["key_json"]))
            else:
                for ts_added, result in results:
                    json_results.add(bytes(result["key_json"]))

        if cache_misses and query_remote_on_cache_miss:
            for server_name, key_ids in cache_misses.items():
                try:
                    yield self.keyring.get_server_verify_key_v2_direct(
                        server_name, key_ids
                    )
                except KeyLookupError as e:
                    logger.info("Failed to fetch key: %s", e)
                except Exception:
                    logger.exception("Failed to get key for %r", server_name)
            yield self.query_keys(
                request, query, query_remote_on_cache_miss=False
            )
        else:
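            # Build the response by splicing the stored "key_json" blobs
            # together byte-for-byte, presumably so each server's key JSON
            # is returned exactly as it was signed rather than being
            # round-tripped through a JSON serialiser.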
            result_io = BytesIO()
            result_io.write(b"{\"server_keys\":")
            sep = b"["
            for json_bytes in json_results:
                result_io.write(sep)
                result_io.write(json_bytes)
                sep = b","
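            # If json_results was empty the loop above never ran, so sep is
            # still b"["; write it now to keep the body well-formed:
            # {"server_keys":[]}.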
            if sep == b"[":
                result_io.write(sep)
            result_io.write(b"]}")

            respond_with_json_bytes(
                request, 200, result_io.getvalue(),
            )
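

# A minimal mounting sketch (hypothetical wiring; Synapse's real startup
# builds this resource tree itself, and `hs` stands for a configured
# HomeServer object providing get_keyring/get_datastore/get_clock):
#
#     from twisted.web.resource import Resource
#     from twisted.web.server import Site
#     from twisted.internet import reactor
#
#     v2 = Resource()
#     v2.putChild(b"query", RemoteKey(hs))
#     key = Resource()
#     key.putChild(b"v2", v2)
#     matrix = Resource()
#     matrix.putChild(b"key", key)
#     root = Resource()
#     root.putChild(b"_matrix", matrix)
#
#     reactor.listenTCP(8448, Site(root))
#     reactor.run()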