2014-10-15 10:09:04 -06:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2016-01-06 21:26:29 -07:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2014-10-15 10:09:04 -06:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
|
2014-11-14 09:45:39 -07:00
|
|
|
from synapse.api.errors import SynapseError, Codes
|
2015-08-24 09:17:38 -06:00
|
|
|
from synapse.events.utils import prune_event
|
|
|
|
|
|
|
|
from canonicaljson import encode_canonical_json
|
|
|
|
from unpaddedbase64 import encode_base64, decode_base64
|
|
|
|
from signedjson.sign import sign_json
|
2014-10-15 10:09:04 -06:00
|
|
|
|
|
|
|
import hashlib
|
2014-10-17 12:41:32 -06:00
|
|
|
import logging
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
2014-10-15 10:09:04 -06:00
|
|
|
|
|
|
|
|
2014-11-03 10:51:42 -07:00
|
|
|
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Check whether the hash for this PDU matches the contents"""
    algo_name, expected = compute_content_hash(event, hash_algorithm)

    logger.debug("Expecting hash: %s", encode_base64(expected))

    # Malformed events may lack a 'hashes' mapping entirely, or carry a
    # non-dict value there; either way reject them as unauthorized.
    event_hashes = event.get("hashes")
    if not isinstance(event_hashes, dict):
        raise SynapseError(400, "Malformed 'hashes'", Codes.UNAUTHORIZED)

    if algo_name not in event_hashes:
        message = "Algorithm %s not in hashes %s" % (algo_name, list(event_hashes))
        raise SynapseError(400, message, Codes.UNAUTHORIZED)

    claimed_b64 = event_hashes[algo_name]
    try:
        claimed = decode_base64(claimed_b64)
    except Exception:
        raise SynapseError(
            400, "Invalid base64: %s" % (claimed_b64,), Codes.UNAUTHORIZED
        )

    return claimed == expected
|
2014-10-15 10:09:04 -06:00
|
|
|
|
|
|
|
|
2014-12-03 09:07:21 -07:00
|
|
|
def compute_content_hash(event, hash_algorithm):
    """Compute the content hash of an event.

    Returns a (algorithm name, digest bytes) pair.
    """
    pdu_json = event.get_pdu_json()

    # These fields vary between servers / over an event's lifetime and must
    # not contribute to the content hash.
    for transient_key in (
        "age_ts",
        "unsigned",
        "signatures",
        "hashes",
        "outlier",
        "destinations",
    ):
        pdu_json.pop(transient_key, None)

    hasher = hash_algorithm(encode_canonical_json(pdu_json))
    return hasher.name, hasher.digest()
|
2014-10-17 04:40:35 -06:00
|
|
|
|
|
|
|
|
2014-10-31 09:35:39 -06:00
|
|
|
def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
    """Compute the reference hash of the redacted form of an event.

    Returns a (algorithm name, digest bytes) pair.
    """
    pruned = prune_event(event).get_pdu_json()

    # Strip fields that are not part of the reference-hashed content.
    for volatile_key in ("signatures", "age_ts", "unsigned"):
        pruned.pop(volatile_key, None)

    hasher = hash_algorithm(encode_canonical_json(pruned))
    return hasher.name, hasher.digest()
|
|
|
|
|
|
|
|
|
2014-11-03 10:51:42 -07:00
|
|
|
def compute_event_signature(event, signature_name, signing_key):
    """Sign the redacted form of an event and return its 'signatures' mapping.

    Args:
        event: the event to sign.
        signature_name: the name to sign as (e.g. the server name).
        signing_key: the key to sign with.
    """
    to_sign = prune_event(event).get_pdu_json()
    to_sign.pop("age_ts", None)
    to_sign.pop("unsigned", None)

    logger.debug("Signing event: %s", encode_canonical_json(to_sign))
    signed = sign_json(to_sign, signature_name, signing_key)
    logger.debug("Signed event: %s", encode_canonical_json(signed))

    return signed["signatures"]
|
|
|
|
|
|
|
|
|
|
|
|
def add_hashes_and_signatures(event, signature_name, signing_key,
                              hash_algorithm=hashlib.sha256):
    """Add a content hash and a signature to *event*, mutating it in place.

    Args:
        event: the event to hash and sign.
        signature_name: the name to sign as (e.g. the server name).
        signing_key: the key to sign with.
        hash_algorithm: hashlib constructor used for the content hash.
    """
    # NOTE: a long-dead commented-out block that hashed old_state_events into
    # an `event.state_hash` attribute used to live here; it has been removed.
    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)

    if not hasattr(event, "hashes"):
        event.hashes = {}
    event.hashes[name] = encode_base64(digest)

    event.signatures = compute_event_signature(
        event,
        signature_name=signature_name,
        signing_key=signing_key,
    )
|