Actually add scripts-dev/
parent 3c92231094
commit 673375fe2d
@@ -0,0 +1,65 @@
from synapse.events import FrozenEvent
from synapse.api.auth import Auth

from mock import Mock

import argparse
import itertools
import json
import sys


def check_auth(auth, auth_chain, events):
    auth_chain.sort(key=lambda e: e.depth)

    auth_map = {
        e.event_id: e
        for e in auth_chain
    }

    create_events = {}
    for e in auth_chain:
        if e.type == "m.room.create":
            create_events[e.room_id] = e

    for e in itertools.chain(auth_chain, events):
        auth_events_list = [auth_map[i] for i, _ in e.auth_events]

        auth_events = {
            (e.type, e.state_key): e
            for e in auth_events_list
        }

        auth_events[("m.room.create", "")] = create_events[e.room_id]

        try:
            auth.check(e, auth_events=auth_events)
        except Exception as ex:
            print "Failed:", e.event_id, e.type, e.state_key
            print "Auth_events:", auth_events
            print ex
            print json.dumps(e.get_dict(), sort_keys=True, indent=4)
            # raise
        print "Success:", e.event_id, e.type, e.state_key


if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument(
        'json',
        nargs='?',
        type=argparse.FileType('r'),
        default=sys.stdin,
    )

    args = parser.parse_args()

    js = json.load(args.json)

    auth = Auth(Mock())
    check_auth(
        auth,
        [FrozenEvent(d) for d in js["auth_chain"]],
        [FrozenEvent(d) for d in js["pdus"]],
    )
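A possible invocation of the auth-checking script above, assuming it is saved as check_auth.py and that events.json (an illustrative name) holds a JSON object with "auth_chain" and "pdus" lists of event dicts; with no argument it reads the same JSON from stdin:

    $ python check_auth.py events.json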
@@ -0,0 +1,50 @@
from synapse.crypto.event_signing import *
from syutil.base64util import encode_base64

import argparse
import hashlib
import logging  # used by logging.basicConfig() below
import sys
import json


class dictobj(dict):
    def __init__(self, *args, **kargs):
        dict.__init__(self, *args, **kargs)
        self.__dict__ = self

    def get_dict(self):
        return dict(self)

    def get_full_dict(self):
        return dict(self)

    def get_pdu_json(self):
        return dict(self)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
                        default=sys.stdin)
    args = parser.parse_args()
    logging.basicConfig()

    event_json = dictobj(json.load(args.input_json))

    algorithms = {
        "sha256": hashlib.sha256,
    }

    for alg_name in event_json.hashes:
        if check_event_content_hash(event_json, algorithms[alg_name]):
            print "PASS content hash %s" % (alg_name,)
        else:
            print "FAIL content hash %s" % (alg_name,)

    for algorithm in algorithms.values():
        name, h_bytes = compute_event_reference_hash(event_json, algorithm)
        print "Reference hash %s: %s" % (name, encode_base64(h_bytes))


if __name__ == "__main__":
    main()
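A usage sketch for the hash-checking script above (the file name hash_event.py is an assumption): it reads a single event's JSON, including its "hashes" field, from a file argument or stdin and prints PASS/FAIL per content-hash algorithm plus the reference hash:

    $ python hash_event.py event.json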
@@ -0,0 +1,73 @@
from syutil.crypto.jsonsign import verify_signed_json
from syutil.crypto.signing_key import (
    decode_verify_key_bytes, write_signing_keys
)
from syutil.base64util import decode_base64

import urllib2
import json
import sys
import dns.resolver
import pprint
import argparse
import logging


def get_targets(server_name):
    if ":" in server_name:
        target, port = server_name.split(":")
        yield (target, int(port))
        return
    try:
        answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
        for srv in answers:
            yield (srv.target, srv.port)
    except dns.resolver.NXDOMAIN:
        yield (server_name, 8448)


def get_server_keys(server_name, target, port):
    url = "https://%s:%i/_matrix/key/v1" % (target, port)
    keys = json.load(urllib2.urlopen(url))
    verify_keys = {}
    for key_id, key_base64 in keys["verify_keys"].items():
        verify_key = decode_verify_key_bytes(key_id, decode_base64(key_base64))
        verify_signed_json(keys, server_name, verify_key)
        verify_keys[key_id] = verify_key
    return verify_keys


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("signature_name")
    parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
                        default=sys.stdin)

    args = parser.parse_args()
    logging.basicConfig()

    server_name = args.signature_name
    keys = {}
    for target, port in get_targets(server_name):
        try:
            keys = get_server_keys(server_name, target, port)
            print "Using keys from https://%s:%s/_matrix/key/v1" % (target, port)
            write_signing_keys(sys.stdout, keys.values())
            break
        except:
            logging.exception("Error talking to %s:%s", target, port)

    json_to_check = json.load(args.input_json)
    print "Checking JSON:"
    for key_id in json_to_check["signatures"][args.signature_name]:
        try:
            key = keys[key_id]
            verify_signed_json(json_to_check, args.signature_name, key)
            print "PASS %s" % (key_id,)
        except:
            logging.exception("Check for key %s failed" % (key_id,))
            print "FAIL %s" % (key_id,)


if __name__ == '__main__':
    main()
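A usage sketch for the signature-checking script above (the file name check_signature.py is an assumption): the positional argument names the server whose signature should be verified, the signed JSON arrives on stdin, and the matching verify keys are fetched from that server over federation:

    $ python check_signature.py matrix.org < signed_json.json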
@@ -0,0 +1,33 @@
#!/usr/bin/perl -pi
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

$copyright = <<EOT;
/* Copyright 2015 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
EOT

s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);
@@ -0,0 +1,33 @@
#!/usr/bin/perl -pi
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

$copyright = <<EOT;
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

EOT

s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);
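Both copyrighter scripts above run perl in in-place edit mode (-pi), the first prepending a C-style /* */ licence header and the second a #-commented one, so a usage sketch is simply to list the files to be prefixed (the script name and paths below are assumptions):

    $ perl copyrighter.pl file1 file2 ...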
@@ -0,0 +1,16 @@
#!/bin/sh

# This script will write a dump file of local user state if you want to splat
# your entire server database and start again but preserve the identity of
# local users and their access tokens.
#
# To restore it, use
#
#     $ sqlite3 homeserver.db < table-save.sql

sqlite3 "$1" <<'EOF' >table-save.sql
.dump users
.dump access_tokens
.dump presence
.dump profiles
EOF
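A usage sketch for the dump script above (the script name is an assumption; "$1" is the SQLite database to dump, and the output always lands in table-save.sql in the current directory):

    $ ./database-save.sh homeserver.db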
@@ -0,0 +1,146 @@
import nacl.signing
import json
import base64
import requests
import sys
import srvlookup


def encode_base64(input_bytes):
    """Encode bytes as a base64 string without any padding."""

    input_len = len(input_bytes)
    output_len = 4 * ((input_len + 2) // 3) + (input_len + 2) % 3 - 2
    output_bytes = base64.b64encode(input_bytes)
    output_string = output_bytes[:output_len].decode("ascii")
    return output_string


def decode_base64(input_string):
    """Decode a base64 string to bytes inferring padding from the length of the
    string."""

    input_bytes = input_string.encode("ascii")
    input_len = len(input_bytes)
    padding = b"=" * (3 - ((input_len + 3) % 4))
    output_len = 3 * ((input_len + 2) // 4) + (input_len + 2) % 4 - 2
    output_bytes = base64.b64decode(input_bytes + padding)
    return output_bytes[:output_len]


def encode_canonical_json(value):
    return json.dumps(
        value,
        # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
        ensure_ascii=False,
        # Remove unnecessary white space.
        separators=(',', ':'),
        # Sort the keys of dictionaries.
        sort_keys=True,
        # Encode the resulting unicode as UTF-8 bytes.
    ).encode("UTF-8")


def sign_json(json_object, signing_key, signing_name):
    signatures = json_object.pop("signatures", {})
    unsigned = json_object.pop("unsigned", None)

    signed = signing_key.sign(encode_canonical_json(json_object))
    signature_base64 = encode_base64(signed.signature)

    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    signatures.setdefault(signing_name, {})[key_id] = signature_base64

    json_object["signatures"] = signatures
    if unsigned is not None:
        json_object["unsigned"] = unsigned

    return json_object


NACL_ED25519 = "ed25519"


def decode_signing_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded signing key

    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.

    Returns:
        A SigningKey object.
    """
    if algorithm == NACL_ED25519:
        key_bytes = decode_base64(key_base64)
        key = nacl.signing.SigningKey(key_bytes)
        key.version = version
        key.alg = NACL_ED25519
        return key
    else:
        raise ValueError("Unsupported algorithm %s" % (algorithm,))


def read_signing_keys(stream):
    """Reads a list of keys from a stream

    Args:
        stream : A stream to iterate for keys.

    Returns:
        list of SigningKey objects.
    """
    keys = []
    for line in stream:
        algorithm, version, key_base64 = line.split()
        keys.append(decode_signing_key_base64(algorithm, version, key_base64))
    return keys


def lookup(destination, path):
    if ":" in destination:
        return "https://%s%s" % (destination, path)
    else:
        try:
            srv = srvlookup.lookup("matrix", "tcp", destination)[0]
            return "https://%s:%d%s" % (srv.host, srv.port, path)
        except:
            return "https://%s:%d%s" % (destination, 8448, path)


def get_json(origin_name, origin_key, destination, path):
    request_json = {
        "method": "GET",
        "uri": path,
        "origin": origin_name,
        "destination": destination,
    }

    signed_json = sign_json(request_json, origin_key, origin_name)

    authorization_headers = []

    for key, sig in signed_json["signatures"][origin_name].items():
        authorization_headers.append(bytes(
            "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
                origin_name, key, sig,
            )
        ))

    result = requests.get(
        lookup(destination, path),
        headers={"Authorization": authorization_headers[0]},
        verify=False,
    )
    return result.json()


def main():
    origin_name, keyfile, destination, path = sys.argv[1:]

    with open(keyfile) as f:
        key = read_signing_keys(f)[0]

    result = get_json(
        origin_name, key, destination, "/_matrix/federation/v1/" + path
    )

    json.dump(result, sys.stdout)


if __name__ == "__main__":
    main()
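A usage sketch for the federation request script above, matching the argument order in main(): origin server name, signing key file, destination server, and the path suffix appended to /_matrix/federation/v1/ (the script name and all values below are illustrative):

    $ python federation_request.py myserver.example.com signing.key matrix.org "query/directory?room_alias=%23test%3Amatrix.org"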
@@ -0,0 +1,69 @@
from synapse.storage.pdu import PduStore
from synapse.storage.signatures import SignatureStore
from synapse.storage._base import SQLBaseStore
from synapse.federation.units import Pdu
from synapse.crypto.event_signing import (
    add_event_pdu_content_hash, compute_pdu_event_reference_hash
)
from synapse.api.events.utils import prune_pdu
from syutil.base64util import encode_base64, decode_base64
from syutil.jsonutil import encode_canonical_json
import sqlite3
import sys


class Store(object):
    _get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
    _get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
    _get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
    _get_pdu_origin_signatures_txn = SignatureStore.__dict__["_get_pdu_origin_signatures_txn"]
    _store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
    _store_pdu_reference_hash_txn = SignatureStore.__dict__["_store_pdu_reference_hash_txn"]
    _store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
    _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]


store = Store()


def select_pdus(cursor):
    cursor.execute(
        "SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
    )

    ids = cursor.fetchall()

    pdu_tuples = store._get_pdu_tuples(cursor, ids)

    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]

    reference_hashes = {}

    for pdu in pdus:
        try:
            if pdu.prev_pdus:
                print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
                for pdu_id, origin, hashes in pdu.prev_pdus:
                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                    hashes[ref_alg] = encode_base64(ref_hsh)
                    store._store_prev_pdu_hash_txn(cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh)
                print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
            pdu = add_event_pdu_content_hash(pdu)
            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
            store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)

            for alg, hsh_base64 in pdu.hashes.items():
                print alg, hsh_base64
                store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))

        except:
            print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus


def main():
    conn = sqlite3.connect(sys.argv[1])
    cursor = conn.cursor()
    select_pdus(cursor)
    conn.commit()


if __name__ == '__main__':
    main()
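A usage sketch for the PDU-hashing script above (the file name is an assumption): it takes the path to the homeserver's SQLite database, computes content and reference hashes for every stored PDU, stores them, and commits:

    $ python hash_pdus.py homeserver.db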
@@ -0,0 +1,39 @@
#!/usr/bin/env perl

use strict;
use warnings;

use DBI;
use DBD::SQLite;
use JSON;
use Getopt::Long;

my $db; # = "homeserver.db";
my $server = "http://localhost:8008";
my $size = 320;

GetOptions("db|d=s",     \$db,
           "server|s=s", \$server,
           "width|w=i",  \$size) or usage();

usage() unless $db;

my $dbh = DBI->connect("dbi:SQLite:dbname=$db","","") || die $DBI::errstr;

my $res = $dbh->selectall_arrayref("select token, name from access_tokens, users where access_tokens.user_id = users.id group by user_id") || die $DBI::errstr;

foreach (@$res) {
    my ($token, $mxid) = ($_->[0], $_->[1]);
    my ($user_id) = ($mxid =~ m/@(.*):/);
    my ($url) = $dbh->selectrow_array("select avatar_url from profiles where user_id=?", undef, $user_id);
    if (!$url || $url =~ /#auto$/) {
        `curl -s -o tmp.png "$server/_matrix/media/v1/identicon?name=${mxid}&width=$size&height=$size"`;
        my $json = `curl -s -X POST -H "Content-Type: image/png" -T "tmp.png" $server/_matrix/media/v1/upload?access_token=$token`;
        my $content_uri = from_json($json)->{content_uri};
        `curl -X PUT -H "Content-Type: application/json" --data '{ "avatar_url": "${content_uri}#auto"}' $server/_matrix/client/api/v1/profile/${mxid}/avatar_url?access_token=$token`;
    }
}

sub usage {
    die "usage: ./make-identicons.pl\n\t-d database [e.g. homeserver.db]\n\t-s homeserver (default: http://localhost:8008)\n\t-w identicon size in pixels (default 320)";
}
@@ -0,0 +1,24 @@
#!/bin/bash

## CAUTION:
## This script will remove (hopefully) all trace of the given room ID from
## your homeserver.db

## Do not run it lightly.

ROOMID="$1"

sqlite3 homeserver.db <<EOF
DELETE FROM context_depth WHERE context = '$ROOMID';
DELETE FROM current_state WHERE context = '$ROOMID';
DELETE FROM feedback WHERE room_id = '$ROOMID';
DELETE FROM messages WHERE room_id = '$ROOMID';
DELETE FROM pdu_backward_extremities WHERE context = '$ROOMID';
DELETE FROM pdu_edges WHERE context = '$ROOMID';
DELETE FROM pdu_forward_extremities WHERE context = '$ROOMID';
DELETE FROM pdus WHERE context = '$ROOMID';
DELETE FROM room_data WHERE room_id = '$ROOMID';
DELETE FROM room_memberships WHERE room_id = '$ROOMID';
DELETE FROM rooms WHERE room_id = '$ROOMID';
DELETE FROM state_pdus WHERE context = '$ROOMID';
EOF
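A usage sketch for the room-deletion script above (the script name is an assumption): run it from the directory containing homeserver.db and pass the room ID as the only argument, quoting it so the shell leaves the leading "!" intact:

    $ ./delete_room.sh '!someroom:example.com'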
@@ -0,0 +1 @@
sphinx-apidoc -o docs/sphinx/ synapse/ -ef