Run `black` on the scripts (#9981)

Turns out these scripts weren't getting linted.
Richard van der Hoff, 2021-05-14 11:46:35 +01:00 (committed by GitHub)
parent 5090f26b63
commit 6482075c95
8 changed files with 141 additions and 76 deletions

changelog.d/9981.misc (new file)

@@ -0,0 +1 @@
+Run `black` on files in the `scripts` directory.
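When pointed at a directory, black only picks up files matching its default `.py`/`.pyi` include pattern, which is why these extension-less scripts were being skipped. As a rough sketch, the equivalent manual run over the paths this commit adds to the lint lists:

    black scripts/export_signing_key scripts/generate_config \
        scripts/generate_log_config scripts/hash_password \
        scripts/register_new_matrix_user scripts/synapse_port_db \
        scripts-dev/build_debian_packages scripts-dev/sign_json \
        scripts-dev/update_database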

scripts-dev/build_debian_packages

@@ -21,18 +21,18 @@ DISTS = (
     "debian:buster",
     "debian:bullseye",
     "debian:sid",
     "ubuntu:bionic",  # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
     "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
     "ubuntu:groovy",  # 20.10 (EOL 2021-07-07)
     "ubuntu:hirsute",  # 21.04 (EOL 2022-01-05)
 )

-DESC = '''\
+DESC = """\
 Builds .debs for synapse, using a Docker image for the build environment.

 By default, builds for all known distributions, but a list of distributions
 can be passed on the commandline for debugging.
-'''
+"""


 class Builder(object):

@@ -46,7 +46,7 @@ class Builder(object):
         """Build deb for a single distribution"""

         if self._failed:
-            print("not building %s due to earlier failure" % (dist, ))
+            print("not building %s due to earlier failure" % (dist,))
             raise Exception("failed")

         try:

@@ -68,48 +68,65 @@ class Builder(object):
         # we tend to get source packages which are full of debs. (We could hack
         # around that with more magic in the build_debian.sh script, but that
         # doesn't solve the problem for natively-run dpkg-buildpakage).
-        debsdir = os.path.join(projdir, '../debs')
+        debsdir = os.path.join(projdir, "../debs")
         os.makedirs(debsdir, exist_ok=True)

         if self.redirect_stdout:
-            logfile = os.path.join(debsdir, "%s.buildlog" % (tag, ))
+            logfile = os.path.join(debsdir, "%s.buildlog" % (tag,))
             print("building %s: directing output to %s" % (dist, logfile))
             stdout = open(logfile, "w")
         else:
             stdout = None

         # first build a docker image for the build environment
-        subprocess.check_call([
-            "docker", "build",
-            "--tag", "dh-venv-builder:" + tag,
-            "--build-arg", "distro=" + dist,
-            "-f", "docker/Dockerfile-dhvirtualenv",
-            "docker",
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "build",
+                "--tag",
+                "dh-venv-builder:" + tag,
+                "--build-arg",
+                "distro=" + dist,
+                "-f",
+                "docker/Dockerfile-dhvirtualenv",
+                "docker",
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )

         container_name = "synapse_build_" + tag
         with self._lock:
             self.active_containers.add(container_name)

         # then run the build itself
-        subprocess.check_call([
-            "docker", "run",
-            "--rm",
-            "--name", container_name,
-            "--volume=" + projdir + ":/synapse/source:ro",
-            "--volume=" + debsdir + ":/debs",
-            "-e", "TARGET_USERID=%i" % (os.getuid(), ),
-            "-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
-            "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
-            "dh-venv-builder:" + tag,
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "run",
+                "--rm",
+                "--name",
+                container_name,
+                "--volume=" + projdir + ":/synapse/source:ro",
+                "--volume=" + debsdir + ":/debs",
+                "-e",
+                "TARGET_USERID=%i" % (os.getuid(),),
+                "-e",
+                "TARGET_GROUPID=%i" % (os.getgid(),),
+                "-e",
+                "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
+                "dh-venv-builder:" + tag,
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )

         with self._lock:
             self.active_containers.remove(container_name)

         if stdout is not None:
             stdout.close()
-            print("Completed build of %s" % (dist, ))
+            print("Completed build of %s" % (dist,))

     def kill_containers(self):
         with self._lock:

@@ -117,9 +134,14 @@ class Builder(object):
         for c in active:
             print("killing container %s" % (c,))
-            subprocess.run([
-                "docker", "kill", c,
-            ], stdout=subprocess.DEVNULL)
+            subprocess.run(
+                [
+                    "docker",
+                    "kill",
+                    c,
+                ],
+                stdout=subprocess.DEVNULL,
+            )
             with self._lock:
                 self.active_containers.remove(c)

@@ -130,31 +152,38 @@ def run_builds(dists, jobs=1, skip_tests=False):
     def sig(signum, _frame):
         print("Caught SIGINT")
         builder.kill_containers()
+
     signal.signal(signal.SIGINT, sig)

     with ThreadPoolExecutor(max_workers=jobs) as e:
         res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)

         # make sure we consume the iterable so that exceptions are raised.
-        for r in res:
+        for _ in res:
             pass


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description=DESC,
     )
     parser.add_argument(
-        '-j', '--jobs', type=int, default=1,
-        help='specify the number of builds to run in parallel',
+        "-j",
+        "--jobs",
+        type=int,
+        default=1,
+        help="specify the number of builds to run in parallel",
     )
     parser.add_argument(
-        '--no-check', action='store_true',
-        help='skip running tests after building',
+        "--no-check",
+        action="store_true",
+        help="skip running tests after building",
     )
     parser.add_argument(
-        'dist', nargs='*', default=DISTS,
-        help='a list of distributions to build for. Default: %(default)s',
+        "dist",
+        nargs="*",
+        default=DISTS,
+        help="a list of distributions to build for. Default: %(default)s",
     )
     args = parser.parse_args()
     run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
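The churn above is black's standard mechanical rewriting: single quotes become double quotes, the stray space in one-tuples like `(dist, )` is dropped, and calls that overflow the default 88-column limit have their arguments exploded one per line. (A few hunks, such as `for r in res:` becoming `for _ in res:`, read as hand fixes made alongside the reformat rather than black output.) The quote and one-tuple rewrites are easy to reproduce by piping a snippet through black on stdin (assumes black is installed; `-` makes it read stdin and write the result to stdout):

    echo "print('not building %s due to earlier failure' % (dist, ))" | black -
    # output: print("not building %s due to earlier failure" % (dist,))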

scripts-dev/lint.sh

@@ -80,8 +80,22 @@ else
   # then lint everything!
   if [[ -z ${files+x} ]]; then
     # Lint all source code files and directories
-    # Note: this list aims the mirror the one in tox.ini
-    files=("synapse" "docker" "tests" "scripts-dev" "scripts" "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite")
+    # Note: this list aims to mirror the one in tox.ini
+    files=(
+        "synapse" "docker" "tests"
+        # annoyingly, black doesn't find these so we have to list them
+        "scripts/export_signing_key"
+        "scripts/generate_config"
+        "scripts/generate_log_config"
+        "scripts/hash_password"
+        "scripts/register_new_matrix_user"
+        "scripts/synapse_port_db"
+        "scripts-dev"
+        "scripts-dev/build_debian_packages"
+        "scripts-dev/sign_json"
+        "scripts-dev/update_database"
+        "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite"
+    )
   fi
 fi
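For orientation, the `files` array built above is what the rest of lint.sh hands to each tool, roughly like the following (a sketch only — the exact set of linters the script invokes is an assumption here):

    # hypothetical tail of scripts-dev/lint.sh: every tool sees the same list
    isort "${files[@]}"
    python3 -m black "${files[@]}"
    flake8 "${files[@]}"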

scripts/export_signing_key

@@ -30,7 +30,11 @@ def exit(status: int = 0, message: Optional[str] = None):
 def format_plain(public_key: nacl.signing.VerifyKey):
     print(
         "%s:%s %s"
-        % (public_key.alg, public_key.version, encode_verify_key_base64(public_key),)
+        % (
+            public_key.alg,
+            public_key.version,
+            encode_verify_key_base64(public_key),
+        )
     )

@@ -50,7 +54,10 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser()

     parser.add_argument(
-        "key_file", nargs="+", type=argparse.FileType("r"), help="The key file to read",
+        "key_file",
+        nargs="+",
+        type=argparse.FileType("r"),
+        help="The key file to read",
     )

     parser.add_argument(

@@ -63,7 +70,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "--expiry-ts",
         type=int,
-        default=int(time.time() * 1000) + 6*3600000,
+        default=int(time.time() * 1000) + 6 * 3600000,
         help=(
             "The expiry time to use for -x, in milliseconds since 1970. The default "
             "is (now+6h)."

scripts/generate_config

@@ -11,23 +11,22 @@ if __name__ == "__main__":
     parser.add_argument(
         "--config-dir",
         default="CONFDIR",
-        help="The path where the config files are kept. Used to create filenames for "
-             "things like the log config and the signing key. Default: %(default)s",
+        help="The path where the config files are kept. Used to create filenames for "
+        "things like the log config and the signing key. Default: %(default)s",
     )

     parser.add_argument(
         "--data-dir",
         default="DATADIR",
-        help="The path where the data files are kept. Used to create filenames for "
-             "things like the database and media store. Default: %(default)s",
+        help="The path where the data files are kept. Used to create filenames for "
+        "things like the database and media store. Default: %(default)s",
     )

     parser.add_argument(
         "--server-name",
         default="SERVERNAME",
-        help="The server name. Used to initialise the server_name config param, but also "
-             "used in the names of some of the config files. Default: %(default)s",
+        help="The server name. Used to initialise the server_name config param, but also "
+        "used in the names of some of the config files. Default: %(default)s",
     )

     parser.add_argument(

@@ -41,21 +40,22 @@ if __name__ == "__main__":
         "--generate-secrets",
         action="store_true",
         help="Enable generation of new secrets for things like the macaroon_secret_key."
-        "By default, these parameters will be left unset."
+        "By default, these parameters will be left unset.",
     )

     parser.add_argument(
-        "-o", "--output-file",
-        type=argparse.FileType('w'),
+        "-o",
+        "--output-file",
+        type=argparse.FileType("w"),
         default=sys.stdout,
         help="File to write the configuration to. Default: stdout",
     )

     parser.add_argument(
         "--header-file",
-        type=argparse.FileType('r'),
-        help="File from which to read a header, which will be printed before the "
-             "generated config.",
+        type=argparse.FileType("r"),
+        help="File from which to read a header, which will be printed before the "
+        "generated config.",
     )

     args = parser.parse_args()

scripts/hash_password

@@ -41,7 +41,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "-c",
         "--config",
-        type=argparse.FileType('r'),
+        type=argparse.FileType("r"),
         help=(
             "Path to server config file. "
             "Used to read in bcrypt_rounds and password_pepper."

@@ -72,8 +72,8 @@ if __name__ == "__main__":
     pw = unicodedata.normalize("NFKC", password)

     hashed = bcrypt.hashpw(
-        pw.encode('utf8') + password_pepper.encode("utf8"),
+        pw.encode("utf8") + password_pepper.encode("utf8"),
         bcrypt.gensalt(bcrypt_rounds),
-    ).decode('ascii')
+    ).decode("ascii")

     print(hashed)

scripts/synapse_port_db

@@ -294,8 +294,7 @@ class Porter(object):
         return table, already_ported, total_to_port, forward_chunk, backward_chunk

     async def get_table_constraints(self) -> Dict[str, Set[str]]:
-        """Returns a map of tables that have foreign key constraints to tables they depend on.
-        """
+        """Returns a map of tables that have foreign key constraints to tables they depend on."""

         def _get_constraints(txn):
             # We can pull the information about foreign key constraints out from

@@ -504,7 +503,9 @@ class Porter(object):
             return

     def build_db_store(
-        self, db_config: DatabaseConnectionConfig, allow_outdated_version: bool = False,
+        self,
+        db_config: DatabaseConnectionConfig,
+        allow_outdated_version: bool = False,
     ):
         """Builds and returns a database store using the provided configuration.

@@ -740,7 +741,7 @@ class Porter(object):
                 return col

             outrows = []
-            for i, row in enumerate(rows):
+            for row in rows:
                 try:
                     outrows.append(
                         tuple(conv(j, col) for j, col in enumerate(row) if j > 0)

@@ -890,8 +891,7 @@ class Porter(object):
         await self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)

     async def _setup_events_stream_seqs(self) -> None:
-        """Set the event stream sequences to the correct values.
-        """
+        """Set the event stream sequences to the correct values."""

         # We get called before we've ported the events table, so we need to
         # fetch the current positions from the SQLite store.

@@ -920,12 +920,14 @@ class Porter(object):
             )

         await self.postgres_store.db_pool.runInteraction(
-            "_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
+            "_setup_events_stream_seqs",
+            _setup_events_stream_seqs_set_pos,
         )

-    async def _setup_sequence(self, sequence_name: str, stream_id_tables: Iterable[str]) -> None:
-        """Set a sequence to the correct value.
-        """
+    async def _setup_sequence(
+        self, sequence_name: str, stream_id_tables: Iterable[str]
+    ) -> None:
+        """Set a sequence to the correct value."""
         current_stream_ids = []
         for stream_id_table in stream_id_tables:
             max_stream_id = await self.sqlite_store.db_pool.simple_select_one_onecol(

@@ -939,14 +941,19 @@ class Porter(object):
         next_id = max(current_stream_ids) + 1

         def r(txn):
-            sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name, )
-            txn.execute(sql + " %s", (next_id, ))
+            sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name,)
+            txn.execute(sql + " %s", (next_id,))

-        await self.postgres_store.db_pool.runInteraction("_setup_%s" % (sequence_name,), r)
+        await self.postgres_store.db_pool.runInteraction(
+            "_setup_%s" % (sequence_name,), r
+        )

     async def _setup_auth_chain_sequence(self) -> None:
         curr_chain_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="event_auth_chains", keyvalues={}, retcol="MAX(chain_id)", allow_none=True
+            table="event_auth_chains",
+            keyvalues={},
+            retcol="MAX(chain_id)",
+            allow_none=True,
         )

         def r(txn):

@@ -968,8 +975,7 @@ class Porter(object):
 class Progress(object):
-    """Used to report progress of the port
-    """
+    """Used to report progress of the port"""

     def __init__(self):
         self.tables = {}

@@ -994,8 +1000,7 @@ class Progress(object):
 class CursesProgress(Progress):
-    """Reports progress to a curses window
-    """
+    """Reports progress to a curses window"""

     def __init__(self, stdscr):
         self.stdscr = stdscr

@@ -1020,7 +1025,7 @@ class CursesProgress(Progress):
         self.total_processed = 0
         self.total_remaining = 0

-        for table, data in self.tables.items():
+        for data in self.tables.values():
             self.total_processed += data["num_done"] - data["start"]
             self.total_remaining += data["total"] - data["num_done"]

@@ -1111,8 +1116,7 @@ class CursesProgress(Progress):
 class TerminalProgress(Progress):
-    """Just prints progress to the terminal
-    """
+    """Just prints progress to the terminal"""

     def update(self, table, num_done):
         super(TerminalProgress, self).update(table, num_done)

tox.ini

@@ -34,7 +34,17 @@ lint_targets =
     synapse
     tests
     scripts
+    # annoyingly, black doesn't find these so we have to list them
+    scripts/export_signing_key
+    scripts/generate_config
+    scripts/generate_log_config
+    scripts/hash_password
+    scripts/register_new_matrix_user
+    scripts/synapse_port_db
     scripts-dev
+    scripts-dev/build_debian_packages
+    scripts-dev/sign_json
+    scripts-dev/update_database
     stubs
     contrib
     synctl
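This keeps tox's `lint_targets` in step with the list in scripts-dev/lint.sh, so the scripts get linted whichever entry point is used. Assuming the lint environment in this tox.ini keeps Synapse's usual name, the CI-equivalent local run is:

    tox -e lint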