# Copyright 2014 - 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib.util
import logging
import os
import re
from collections import Counter
from typing import Collection, Generator, Iterable, List, Optional, TextIO, Tuple

import attr
from typing_extensions import Counter as CounterType

from synapse.config.homeserver import HomeServerConfig
from synapse.storage.database import LoggingDatabaseConnection
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
from synapse.storage.schema import SCHEMA_COMPAT_VERSION, SCHEMA_VERSION
from synapse.storage.types import Cursor

logger = logging.getLogger(__name__)


schema_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "schema")


class PrepareDatabaseException(Exception):
    pass


class UpgradeDatabaseException(PrepareDatabaseException):
    pass


OUTDATED_SCHEMA_ON_WORKER_ERROR = (
    "Expected database schema version %i but got %i: run the main synapse process to "
    "upgrade the database schema before starting worker processes."
)

EMPTY_DATABASE_ON_WORKER_ERROR = (
    "Uninitialised database: run the main synapse process to prepare the database "
    "schema before starting worker processes."
)

UNAPPLIED_DELTA_ON_WORKER_ERROR = (
    "Database schema delta %s has not been applied: run the main synapse process to "
    "upgrade the database schema before starting worker processes."
)


@attr.s
class _SchemaState:
    current_version: int = attr.ib()
    """The current schema version of the database"""

    compat_version: Optional[int] = attr.ib()
    """The SCHEMA_VERSION of the oldest version of Synapse for this database

    If this is None, we have an old version of the database without the necessary
    table.
    """

    applied_deltas: Collection[str] = attr.ib(factory=tuple)
    """Any delta files for `current_version` which have already been applied"""

    upgraded: bool = attr.ib(default=False)
    """Whether the current state was reached by applying deltas.

    If False, we have run the full schema for `current_version`, and have applied no
    deltas since. If True, we have run some deltas since the original creation."""
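
# For illustration (all values hypothetical): a database created from the full
# schema at version 71 and since upgraded by deltas might be represented as
#
#     _SchemaState(
#         current_version=72,
#         compat_version=71,
#         applied_deltas=("72/01add_table.sql",),
#         upgraded=True,
#     )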


def prepare_database(
    db_conn: LoggingDatabaseConnection,
    database_engine: BaseDatabaseEngine,
    config: Optional[HomeServerConfig],
    databases: Collection[str] = ("main", "state"),
) -> None:
    """Prepares a physical database for usage. Will either create all necessary tables
    or upgrade from an older schema version.

    If `config` is None then prepare_database will assert that no upgrade is
    necessary, *or* will create a fresh database if the database is empty.

    Args:
        db_conn:
        database_engine:
        config:
            application config, or None if we are connecting to an existing
            database which we expect to be configured already
        databases: The names of the databases that will be used
            with this physical database. Defaults to all databases.
    """

    try:
        cur = db_conn.cursor(txn_name="prepare_database")

        # sqlite does not automatically start transactions for DDL / SELECT statements,
        # so we start one before running anything. This ensures that any upgrades
        # are either applied completely, or not at all.
        #
        # psycopg2 does not automatically start transactions when in autocommit mode.
        # While it is technically harmless to nest transactions in postgres, doing so
        # results in a warning in Postgres' logs per query. And we'd rather like to
        # avoid doing that.
        if isinstance(database_engine, Sqlite3Engine) or (
            isinstance(database_engine, PostgresEngine) and db_conn.autocommit
        ):
            cur.execute("BEGIN TRANSACTION")

        logger.info("%r: Checking existing schema version", databases)
        version_info = _get_or_create_schema_state(cur, database_engine)

        if version_info:
            logger.info(
                "%r: Existing schema is %i (+%i deltas)",
                databases,
                version_info.current_version,
                len(version_info.applied_deltas),
            )

            # config should only be None when we are preparing an in-memory SQLite db,
            # which should be empty.
            if config is None:
                raise ValueError(
                    "config==None in prepare_database, but database is not empty"
                )

            # This should be run on all processes, master or worker. The master will
            # apply the deltas, while workers will check if any outstanding deltas
            # exist and raise a PrepareDatabaseException if they do.
            _upgrade_existing_database(
                cur,
                version_info,
                database_engine,
                config,
                databases=databases,
            )

        else:
            logger.info("%r: Initialising new database", databases)

            # if it's a worker app, refuse to upgrade the database, to avoid multiple
            # workers doing it at once.
            if config and config.worker.worker_app is not None:
                raise UpgradeDatabaseException(EMPTY_DATABASE_ON_WORKER_ERROR)

            _setup_new_database(cur, database_engine, databases=databases)

        # check if any of our configured dynamic modules want a database
        if config is not None:
            _apply_module_schemas(cur, database_engine, config)

        cur.close()
        db_conn.commit()
    except Exception:
        db_conn.rollback()
        raise
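
# Rough usage sketch (illustrative only): preparing a fresh in-memory SQLite
# database, the situation the comments above describe for `config=None`. The
# helper names (`create_engine`, `make_conn`, `database_config`) are
# assumptions for the example rather than guaranteed APIs:
#
#     engine = create_engine(database_config.config)  # a Sqlite3Engine here
#     db_conn = make_conn(database_config, engine, "prepare")
#     prepare_database(db_conn, engine, config=None)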


def _setup_new_database(
    cur: Cursor, database_engine: BaseDatabaseEngine, databases: Collection[str]
) -> None:
    """Sets up the physical database by finding a base set of "full schemas" and
    then applying any necessary deltas, including schemas from the given data
    stores.

    The "full_schemas" directory has subdirectories named after versions. This
    function searches for the highest version less than or equal to
    `SCHEMA_VERSION` and executes all .sql files in that directory.

    The function will then apply all deltas for all versions after the base
    version.

    Example directory structure:

        schema/
            common/
                delta/
                    ...
                full_schemas/
                    11/
                        foo.sql
            main/
                delta/
                    ...
                full_schemas/
                    3/
                        test.sql
                        ...
                    11/
                        bar.sql
                        ...

    In the example foo.sql and bar.sql would be run, and then any delta files
    for versions strictly greater than 11.

    Note: we apply the full schemas and deltas from the `schema/common`
    folder as well as those in the databases specified.

    Args:
        cur: a database cursor
        database_engine
        databases: The names of the databases to instantiate on the given physical
            database.
    """

    # We're about to set up a brand new database, so we check that it's
    # configured to our liking.
    database_engine.check_new_database(cur)

    full_schemas_dir = os.path.join(schema_path, "common", "full_schemas")

    # First we find the highest full schema version we have
    valid_versions = []

    for filename in os.listdir(full_schemas_dir):
        try:
            ver = int(filename)
        except ValueError:
            continue

        if ver <= SCHEMA_VERSION:
            valid_versions.append(ver)

    if not valid_versions:
        raise PrepareDatabaseException(
            "Could not find a suitable base set of full schemas"
        )

    max_current_ver = max(valid_versions)

    logger.debug("Initialising schema v%d", max_current_ver)

    # Now let's find all the full schema files, both in the common schema and
    # in database schemas.
    directories = [os.path.join(full_schemas_dir, str(max_current_ver))]
    directories.extend(
        os.path.join(
            schema_path,
            database,
            "full_schemas",
            str(max_current_ver),
        )
        for database in databases
    )

    directory_entries: List[_DirectoryListing] = []
    for directory in directories:
        directory_entries.extend(
            _DirectoryListing(file_name, os.path.join(directory, file_name))
            for file_name in os.listdir(directory)
        )

    if isinstance(database_engine, PostgresEngine):
        specific = "postgres"
    else:
        specific = "sqlite"

    directory_entries.sort()
    for entry in directory_entries:
        if entry.file_name.endswith(".sql") or entry.file_name.endswith(
            ".sql." + specific
        ):
            logger.debug("Applying schema %s", entry.absolute_path)
            database_engine.execute_script_file(cur, entry.absolute_path)

    cur.execute(
        "INSERT INTO schema_version (version, upgraded) VALUES (?,?)",
        (max_current_ver, False),
    )

    _upgrade_existing_database(
        cur,
        _SchemaState(current_version=max_current_ver, compat_version=None),
        database_engine=database_engine,
        config=None,
        databases=databases,
        is_empty=True,
    )
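
# To make the base-version selection above concrete (numbers hypothetical):
# with SCHEMA_VERSION = 61 and full_schemas subdirectories named "16", "54",
# "72" and "README", the loop skips "README" (not an int) and "72"
# (> SCHEMA_VERSION), picks max_current_ver = 54, runs every .sql file under
# .../full_schemas/54, and then applies the deltas for versions 55 through 61.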


def _upgrade_existing_database(
    cur: Cursor,
    current_schema_state: _SchemaState,
    database_engine: BaseDatabaseEngine,
    config: Optional[HomeServerConfig],
    databases: Collection[str],
    is_empty: bool = False,
) -> None:
    """Upgrades an existing physical database.

    Delta files can either be SQL stored in *.sql files, or python modules
    in *.py.

    There can be multiple delta files per version. Synapse will keep track of
    which delta files have been applied, and will apply any that haven't been
    even if there has been no version bump. This is useful for development
    where orthogonal schema changes may happen on separate branches.

    Different delta files for the same version *must* be orthogonal and give
    the same result when applied in any order. No guarantees are made on the
    order of execution of these scripts.

    This is a no-op if current_version == SCHEMA_VERSION.

    Example directory structure:

        schema/
            delta/
                11/
                    foo.sql
                    ...
                12/
                    foo.sql
                    bar.py
                ...
            full_schemas/
                ...

    In the example, if current_version is 11, then foo.sql will be run if and
    only if `upgraded` is True. Then `foo.sql` and `bar.py` would be run in
    some arbitrary order.

    Note: we apply the delta files from the specified data stores as well as
    those in the top-level schema. We apply all delta files across data stores
    for a version before applying those in the next version.

    Args:
        cur
        current_schema_state: The current version of the schema, as
            returned by _get_or_create_schema_state
        database_engine
        config:
            None if we are initialising a blank database, otherwise the application
            config
        databases: The names of the databases to instantiate
            on the given physical database.
        is_empty: Is this a blank database? I.e. do we need to run the
            upgrade portions of the delta scripts.
    """
    if is_empty:
        assert not current_schema_state.applied_deltas
    else:
        assert config

    is_worker = config and config.worker.worker_app is not None

    # If the schema version needs to be updated, and we are on a worker, we
    # immediately know to bail out as workers cannot update the database schema.
    # Only one process may update the database at a time, so we delegate this
    # task to the master.
    if is_worker and current_schema_state.current_version < SCHEMA_VERSION:
        # If the DB is on an older version than we expect then we refuse
        # to start the worker (as the main process needs to run first to
        # update the schema).
        raise UpgradeDatabaseException(
            OUTDATED_SCHEMA_ON_WORKER_ERROR
            % (SCHEMA_VERSION, current_schema_state.current_version)
        )

    if (
        current_schema_state.compat_version is not None
        and current_schema_state.compat_version > SCHEMA_VERSION
    ):
        raise ValueError(
            "Cannot use this database as it is too new for the server to understand"
        )

    # some of the deltas assume that server_name is set correctly, so now
    # is a good time to run the sanity check.
    if not is_empty and "main" in databases:
        from synapse.storage.databases.main import check_database_before_upgrade

        assert config is not None
        check_database_before_upgrade(cur, database_engine, config)

    # update schema_compat_version before we run any upgrades, so that if synapse
    # gets downgraded again, it won't try to run against the upgraded database.
    if (
        current_schema_state.compat_version is None
        or current_schema_state.compat_version < SCHEMA_COMPAT_VERSION
    ):
        cur.execute("DELETE FROM schema_compat_version")
        cur.execute(
            "INSERT INTO schema_compat_version(compat_version) VALUES (?)",
            (SCHEMA_COMPAT_VERSION,),
        )

    start_ver = current_schema_state.current_version

    # if we got to this schema version by running a full_schema rather than a series
    # of deltas, we should not run the deltas for this version.
    if not current_schema_state.upgraded:
        start_ver += 1

    logger.debug("applied_delta_files: %s", current_schema_state.applied_deltas)

    if isinstance(database_engine, PostgresEngine):
        specific_engine_extension = ".postgres"
    else:
        specific_engine_extension = ".sqlite"

    specific_engine_extensions = (".sqlite", ".postgres")

    for v in range(start_ver, SCHEMA_VERSION + 1):
        if not is_worker:
            logger.info("Applying schema deltas for v%d", v)

            cur.execute("DELETE FROM schema_version")
            cur.execute(
                "INSERT INTO schema_version (version, upgraded) VALUES (?,?)",
                (v, True),
            )
        else:
            logger.info("Checking schema deltas for v%d", v)

        # We need to search both the global and per data store schema
        # directories for schema updates.

        # First we find the directories to search in
        delta_dir = os.path.join(schema_path, "common", "delta", str(v))
        directories = [delta_dir]
        for database in databases:
            directories.append(os.path.join(schema_path, database, "delta", str(v)))

        # Used to check if we have any duplicate file names
        file_name_counter: CounterType[str] = Counter()

        # Now find which directories have anything of interest.
        directory_entries: List[_DirectoryListing] = []
        for directory in directories:
            logger.debug("Looking for schema deltas in %s", directory)
            try:
                file_names = os.listdir(directory)
                directory_entries.extend(
                    _DirectoryListing(file_name, os.path.join(directory, file_name))
                    for file_name in file_names
                )

                for file_name in file_names:
                    file_name_counter[file_name] += 1
            except FileNotFoundError:
                # Data stores can have empty entries for a given version delta.
                pass
            except OSError:
                raise UpgradeDatabaseException(
                    "Could not open delta dir for version %d: %s" % (v, directory)
                )

        duplicates = {
            file_name for file_name, count in file_name_counter.items() if count > 1
        }
        if duplicates:
            # We don't support using the same file name in the same delta version.
            raise PrepareDatabaseException(
                "Found multiple delta files with the same name in v%d: %s"
                % (
                    v,
                    duplicates,
                )
            )

        # We sort to ensure that we apply the delta files in a consistent
        # order (to avoid bugs caused by inconsistent directory listing order)
        directory_entries.sort()
        for entry in directory_entries:
            file_name = entry.file_name
            relative_path = os.path.join(str(v), file_name)
            absolute_path = entry.absolute_path

            logger.debug("Found file: %s (%s)", relative_path, absolute_path)
            if relative_path in current_schema_state.applied_deltas:
                continue

            root_name, ext = os.path.splitext(file_name)

            if ext == ".py":
                # This is a python upgrade module. We need to import into some
                # package and then execute its `run_upgrade` function.
                if is_worker:
                    raise PrepareDatabaseException(
                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
                    )

                module_name = "synapse.storage.v%d_%s" % (v, root_name)

                spec = importlib.util.spec_from_file_location(
                    module_name, absolute_path
                )
                if spec is None:
                    raise RuntimeError(
                        f"Could not build a module spec for {module_name} at {absolute_path}"
                    )
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)  # type: ignore

                if hasattr(module, "run_create"):
                    logger.info("Running %s:run_create", relative_path)
                    module.run_create(cur, database_engine)

                if not is_empty and hasattr(module, "run_upgrade"):
                    logger.info("Running %s:run_upgrade", relative_path)
                    module.run_upgrade(cur, database_engine, config=config)
            elif ext == ".pyc" or file_name == "__pycache__":
                # Sometimes .pyc files turn up anyway even though we've
                # disabled their generation; e.g. from distribution package
                # installers. Silently skip it
                continue
            elif ext == ".sql":
                # A plain old .sql file, just read and execute it
                if is_worker:
                    raise PrepareDatabaseException(
                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
                    )
                logger.info("Applying schema %s", relative_path)
                database_engine.execute_script_file(cur, absolute_path)
            elif ext == specific_engine_extension and root_name.endswith(".sql"):
                # A .sql file specific to our engine; just read and execute it
                if is_worker:
                    raise PrepareDatabaseException(
                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
                    )
                logger.info("Applying engine-specific schema %s", relative_path)
                database_engine.execute_script_file(cur, absolute_path)
            elif ext in specific_engine_extensions and root_name.endswith(".sql"):
                # A .sql file for a different engine; skip it.
                continue
            else:
                # Not a valid delta file.
                logger.warning(
                    "Found directory entry that did not end in .py or .sql: %s",
                    relative_path,
                )
                continue

            # Mark as done.
            cur.execute(
                "INSERT INTO applied_schema_deltas (version, file) VALUES (?,?)",
                (v, relative_path),
            )

    logger.info("Schema now up to date")


def _apply_module_schemas(
    txn: Cursor, database_engine: BaseDatabaseEngine, config: HomeServerConfig
) -> None:
    """Apply the module schemas for the dynamic modules, if any

    Args:
        txn: database cursor
        database_engine:
        config: application config
    """
    # This is the old way for password_auth_provider modules to make changes
    # to the database. This should instead be done using the module API
    for mod, _config in config.authproviders.password_providers:
        if not hasattr(mod, "get_db_schema_files"):
            continue
        modname = ".".join((mod.__module__, mod.__name__))
        _apply_module_schema_files(
            txn, database_engine, modname, mod.get_db_schema_files()
        )
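
# Sketch of the legacy hook consumed above (illustrative, not a real
# provider): a password provider class may expose `get_db_schema_files`,
# yielding (file name, readable stream) pairs of SQL to apply exactly once.
#
#     import io
#
#     class ExamplePasswordProvider:
#         @staticmethod
#         def get_db_schema_files():
#             yield "example.sql", io.StringIO(
#                 "CREATE TABLE IF NOT EXISTS example_provider_data"
#                 "(user_id TEXT NOT NULL, secret TEXT NOT NULL);"
#             )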


def _apply_module_schema_files(
    cur: Cursor,
    database_engine: BaseDatabaseEngine,
    modname: str,
    names_and_streams: Iterable[Tuple[str, TextIO]],
) -> None:
    """Apply the module schemas for a single module

    Args:
        cur: database cursor
        database_engine: synapse database engine class
        modname: fully qualified name of the module
        names_and_streams: the names and streams of schemas to be applied
    """
    cur.execute(
        "SELECT file FROM applied_module_schemas WHERE module_name = ?",
        (modname,),
    )
    applied_deltas = {d for d, in cur}
    for name, stream in names_and_streams:
        if name in applied_deltas:
            continue

        root_name, ext = os.path.splitext(name)
        if ext != ".sql":
            raise PrepareDatabaseException(
                "only .sql files are currently supported for module schemas"
            )

        logger.info("applying schema %s for %s", name, modname)
        execute_statements_from_stream(cur, stream)

        # Mark as done.
        cur.execute(
            "INSERT INTO applied_module_schemas (module_name, file) VALUES (?,?)",
            (modname, name),
        )


def get_statements(f: Iterable[str]) -> Generator[str, None, None]:
    statement_buffer = ""
    in_comment = False  # If we're in a /* ... */ style comment

    for line in f:
        line = line.strip()

        if in_comment:
            # Check if this line contains an end to the comment
            comments = line.split("*/", 1)
            if len(comments) == 1:
                continue
            line = comments[1]
            in_comment = False

        # Remove inline block comments
        line = re.sub(r"/\*.*\*/", " ", line)

        # Does this line start a comment?
        comments = line.split("/*", 1)
        if len(comments) > 1:
            line = comments[0]
            in_comment = True

        # Deal with line comments
        line = line.split("--", 1)[0]
        line = line.split("//", 1)[0]

        # Find *all* semicolons. We need to treat first and last entry
        # specially.
        statements = line.split(";")

        # We must prepend statement_buffer to the first statement
        first_statement = "%s %s" % (statement_buffer.strip(), statements[0].strip())
        statements[0] = first_statement

        # Every entry, except the last, is a full statement
        for statement in statements[:-1]:
            yield statement.strip()

        # The last entry did *not* end in a semicolon, so we store it for the
        # next semicolon we find
        statement_buffer = statements[-1].strip()
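
# A quick illustration of the statement splitting above (inputs made up for
# the example):
#
#     >>> list(get_statements(iter([
#     ...     "CREATE TABLE foo (id INT); -- trailing comment",
#     ...     "/* block comment */ INSERT INTO foo",
#     ...     "VALUES (1);",
#     ... ])))
#     ['CREATE TABLE foo (id INT)', 'INSERT INTO foo VALUES (1)']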


def executescript(txn: Cursor, schema_path: str) -> None:
    with open(schema_path) as f:
        execute_statements_from_stream(txn, f)


def execute_statements_from_stream(cur: Cursor, f: TextIO) -> None:
    for statement in get_statements(f):
        cur.execute(statement)


def _get_or_create_schema_state(
    txn: Cursor, database_engine: BaseDatabaseEngine
) -> Optional[_SchemaState]:
    # Bluntly try creating the schema_version tables.
    sql_path = os.path.join(schema_path, "common", "schema_version.sql")
    database_engine.execute_script_file(txn, sql_path)

    txn.execute("SELECT version, upgraded FROM schema_version")
    row = txn.fetchone()

    if row is None:
        # new database
        return None

    current_version = int(row[0])
    upgraded = bool(row[1])

    compat_version: Optional[int] = None
    txn.execute("SELECT compat_version FROM schema_compat_version")
    row = txn.fetchone()
    if row is not None:
        compat_version = int(row[0])

    txn.execute(
        "SELECT file FROM applied_schema_deltas WHERE version >= ?",
        (current_version,),
    )
    applied_deltas = tuple(d for d, in txn)

    return _SchemaState(
        current_version=current_version,
        compat_version=compat_version,
        applied_deltas=applied_deltas,
        upgraded=upgraded,
    )
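
# For reference, the bookkeeping read here spans three tables created by
# schema_version.sql. Only the columns used in this function are shown, so
# this is a sketch rather than the authoritative definition:
#
#     schema_version(version INTEGER, upgraded BOOLEAN)
#     schema_compat_version(compat_version INTEGER)
#     applied_schema_deltas(version INTEGER, file TEXT)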


@attr.s(slots=True, auto_attribs=True)
class _DirectoryListing:
    """Helper class to store schema file name and the
    absolute path to it.

    These entries get sorted, so for consistency we want to ensure that
    `file_name` attr is kept first.
    """

    file_name: str
    absolute_path: str
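
# Because `file_name` is declared first, the attrs-generated ordering compares
# file names before paths, so entries with the same leading number sort
# together regardless of which directory they came from. For example (paths
# made up for illustration):
#
#     sorted([
#         _DirectoryListing("02bar.sql", "/schema/main/delta/72/02bar.sql"),
#         _DirectoryListing("01foo.sql", "/schema/common/delta/72/01foo.sql"),
#     ])
#     # -> [the 01foo.sql entry, then the 02bar.sql entry]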