Merge remote-tracking branch 'origin/develop' into anoa/public_rooms_module_api
commit 99fefd5501
@@ -29,11 +29,12 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

 # First calculate the various trial jobs.
 #
-# For each type of test we only run on Py3.7 on PRs
+# For PRs, we only run each type of test with the oldest Python version supported (which
+# is Python 3.8 right now)

 trial_sqlite_tests = [
     {
-        "python-version": "3.7",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "all",
     }
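Note: a minimal sketch of how the matrices this script computes are consumed,
assuming the script prints each job list as JSON into a step output. The
`calculate-test-jobs` job and `trial_test_matrix` output names below match the
tests.yml hunks later in this diff; the echo step is illustrative only.

  trial:
    needs: calculate-test-jobs
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Parse the JSON list produced by .ci/calculate_jobs.py into a matrix.
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
    steps:
      - run: echo "Python ${{ matrix.job.python-version }} on ${{ matrix.job.database }}"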
@@ -46,13 +47,12 @@ if not IS_PR:
             "database": "sqlite",
             "extras": "all",
         }
-        for version in ("3.8", "3.9", "3.10", "3.11")
+        for version in ("3.9", "3.10", "3.11", "3.12.0-rc.2")
     )

-
 trial_postgres_tests = [
     {
-        "python-version": "3.7",
+        "python-version": "3.8",
         "database": "postgres",
         "postgres-version": "11",
         "extras": "all",
@@ -64,14 +64,14 @@ if not IS_PR:
         {
             "python-version": "3.11",
             "database": "postgres",
-            "postgres-version": "15",
+            "postgres-version": "16",
             "extras": "all",
         }
     )

 trial_no_extra_tests = [
     {
-        "python-version": "3.7",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "",
     }
@@ -133,11 +133,6 @@ if not IS_PR:
                 "sytest-tag": "testing",
                 "postgres": "postgres",
             },
-            {
-                "sytest-tag": "buster",
-                "postgres": "multi-postgres",
-                "workers": "workers",
-            },
         ]
     )

@@ -1,49 +0,0 @@
-name: Write changelog for dependabot PR
-on:
-  pull_request:
-    types:
-      - opened
-      - reopened  # For debugging!
-
-permissions:
-  # Needed to be able to push the commit. See
-  # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
-  # for a similar example
-  contents: write
-
-jobs:
-  add-changelog:
-    runs-on: 'ubuntu-latest'
-    if: ${{ github.actor == 'dependabot[bot]' }}
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          ref: ${{ github.event.pull_request.head.ref }}
-      - name: Write, commit and push changelog
-        env:
-          PR_TITLE: ${{ github.event.pull_request.title }}
-          PR_NUMBER: ${{ github.event.pull_request.number }}
-        run: |
-          echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
-          git add changelog.d
-          git config user.email "github-actions[bot]@users.noreply.github.com"
-          git config user.name "GitHub Actions"
-          git commit -m "Changelog"
-          git push
-        shell: bash
-      # The `git push` above does not trigger CI on the dependabot PR.
-      #
-      # By default, workflows can't trigger other workflows when they're just using the
-      # default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
-      # recursive workflow loops by accident, because that'll get very expensive very
-      # quickly.) Instead, you have to manually call out to another workflow, or else
-      # make your changes (i.e. the `git push` above) using a personal access token.
-      # See
-      # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
-      #
-      # I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
-      # See git commit history for previous attempts. If anyone desperately wants to try
-      # again in the future, make a matrix-bot account and use its access token to git push.
-
-# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
-# are sufficiently locked down to dependabot only as above.
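Note: the closing comment of the deleted workflow above explains the constraint
that killed it: pushes made with the default GITHUB_TOKEN do not start new
workflow runs. A hedged sketch of one escape hatch GitHub documents, under the
assumption that the target workflow (tests.yml here) declares a
`workflow_dispatch:` trigger:

  - name: Kick off CI explicitly
    env:
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    # Dispatch events created via the API are exempt from the
    # "GITHUB_TOKEN does not trigger workflows" rule, so this starts a run.
    run: gh workflow run tests.yml --ref "${{ github.event.pull_request.head.ref }}"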
@@ -18,25 +18,35 @@ jobs:
     steps:
       - name: Set up QEMU
        id: qemu
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3

      - name: Inspect builder
        run: docker buildx inspect

+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Extract version from pyproject.toml
+        # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
+        # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
+        shell: bash
+        run: |
+          echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
+
      - name: Log in to DockerHub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
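Note: the added "Extract version" step hands the version to later steps through
$GITHUB_ENV, which is how the "Build and push" hunk below can read it as
${{ env.SYNAPSE_VERSION }}. A minimal sketch of that handoff, assuming both
steps run in the same job (the literal value is illustrative):

  - name: Produce a value
    shell: bash
    run: echo "SYNAPSE_VERSION=1.2.3" >> "$GITHUB_ENV"
  - name: Consume it in any later step
    run: echo "version is ${{ env.SYNAPSE_VERSION }}"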
@@ -58,10 +68,12 @@ jobs:
            type=pep440,pattern={{raw}}

      - name: Build and push all platforms
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
          push: true
-          labels: "gitsha1=${{ github.sha }}"
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
          tags: "${{ steps.set-tag.outputs.tags }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64
@@ -14,7 +14,7 @@ jobs:
      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
      - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0
+        uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
        with:
          workflow: docs-pr.yaml
          run_id: ${{ github.event.workflow_run.id }}
@@ -12,7 +12,7 @@ jobs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -39,7 +39,7 @@ jobs:
    name: Check links in documentation
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -50,7 +50,7 @@ jobs:
    needs:
      - pre
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -80,7 +80,7 @@ jobs:
    needs:
      - pre
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: "Set up Sphinx"
        uses: matrix-org/setup-python-poetry@v1
@@ -22,10 +22,24 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  check_repo:
+    # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
+    # only useful to the Synapse core team.
+    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
+    # of the workflow will be skipped as well.
+    runs-on: ubuntu-latest
+    outputs:
+      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
+    steps:
+      - id: check_condition
+        run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
+
   mypy:
+    needs: check_repo
+    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
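Note: the check_repo job above is a guard pattern: it publishes a string output
and every other job gates on it via `needs` plus `if`, so forks of the
repository skip the whole workflow. A stripped-down sketch of the same pattern
(`some_job` is illustrative; the output name matches the hunk above):

  jobs:
    check_repo:
      runs-on: ubuntu-latest
      outputs:
        should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
      steps:
        - id: check_condition
          # Evaluates to the string "true" only in the canonical repository.
          run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
    some_job:
      needs: check_repo
      if: needs.check_repo.outputs.should_run_workflow == 'true'
      runs-on: ubuntu-latest
      steps:
        - run: echo "only reached on matrix-org/synapse"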
@@ -43,10 +57,12 @@ jobs:
      #   `pip install matrix-synapse[all]` as closely as possible.
      - run: poetry update --no-dev
      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
-      - name: Remove warn_unused_ignores from mypy config
-        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
+      - name: Remove unhelpful options from mypy config
+        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
      - run: poetry run mypy
  trial:
+    needs: check_repo
+    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    strategy:
      matrix:
@@ -56,7 +72,7 @@ jobs:
          postgres-version: "14"

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
@@ -105,6 +121,8 @@ jobs:


  sytest:
+    needs: check_repo
+    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:testing
@@ -127,7 +145,7 @@ jobs:
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
@@ -156,7 +174,8 @@ jobs:


  complement:
-    if: "${{ !failure() && !cancelled() }}"
+    needs: check_repo
+    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
    runs-on: ubuntu-latest

    strategy:
@@ -173,8 +192,8 @@ jobs:
        database: Postgres

    steps:
-      - name: Run actions/checkout@v3 for synapse
-        uses: actions/checkout@v3
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
        with:
          path: synapse

@@ -192,7 +211,7 @@ jobs:
  # Open an issue if the build fails, so we know about it.
  # Only do this if we're not experimenting with this action in a PR.
  open-issue:
-    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
+    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
    needs:
      # TODO: should mypy be included here? It feels more brittle than the others.
      - mypy
@@ -203,7 +222,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -16,7 +16,7 @@ jobs:
    name: "Check locked dependencies have sdists"
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: '3.x'
@@ -33,29 +33,29 @@ jobs:
      packages: write
    steps:
      - name: Checkout specific branch (debug build)
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        if: github.event_name == 'workflow_dispatch'
        with:
          ref: ${{ inputs.branch }}
      - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        if: github.event_name == 'schedule'
        with:
          ref: develop
      - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        if: github.event_name == 'push'
        with:
          ref: master
      - name: Login to registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Work out labels for complement image
        id: meta
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository }}/complement-synapse
          tags: |
@@ -27,13 +27,14 @@ jobs:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: '3.x'
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
+          # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
@@ -54,13 +55,13 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        with:
          install: true

@@ -120,7 +121,7 @@ jobs:
            arch: aarch64

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - uses: actions/setup-python@v4
        with:
@@ -133,7 +134,7 @@ jobs:

      - name: Set up QEMU to emulate aarch64
        if: matrix.arch == 'aarch64'
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64

@@ -143,7 +144,7 @@ jobs:

      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
@@ -166,7 +167,7 @@ jobs:
    if: ${{ !startsWith(github.ref, 'refs/pull/') }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
@@ -12,12 +12,19 @@ concurrency:
  cancel-in-progress: true

jobs:
+  check-signoff:
+    if: "github.event_name == 'pull_request'"
+    uses: "matrix-org/backend-meta/.github/workflows/sign-off.yml@v2"
+
  # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
  # don't modify Rust code.
  changes:
    runs-on: ubuntu-latest
    outputs:
      rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
+      trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
+      integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
+      linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
    steps:
      - uses: dorny/paths-filter@v2
        id: filter
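Note: the new trial/integration/linting outputs reuse the shape of the existing
rust one: on anything that is not a pull request, `!startsWith(github.ref,
'refs/pull/')` short-circuits to true and everything runs; on PRs the
dorny/paths-filter result decides. A condensed sketch of the full pattern (the
`docs` filter name and `build-docs` job are illustrative):

  changes:
    runs-on: ubuntu-latest
    outputs:
      docs: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.docs }}
    steps:
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: |
            docs:
              - 'docs/**'
  build-docs:
    needs: changes
    if: ${{ needs.changes.outputs.docs == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - run: echo "docs changed (or this is not a PR)"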
@@ -29,13 +36,49 @@ jobs:
            - 'rust/**'
            - 'Cargo.toml'
            - 'Cargo.lock'
+            - '.rustfmt.toml'
+
+          trial:
+            - 'synapse/**'
+            - 'tests/**'
+            - 'rust/**'
+            - 'Cargo.toml'
+            - 'Cargo.lock'
+            - 'pyproject.toml'
+            - 'poetry.lock'
+
+          integration:
+            - 'synapse/**'
+            - 'rust/**'
+            - 'docker/**'
+            - 'Cargo.toml'
+            - 'Cargo.lock'
+            - 'pyproject.toml'
+            - 'poetry.lock'
+            - 'docker/**'
+
+          linting:
+            - 'synapse/**'
+            - 'docker/**'
+            - 'tests/**'
+            - 'scripts-dev/**'
+            - 'contrib/**'
+            - 'synmark/**'
+            - 'stubs/**'
+            - '.ci/**'
+            - 'mypy.ini'
+            - 'pyproject.toml'
+            - 'poetry.lock'

  check-sampleconfig:
    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
@@ -47,8 +90,11 @@ jobs:

  check-schema-delta:
    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: "3.x"
@@ -58,7 +104,7 @@ jobs:
  check-lockfile:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: "3.x"
@@ -66,9 +112,12 @@ jobs:

  lint:
    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
    steps:
      - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@v1
@@ -88,9 +137,16 @@ jobs:
  lint-mypy:
    runs-on: ubuntu-latest
    name: Typechecking
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
    steps:
      - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.61.0
+      - uses: Swatinem/rust-cache@v2
+
      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@v1
@@ -103,10 +159,6 @@ jobs:
          # To make CI green, err towards caution and install the project.
          install-project: "true"

-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
-      - uses: Swatinem/rust-cache@v2
-
      # Cribbed from
      # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
      - name: Restore/persist mypy's cache
@@ -123,7 +175,7 @@ jobs:
  lint-crlf:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

@@ -131,7 +183,7 @@ jobs:
    if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
@@ -145,12 +197,15 @@ jobs:

  lint-pydantic:
    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
@@ -164,10 +219,10 @@ jobs:
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
        with:
          components: clippy
      - uses: Swatinem/rust-cache@v2
@@ -182,7 +237,7 @@ jobs:
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@master
@@ -199,7 +254,7 @@ jobs:
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@master
@@ -225,6 +280,7 @@ jobs:
      - check-lockfile
      - lint-clippy
      - lint-rustfmt
+      - check-signoff
    runs-on: ubuntu-latest
    steps:
      - run: "true"
@@ -234,7 +290,7 @@ jobs:
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: "3.x"
@@ -245,15 +301,17 @@ jobs:
      sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}

  trial:
-    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
-    needs: calculate-test-jobs
+    if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
+    needs:
+      - calculate-test-jobs
+      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
@@ -268,7 +326,7 @@ jobs:
            postgres:${{ matrix.job.postgres-version }}

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2

      - uses: matrix-org/setup-python-poetry@v1
@@ -301,26 +359,28 @@ jobs:

  trial-olddeps:
    # Note: sqlite only; no postgres
-    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
-    needs: linting-done
+    if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
+    needs:
+      - linting-done
+      - changes
    runs-on: ubuntu-20.04
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2

      # There aren't wheels for some of the older deps, so we need to install
      # their build dependencies
      - run: |
-          sudo apt update
+          sudo apt-get -qq update
          sudo apt-get -qq install build-essential libffi-dev python-dev \
            libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

      - uses: actions/setup-python@v4
        with:
-          python-version: '3.7'
+          python-version: '3.8'

      - name: Prepare old deps
        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -357,16 +417,18 @@ jobs:
  trial-pypy:
    # Very slow; only run if the branch name includes 'pypy'
    # Note: sqlite only; no postgres. Completely untested since poetry move.
-    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
-    needs: linting-done
+    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }}
+    needs:
+      - linting-done
+      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
-        python-version: ["pypy-3.7"]
+        python-version: ["pypy-3.8"]
        extras: ["all"]

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@v1
@@ -389,8 +451,10 @@ jobs:
        || true

  sytest:
-    if: ${{ !failure() && !cancelled() }}
-    needs: calculate-test-jobs
+    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}
+    needs:
+      - calculate-test-jobs
+      - changes
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
@@ -399,8 +463,8 @@ jobs:
    env:
      SYTEST_BRANCH: ${{ github.head_ref }}
      POSTGRES: ${{ matrix.job.postgres && 1}}
-      MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
-      ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
+      MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
+      ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
      WORKERS: ${{ matrix.job.workers && 1 }}
      BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
      TOP: ${{ github.workspace }}
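Note: the MULTI_POSTGRES/ASYNCIO_REACTOR rewrite above is subtle. In GitHub
Actions expressions, `cond && 1` evaluates to `false` when the condition fails,
and that renders into the environment as the non-empty string "false", which
shell-style emptiness checks treat as "set". `cond || ''` renders as an empty
string instead, which is presumably the point of the change. A sketch (the
variable names are illustrative):

  env:
    # Renders as "false" when the condition fails: still a non-empty string.
    BROKEN_FLAG: ${{ (matrix.job.postgres == 'multi-postgres') && 1 }}
    # Renders as "" when the condition fails: reads as unset in shell checks.
    FIXED_FLAG: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}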
@@ -411,12 +475,12 @@ jobs:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2

      - name: Run SyTest
@@ -435,8 +499,8 @@ jobs:
            /logs/**/*.log*

  export-data:
-    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
-    needs: [linting-done, portdb]
+    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail
+    needs: [linting-done, portdb, changes]
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}
@@ -456,7 +520,7 @@ jobs:
          --health-retries 5

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: matrix-org/setup-python-poetry@v1
        with:
@@ -471,13 +535,15 @@ jobs:


  portdb:
-    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
-    needs: linting-done
+    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail
+    needs:
+      - linting-done
+      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
-          - python-version: "3.7"
+          - python-version: "3.8"
            postgres-version: "11"

          - python-version: "3.11"
@@ -498,7 +564,7 @@ jobs:
          --health-retries 5

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Add PostgreSQL apt repository
        # We need a version of pg_dump that can handle the version of
        # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -532,8 +598,10 @@ jobs:
          schema_diff

  complement:
-    if: "${{ !failure() && !cancelled() }}"
-    needs: linting-done
+    if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}"
+    needs:
+      - linting-done
+      - changes
    runs-on: ubuntu-latest

    strategy:
@@ -550,13 +618,13 @@ jobs:
        database: Postgres

    steps:
-      - name: Run actions/checkout@v3 for synapse
-        uses: actions/checkout@v3
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
        with:
          path: synapse

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2

      - uses: actions/setup-go@v4
@@ -581,10 +649,10 @@ jobs:
      - changes

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.58.1
+        uses: dtolnay/rust-toolchain@1.61.0
      - uses: Swatinem/rust-cache@v2

      - run: cargo test
@@ -599,7 +667,7 @@ jobs:
      - changes

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@master
@@ -627,9 +695,16 @@ jobs:
        with:
          needs: ${{ toJSON(needs) }}

-          # The newsfile lint may be skipped on non PR builds
-          # Cargo test is skipped if there is no changes on Rust code
+          # Various bits are skipped if there was no applicable changes.
+          # The newsfile and signoff lint may be skipped on non PR builds.
          skippable: |
+            trial
+            trial-olddeps
+            sytest
+            portdb
+            export-data
+            complement
+            check-signoff
            lint-newsfile
            cargo-test
            cargo-bench
@@ -5,6 +5,9 @@ on:
    - cron: 0 8 * * *

  workflow_dispatch:
+    # NB: inputs are only present when this workflow is dispatched manually.
+    # (The default below is the default field value in the form to trigger
+    # a manual dispatch). Otherwise the inputs will evaluate to null.
    inputs:
      twisted_ref:
        description: Commit, branch or tag to checkout from upstream Twisted.
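Note: the comment added above is the rationale for a change further down in
this file, where `#${{ inputs.twisted_ref }}` becomes
`#${{ inputs.twisted_ref || 'trunk' }}`: on a scheduled run there is no manual
dispatch form, `inputs.twisted_ref` evaluates to null, and the `||` expression
falls back to the default. A sketch of the guard (the echo step is
illustrative):

  - run: |
      # On `schedule` runs inputs.twisted_ref is null, so this picks 'trunk'.
      echo "checking out Twisted ref: ${{ inputs.twisted_ref || 'trunk' }}"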
@@ -18,11 +21,26 @@ concurrency:
  cancel-in-progress: true

jobs:
+  check_repo:
+    # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
+    # only useful to the Synapse core team.
+    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
+    # of the workflow will be skipped as well.
+    if: github.repository == 'matrix-org/synapse'
+    runs-on: ubuntu-latest
+    outputs:
+      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
+    steps:
+      - id: check_condition
+        run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
+
  mypy:
+    needs: check_repo
+    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
@@ -34,17 +52,19 @@ jobs:
          extras: "all"
      - run: |
          poetry remove twisted
-          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }}
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
          poetry install --no-interaction --extras "all test"
-      - name: Remove warn_unused_ignores from mypy config
-        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
+      - name: Remove unhelpful options from mypy config
+        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
      - run: poetry run mypy

  trial:
+    needs: check_repo
+    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
@@ -75,14 +95,20 @@ jobs:
        || true

  sytest:
+    needs: check_repo
+    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
-      image: matrixdotorg/sytest-synapse:buster
+      # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
+      # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
+      # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
+      # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
+      image: matrixdotorg/sytest-synapse:focal
      volumes:
        - ${{ github.workspace }}:/src

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
@@ -119,7 +145,8 @@ jobs:
            /logs/**/*.log*

  complement:
-    if: "${{ !failure() && !cancelled() }}"
+    needs: check_repo
+    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
    runs-on: ubuntu-latest

    strategy:
@@ -136,8 +163,8 @@ jobs:
        database: Postgres

    steps:
-      - name: Run actions/checkout@v3 for synapse
-        uses: actions/checkout@v3
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
        with:
          path: synapse

@@ -166,7 +193,7 @@ jobs:

  # open an issue if the build fails, so we know about it.
  open-issue:
-    if: failure()
+    if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
    needs:
      - mypy
      - trial
@@ -176,7 +203,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -34,6 +34,7 @@ __pycache__/
 /logs
 /media_store/
 /uploads
+/homeserver-config-overrides.d

 # For direnv users
 /.envrc
CHANGES.md (3697 lines): diff suppressed because it is too large.
@@ -4,18 +4,18 @@ version = 3

 [[package]]
 name = "aho-corasick"
-version = "0.7.19"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
+checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
 dependencies = [
  "memchr",
 ]

 [[package]]
 name = "anyhow"
-version = "1.0.71"
+version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"

 [[package]]
 name = "arc-swap"
@@ -132,24 +132,21 @@ dependencies = [

 [[package]]
 name = "log"
-version = "0.4.17"
+version = "0.4.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
-dependencies = [
- "cfg-if",
-]
+checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"

 [[package]]
 name = "memchr"
-version = "2.5.0"
+version = "2.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c"

 [[package]]
 name = "memoffset"
-version = "0.6.5"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
 dependencies = [
  "autocfg",
 ]
@@ -185,18 +182,18 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.52"
+version = "1.0.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
+checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da"
 dependencies = [
  "unicode-ident",
 ]

 [[package]]
 name = "pyo3"
-version = "0.17.3"
+version = "0.19.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543"
+checksum = "e681a6cfdc4adcc93b4d3cf993749a4552018ee0a9b65fc0ccfad74352c72a38"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -212,9 +209,9 @@ dependencies = [

 [[package]]
 name = "pyo3-build-config"
-version = "0.17.3"
+version = "0.19.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8"
+checksum = "076c73d0bc438f7a4ef6fdd0c3bb4732149136abd952b110ac93e4edb13a6ba5"
 dependencies = [
  "once_cell",
  "target-lexicon",
@@ -222,9 +219,9 @@ dependencies = [

 [[package]]
 name = "pyo3-ffi"
-version = "0.17.3"
+version = "0.19.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc"
+checksum = "e53cee42e77ebe256066ba8aa77eff722b3bb91f3419177cf4cd0f304d3284d9"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -232,9 +229,9 @@ dependencies = [

 [[package]]
 name = "pyo3-log"
-version = "0.8.1"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c"
+checksum = "f47b0777feb17f61eea78667d61103758b243a871edc09a7786500a50467b605"
 dependencies = [
  "arc-swap",
  "log",
@@ -243,9 +240,9 @@ dependencies = [

 [[package]]
 name = "pyo3-macros"
-version = "0.17.3"
+version = "0.19.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28"
+checksum = "dfeb4c99597e136528c6dd7d5e3de5434d1ceaf487436a3f03b2d56b6fc9efd1"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -255,9 +252,9 @@ dependencies = [

 [[package]]
 name = "pyo3-macros-backend"
-version = "0.17.3"
+version = "0.19.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
+checksum = "947dc12175c254889edc0c02e399476c2f652b4b9ebd123aa655c224de259536"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -266,9 +263,9 @@ dependencies = [

 [[package]]
 name = "pythonize"
-version = "0.17.0"
+version = "0.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
+checksum = "8e35b716d430ace57e2d1b4afb51c9e5b7c46d2bce72926e07f9be6a98ced03e"
 dependencies = [
  "pyo3",
  "serde",
@@ -276,9 +273,9 @@ dependencies = [

 [[package]]
 name = "quote"
-version = "1.0.26"
+version = "1.0.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
+checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
 dependencies = [
  "proc-macro2",
 ]
@@ -294,9 +291,21 @@ dependencies = [

 [[package]]
 name = "regex"
-version = "1.7.3"
+version = "1.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
+checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
 dependencies = [
  "aho-corasick",
  "memchr",
@ -305,9 +314,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.6.29"
|
version = "0.7.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
|
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ryu"
|
name = "ryu"
|
||||||
|
@@ -323,29 +332,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

 [[package]]
 name = "serde"
-version = "1.0.163"
+version = "1.0.188"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
+checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
 dependencies = [
  "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.163"
+version = "1.0.188"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
+checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.10",
+ "syn 2.0.28",
 ]

 [[package]]
 name = "serde_json"
-version = "1.0.96"
+version = "1.0.107"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
 dependencies = [
  "itoa",
  "ryu",
@@ -377,9 +386,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.10"
+version = "2.0.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
+checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3,3 +3,4 @@

 [workspace]
 members = ["rust"]
+resolver = "2"
@@ -1 +0,0 @@
-Fix a long-standing bug where setting the read marker could fail when using message retention. Contributed by Nick @ Beeper (@fizzadar).
@@ -1 +0,0 @@
-Add not null constraint to column full_user_id of tables profiles and user_filters.
@@ -1 +0,0 @@
-Print full error and stack-trace of any exception that occurs during startup/initialization.
@@ -1 +0,0 @@
-Fix a long-standing bug where the `url_preview_url_blacklist` configuration setting was not applied to oEmbed or image URLs found while previewing a URL.
@@ -1 +0,0 @@
-Run mypy type checking with the minimum supported Python version to catch new usage that isn't backwards-compatible.
@@ -1 +0,0 @@
-Fix subscriptable type usage in Python <3.9.
@@ -1 +0,0 @@
-Update internal terminology.
@@ -1 +0,0 @@
-Add a new admin API to create a new device for a user.
@@ -1 +0,0 @@
-Warn users that at least 3.75GB of space is needed for the nix Synapse development environment.
@@ -1 +0,0 @@
-Fix a bug introduced in Synapse 1.82.0 where the error message displayed when validation of the `app_service_config_files` config option fails would be incorrectly formatted.
@@ -1 +0,0 @@
-Re-type config paths in `ConfigError`s to be `StrSequence`s instead of `Iterable[str]`s.
@@ -1 +0,0 @@
-Update internal terminology.
@@ -1 +0,0 @@
-Update Mutual Rooms (MSC2666) implementation to match new proposal text.
@@ -1 +0,0 @@
-Fix a long-standing bug where deactivated users were still able to login using the custom `org.matrix.login.jwt` login type (if enabled).
@@ -1 +0,0 @@
-Remove the unstable identifiers from faster joins ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706)).
@@ -1 +0,0 @@
-Fix the olddeps CI.
@@ -1 +0,0 @@
-Fix two memory leaks in `trial` test runs.
@@ -0,0 +1 @@
+Bump pyo3 from 0.17.1 to 0.19.2.
@@ -0,0 +1 @@
+Clean-up unused tables.
@@ -0,0 +1 @@
+Remove legacy unspecced `knock_state_events` field returned in some responses.
@@ -0,0 +1 @@
+Fix a possible `AttributeError` when `_matrix/client/v3/account/whoami` is called over a unix socket. Contributed by @Sir-Photch.
@@ -0,0 +1 @@
+Update registration of media repository URLs.
@@ -0,0 +1 @@
+Document internal background update mechanism.
@@ -0,0 +1 @@
+Improve type hints.
@@ -0,0 +1 @@
+Refactor some code to simplify and better type receipts stream adjacent code.
@@ -0,0 +1 @@
+Factor out `MultiWriter` token from `RoomStreamToken`.
@@ -0,0 +1 @@
+Improve code comments.
@@ -0,0 +1 @@
+Reduce memory allocations.
@@ -0,0 +1 @@
+Reduce memory allocations.
@@ -0,0 +1 @@
+Reduce memory allocations.
@@ -0,0 +1 @@
+Remove unused method.
@@ -0,0 +1 @@
+Reduce memory allocations.
@@ -0,0 +1 @@
+Improve rate limiting logic.
@@ -769,7 +769,7 @@ def main(server_url, identity_server_url, username, token, config_path):
     global CONFIG_JSON
     CONFIG_JSON = config_path  # bit cheeky, but just overwrite the global
     try:
-        with open(config_path, "r") as config:
+        with open(config_path) as config:
             syn_cmd.config = json.load(config)
             try:
                 http_client.verbose = "on" == syn_cmd.config["verbose"]
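The hunk above simply drops a redundant argument: `"r"` is already the default mode for `open()`. A minimal sketch (file name illustrative):

```python
# open() defaults to mode "r" (read, text), so passing it explicitly adds nothing.
with open("config.json") as config:  # equivalent to open("config.json", "r")
    raw = config.read()
```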
@@ -37,7 +37,6 @@ class HttpClient:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
             will be the decoded JSON body.
         """
-        pass

     def get_json(self, url, args=None):
         """Gets some json from the given host homeserver and path
@@ -53,7 +52,6 @@ class HttpClient:
             Deferred: Succeeds when we get a 2xx HTTP response. The result
             will be the decoded JSON body.
         """
-        pass


 class TwistedHttpClient(HttpClient):
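Dropping the `pass` statements above is safe because a docstring alone is a complete function body in Python; a minimal sketch:

```python
def get_json_stub():
    """A docstring by itself satisfies Python's requirement for a non-empty
    function body, so a trailing `pass` after it is redundant."""
```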
(File diff suppressed because it is too large.)
@@ -29,7 +29,7 @@
         "level": "error"
     },
     {
-        "line": "my-matrix-server-federation-sender-1  | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
+        "line": "my-matrix-server-federation-sender-1  | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
         "level": "warning"
     },
     {
@@ -1,3 +1,183 @@
+matrix-synapse-py3 (1.94.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.94.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Oct 2023 11:48:18 +0100
+
+matrix-synapse-py3 (1.93.0) stable; urgency=medium
+
+  * New Synapse release 1.93.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Sep 2023 15:54:40 +0100
+
+matrix-synapse-py3 (1.93.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.93.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Sep 2023 11:55:00 +0000
+
+matrix-synapse-py3 (1.92.3) stable; urgency=medium
+
+  * New Synapse release 1.92.3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 18 Sep 2023 15:05:04 +0200
+
+matrix-synapse-py3 (1.92.2) stable; urgency=medium
+
+  * New Synapse release 1.92.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 15 Sep 2023 13:17:41 +0100
+
+matrix-synapse-py3 (1.92.1) stable; urgency=medium
+
+  * New Synapse release 1.92.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Sep 2023 13:19:42 +0200
+
+matrix-synapse-py3 (1.92.0) stable; urgency=medium
+
+  * New Synapse release 1.92.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Sep 2023 11:59:23 +0200
+
+matrix-synapse-py3 (1.91.2) stable; urgency=medium
+
+  * New synapse release 1.91.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 06 Sep 2023 14:59:30 +0000
+
+matrix-synapse-py3 (1.92.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.92.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Sep 2023 11:21:43 +0100
+
+matrix-synapse-py3 (1.91.1) stable; urgency=medium
+
+  * New Synapse release 1.91.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 04 Sep 2023 14:03:18 +0100
+
+matrix-synapse-py3 (1.91.0) stable; urgency=medium
+
+  * New Synapse release 1.91.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Aug 2023 11:18:10 +0100
+
+matrix-synapse-py3 (1.91.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.91.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 23 Aug 2023 09:47:18 -0700
+
+matrix-synapse-py3 (1.90.0) stable; urgency=medium
+
+  * New Synapse release 1.90.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Aug 2023 11:17:34 +0100
+
+matrix-synapse-py3 (1.90.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.90.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Aug 2023 15:29:34 +0100
+
+matrix-synapse-py3 (1.89.0) stable; urgency=medium
+
+  * New Synapse release 1.89.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Aug 2023 11:07:15 +0100
+
+matrix-synapse-py3 (1.89.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.89.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Jul 2023 14:31:07 +0200
+
+matrix-synapse-py3 (1.88.0) stable; urgency=medium
+
+  * New Synapse release 1.88.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jul 2023 13:59:28 +0100
+
+matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.88.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Jul 2023 10:20:19 +0100
+
+matrix-synapse-py3 (1.87.0) stable; urgency=medium
+
+  * New Synapse release 1.87.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Jul 2023 16:24:00 +0100
+
+matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.87.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 27 Jun 2023 15:27:04 +0000
+
+matrix-synapse-py3 (1.86.0) stable; urgency=medium
+
+  * New Synapse release 1.86.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Jun 2023 17:22:46 +0200
+
+matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.86.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 14 Jun 2023 12:16:27 +0200
+
+matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.86.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Jun 2023 14:30:45 +0200
+
+matrix-synapse-py3 (1.85.2) stable; urgency=medium
+
+  * New Synapse release 1.85.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 08 Jun 2023 13:04:18 +0100
+
+matrix-synapse-py3 (1.85.1) stable; urgency=medium
+
+  * New Synapse release 1.85.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 07 Jun 2023 10:51:12 +0100
+
+matrix-synapse-py3 (1.85.0) stable; urgency=medium
+
+  * New Synapse release 1.85.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Jun 2023 09:39:29 +0100
+
+matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.85.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 01 Jun 2023 09:16:18 -0700
+
+matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.85.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 30 May 2023 13:56:54 +0100
+
+matrix-synapse-py3 (1.84.1) stable; urgency=medium
+
+  * New Synapse release 1.84.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 26 May 2023 16:15:30 +0100
+
+matrix-synapse-py3 (1.84.0) stable; urgency=medium
+
+  * New Synapse release 1.84.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 May 2023 10:57:22 +0100
+
 matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium

   * New Synapse release 1.84.0rc1.
@@ -25,9 +25,9 @@ ARG PYTHON_VERSION=3.11
 ###
 ### Stage 0: generate requirements.txt
 ###
-# We hardcode the use of Debian bullseye here because this could change upstream
-# and other Dockerfiles used for testing are expecting bullseye.
-FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as requirements

 # RUN --mount is specific to buildkit and is documented at
 # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
@@ -87,7 +87,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
 ###
 ### Stage 1: builder
 ###
-FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as builder

 # install the OS build deps
 RUN \
@@ -158,7 +158,7 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \
 ### Stage 2: runtime
 ###

-FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm

 LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
 LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
@@ -173,10 +173,10 @@ RUN \
     gosu \
     libjpeg62-turbo \
     libpq5 \
-    libwebp6 \
+    libwebp7 \
     xmlsec1 \
     libjemalloc2 \
-    libicu67 \
+    libicu72 \
     libssl-dev \
     openssl \
     && rm -rf /var/lib/apt/lists/*
@@ -24,16 +24,16 @@ ARG distro=""
 # https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
 # it's not obviously easier to use that than to build our own.)

-FROM ${distro} as builder
+FROM docker.io/library/${distro} as builder

 RUN apt-get update -qq -o Acquire::Languages=none
 RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
         -yqq --no-install-recommends \
         build-essential \
         ca-certificates \
         devscripts \
         equivs \
         wget

 # fetch and unpack the package
 # We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
@@ -55,40 +55,36 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b
 ###
 ### Stage 1
 ###
-FROM ${distro}
+FROM docker.io/library/${distro}

 # Get the distro we want to pull from as a dynamic build variable
 # (We need to define it in each build stage)
 ARG distro=""
 ENV distro ${distro}

-# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
-# http://bugs.python.org/issue19846
-ENV LANG C.UTF-8
-
 # Install the build dependencies
 #
 # NB: keep this list in sync with the list of build-deps in debian/control
 # TODO: it would be nice to do that automatically.
 RUN apt-get update -qq -o Acquire::Languages=none \
     && env DEBIAN_FRONTEND=noninteractive apt-get install \
         -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
         build-essential \
         curl \
         debhelper \
         devscripts \
         libsystemd-dev \
         lsb-release \
         pkg-config \
         python3-dev \
         python3-pip \
         python3-setuptools \
         python3-venv \
         sqlite3 \
         libpq-dev \
         libicu-dev \
         pkg-config \
         xmlsec1

 # Install rust and ensure it's in the PATH
 ENV RUSTUP_HOME=/rust
@@ -7,7 +7,7 @@ ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
 # target image. For repeated rebuilds, this is much faster than apt installing
 # each time.

-FROM debian:bullseye-slim AS deps_base
+FROM docker.io/library/debian:bookworm-slim AS deps_base
     RUN \
        --mount=type=cache,target=/var/cache/apt,sharing=locked \
        --mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -21,7 +21,7 @@ FROM debian:bullseye-slim AS deps_base
 # which makes it much easier to copy (but we need to make sure we use an image
 # based on the same debian version as the synapse image, to make sure we get
 # the expected version of libc.
-FROM redis:6-bullseye AS redis_base
+FROM docker.io/library/redis:7-bookworm AS redis_base

 # now build the final image, based on the regular Synapse docker image
 FROM $FROM
@@ -73,7 +73,8 @@ The following environment variables are supported in `generate` mode:
   will log sensitive information such as access tokens.
   This should not be needed unless you are a developer attempting to debug something
   particularly tricky.
+* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful
+  for testing.

 ## Postgres
@@ -7,6 +7,7 @@
 # https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse

 ARG SYNAPSE_VERSION=latest
+# This is an intermediate image, to be built locally (not pulled from a registry).
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION

 FROM $FROM
@@ -19,8 +20,8 @@ FROM $FROM
     # the same debian version as Synapse's docker image (so the versions of the
     # shared libraries match).
     RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
-    COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
-    COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
+    COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
+    COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
     RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
     ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
     ENV PGDATA=/var/lib/postgresql/data
@@ -92,8 +92,6 @@ allow_device_name_lookup_over_federation: true
 ## Experimental Features ##

 experimental_features:
-  # Enable history backfilling support
-  msc2716_enabled: true
   # client-side support for partial state in /send_join responses
   faster_joins: true
   # Enable support for polls
@@ -35,7 +35,11 @@ server {

     # Send all other traffic to the main process
     location ~* ^(\\/_matrix|\\/_synapse) {
+{% if using_unix_sockets %}
+        proxy_pass http://unix:/run/main_public.sock;
+{% else %}
         proxy_pass http://localhost:8080;
+{% endif %}
         proxy_set_header X-Forwarded-For $remote_addr;
         proxy_set_header X-Forwarded-Proto $scheme;
         proxy_set_header Host $host;
@@ -6,6 +6,9 @@
 {% if enable_redis %}
 redis:
     enabled: true
+{% if using_unix_sockets %}
+    path: /tmp/redis.sock
+{% endif %}
 {% endif %}

 {% if appservice_registrations is not none %}
@@ -19,7 +19,11 @@ username=www-data
 autorestart=true

 [program:redis]
+{% if using_unix_sockets %}
+command=/usr/local/bin/prefix-log /usr/local/bin/redis-server --unixsocket /tmp/redis.sock
+{% else %}
 command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
+{% endif %}
 priority=1
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
@@ -8,7 +8,11 @@ worker_name: "{{ name }}"

 worker_listeners:
   - type: http
+{% if using_unix_sockets %}
+    path: "/run/worker.{{ port }}"
+{% else %}
     port: {{ port }}
+{% endif %}
 {% if listener_resources %}
     resources:
       - names:
@@ -36,12 +36,17 @@ listeners:

   # Allow configuring in case we want to reverse proxy 8008
   # using another process in the same container
+{% if SYNAPSE_USE_UNIX_SOCKET %}
+  # Unix sockets don't care about TLS or IP addresses or ports
+  - path: '/run/main_public.sock'
+    type: http
+{% else %}
   - port: {{ SYNAPSE_HTTP_PORT or 8008 }}
     tls: false
    bind_addresses: ['::']
    type: http
    x_forwarded: false
+{% endif %}
    resources:
      - names: [client]
        compress: true
@@ -57,8 +62,11 @@ database:
     user: "{{ POSTGRES_USER or "synapse" }}"
     password: "{{ POSTGRES_PASSWORD }}"
     database: "{{ POSTGRES_DB or "synapse" }}"
+{% if not SYNAPSE_USE_UNIX_SOCKET %}
+    {# Synapse will use a default unix socket for Postgres when host/port is not specified (behavior from `psycopg2`). #}
     host: "{{ POSTGRES_HOST or "db" }}"
     port: "{{ POSTGRES_PORT or "5432" }}"
+{% endif %}
     cp_min: 5
     cp_max: 10
 {% else %}
@@ -49,17 +49,35 @@ handlers:
     class: logging.StreamHandler
     formatter: precise

-{% if not SYNAPSE_LOG_SENSITIVE %}
-{#
-  If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
-  so that DEBUG entries (containing sensitive information) are not emitted.
-#}
 loggers:
+    # This is just here so we can leave `loggers` in the config regardless of whether
+    # we configure other loggers below (avoid empty yaml dict error).
+    _placeholder:
+        level: "INFO"
+
+{% if not SYNAPSE_LOG_SENSITIVE %}
+{#
+  If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
+  so that DEBUG entries (containing sensitive information) are not emitted.
+#}
     synapse.storage.SQL:
         # beware: increasing this to DEBUG will make synapse log sensitive
         # information such as access tokens.
         level: INFO
 {% endif %}

+{% if SYNAPSE_LOG_TESTING %}
+{#
+  If Synapse is under test, log a few more useful things for a developer
+  attempting to debug something particularly tricky.
+
+  With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
+  unexpectedly) filtered out of responses in tests. It's just nice to be able to
+  look at the CI log and figure out why an event isn't being returned.
+#}
+    synapse.visibility.filtered_event_debug:
+        level: DEBUG
+{% endif %}

 root:
     level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
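The `_placeholder` logger added above sidesteps a YAML quirk: with both template conditionals false, `loggers:` would have no children and would load as `None` rather than an empty dict, which `logging.config.dictConfig` rejects. A small illustration (assuming PyYAML is installed):

```python
import yaml

# A mapping key with no children parses as None, not as an empty dict ...
print(yaml.safe_load("loggers:"))  # {'loggers': None}

# ... whereas a key with at least one child parses as a dict, as dictConfig expects.
print(yaml.safe_load("loggers:\n  _placeholder:\n    level: INFO"))
# {'loggers': {'_placeholder': {'level': 'INFO'}}}
```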
@@ -40,6 +40,8 @@
 #   log level. INFO is the default.
 # * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
 #   regardless of the SYNAPSE_LOG_LEVEL setting.
+# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
+#   for testing.
 #
 # NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
 # in the project's README), this script may be run multiple times, and functionality should
@@ -72,6 +74,9 @@ MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
 MAIN_PROCESS_INSTANCE_NAME = "main"
 MAIN_PROCESS_LOCALHOST_ADDRESS = "127.0.0.1"
 MAIN_PROCESS_REPLICATION_PORT = 9093
+# Obviously, these would only be used with the UNIX socket option
+MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH = "/run/main_public.sock"
+MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH = "/run/main_private.sock"

 # A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
 # during processing with the name of the worker.
@@ -178,6 +183,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(r0|v3|unstable)/password_policy$",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
             "^/_matrix/client/(r0|v3|unstable)/capabilities$",
+            "^/_matrix/client/(r0|v3|unstable)/notifications$",
         ],
         "shared_extra_conf": {},
         "worker_extra_conf": "",
@@ -242,7 +248,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
-            "^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
         ],
         "shared_extra_conf": {},
         "worker_extra_conf": "",
@@ -406,11 +411,15 @@ def add_worker_roles_to_shared_config(
         )

         # Map of stream writer instance names to host/ports combos
-        instance_map[worker_name] = {
-            "host": "localhost",
-            "port": worker_port,
-        }
+        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
+            instance_map[worker_name] = {
+                "path": f"/run/worker.{worker_port}",
+            }
+        else:
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
         # Update the list of stream writers. It's convenient that the name of the worker
         # type is the same as the stream to write. Iterate over the whole list in case there
         # is more than one.
@@ -422,10 +431,15 @@ def add_worker_roles_to_shared_config(

         # Map of stream writer instance names to host/ports combos
         # For now, all stream writers need http replication ports
-        instance_map[worker_name] = {
-            "host": "localhost",
-            "port": worker_port,
-        }
+        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
+            instance_map[worker_name] = {
+                "path": f"/run/worker.{worker_port}",
+            }
+        else:
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }


 def merge_worker_template_configs(
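One subtlety in the hunks above: `os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False)` treats the variable as a plain string, so truthiness is decided by string emptiness, not by the value's meaning. A minimal illustration:

```python
import os

os.environ["SYNAPSE_USE_UNIX_SOCKET"] = "false"
# The raw string is returned, and any non-empty string is truthy.
print(bool(os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False)))  # True

del os.environ["SYNAPSE_USE_UNIX_SOCKET"]
# Only when the variable is unset does the False default apply.
print(bool(os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False)))  # False
```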
@@ -717,17 +731,29 @@ def generate_worker_files(
     # Note that yaml cares about indentation, so care should be taken to insert lines
     # into files at the correct indentation below.

+    # Convenience helper for when using unix sockets instead of host:port
+    using_unix_sockets = environ.get("SYNAPSE_USE_UNIX_SOCKET", False)
     # First read the original config file and extract the listeners block. Then we'll
     # add another listener for replication. Later we'll write out the result to the
     # shared config file.
-    listeners = [
-        {
-            "port": MAIN_PROCESS_REPLICATION_PORT,
-            "bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
-            "type": "http",
-            "resources": [{"names": ["replication"]}],
-        }
-    ]
+    listeners: List[Any]
+    if using_unix_sockets:
+        listeners = [
+            {
+                "path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
+                "type": "http",
+                "resources": [{"names": ["replication"]}],
+            }
+        ]
+    else:
+        listeners = [
+            {
+                "port": MAIN_PROCESS_REPLICATION_PORT,
+                "bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
+                "type": "http",
+                "resources": [{"names": ["replication"]}],
+            }
+        ]
     with open(config_path) as file_stream:
         original_config = yaml.safe_load(file_stream)
         original_listeners = original_config.get("listeners")
@@ -768,7 +794,17 @@ def generate_worker_files(

     # A list of internal endpoints to healthcheck, starting with the main process
     # which exists even if no workers do.
-    healthcheck_urls = ["http://localhost:8080/health"]
+    # This list ends up being part of the command line to curl (curl added support for
+    # Unix sockets in version 7.40).
+    if using_unix_sockets:
+        healthcheck_urls = [
+            f"--unix-socket {MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH} "
+            # The scheme and hostname from the following URL are ignored.
+            # The only thing that matters is the path `/health`
+            "http://localhost/health"
+        ]
+    else:
+        healthcheck_urls = ["http://localhost:8080/health"]

     # Get the set of all worker types that we have configured
     all_worker_types_in_use = set(chain(*requested_worker_types.values()))
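For reference, each unix-socket healthcheck entry generated above is roughly equivalent to the following invocation (a sketch; assumes curl >= 7.40 is on the image):

```python
import subprocess

# HTTP over the unix socket: the scheme/host in the URL are placeholders,
# only the /health path matters to the server on the other end.
subprocess.run(
    ["curl", "--unix-socket", "/run/main_public.sock", "http://localhost/health"],
    check=True,
)
```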
@@ -805,8 +841,12 @@ def generate_worker_files(
         # given worker_type needs to stay assigned and not be replaced.
         worker_config["shared_extra_conf"].update(shared_config)
         shared_config = worker_config["shared_extra_conf"]

-        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
+        if using_unix_sockets:
+            healthcheck_urls.append(
+                f"--unix-socket /run/worker.{worker_port} http://localhost/health"
+            )
+        else:
+            healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

         # Update the shared config with sharding-related options if necessary
         add_worker_roles_to_shared_config(
@@ -822,9 +862,10 @@ def generate_worker_files(
         # Then a worker config file
         convert(
             "/conf/worker.yaml.j2",
-            "/conf/workers/{name}.yaml".format(name=worker_name),
+            f"/conf/workers/{worker_name}.yaml",
             **worker_config,
             worker_log_config_filepath=log_config_filepath,
+            using_unix_sockets=using_unix_sockets,
         )

         # Save this worker's port number to the correct nginx upstreams
@@ -845,8 +886,13 @@ def generate_worker_files(
     nginx_upstream_config = ""
     for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():
         body = ""
-        for port in upstream_worker_ports:
-            body += f"    server localhost:{port};\n"
+        if using_unix_sockets:
+            for port in upstream_worker_ports:
+                body += f"    server unix:/run/worker.{port};\n"
+        else:
+            for port in upstream_worker_ports:
+                body += f"    server localhost:{port};\n"

         # Add to the list of configured upstreams
         nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
@@ -876,10 +922,15 @@ def generate_worker_files(
     # If there are workers, add the main process to the instance_map too.
     if workers_in_use:
         instance_map = shared_config.setdefault("instance_map", {})
-        instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
-            "host": MAIN_PROCESS_LOCALHOST_ADDRESS,
-            "port": MAIN_PROCESS_REPLICATION_PORT,
-        }
+        if using_unix_sockets:
+            instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
+                "path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
+            }
+        else:
+            instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
+                "host": MAIN_PROCESS_LOCALHOST_ADDRESS,
+                "port": MAIN_PROCESS_REPLICATION_PORT,
+            }

     # Shared homeserver config
     convert(
@@ -889,6 +940,7 @@ def generate_worker_files(
         appservice_registrations=appservice_registrations,
         enable_redis=workers_in_use,
         workers_in_use=workers_in_use,
+        using_unix_sockets=using_unix_sockets,
     )

     # Nginx config
@@ -899,6 +951,7 @@ def generate_worker_files(
         upstream_directives=nginx_upstream_config,
         tls_cert_path=os.environ.get("SYNAPSE_TLS_CERT"),
         tls_key_path=os.environ.get("SYNAPSE_TLS_KEY"),
+        using_unix_sockets=using_unix_sockets,
     )

     # Supervisord config
@@ -908,6 +961,7 @@ def generate_worker_files(
         "/etc/supervisor/supervisord.conf",
         main_config_path=config_path,
         enable_redis=workers_in_use,
+        using_unix_sockets=using_unix_sockets,
     )

     convert(
@@ -947,6 +1001,7 @@ def generate_worker_log_config(
     extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
         "SYNAPSE_LOG_SENSITIVE"
    )
+    extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")

     # Render and write the file
     log_config_filepath = f"/conf/workers/{worker_name}.log.config"
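Taken together, the unix-socket branches above mean the generated shared config carries path-based `instance_map` entries instead of host/port pairs; roughly (a sketch, with a hypothetical worker name and port):

```python
# Sketch of the resulting instance_map when SYNAPSE_USE_UNIX_SOCKET is set.
instance_map = {
    "main": {"path": "/run/main_private.sock"},
    "event_persister1": {"path": "/run/worker.8034"},  # hypothetical worker/port
}
```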
@@ -8,9 +8,9 @@ ARG PYTHON_VERSION=3.9
 ###
 ### Stage 0: generate requirements.txt
 ###
-# We hardcode the use of Debian bullseye here because this could change upstream
-# and other Dockerfiles used for testing are expecting bullseye.
-FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm

 # Install Rust and other dependencies (stolen from normal Dockerfile)
 # install the OS build deps
@@ -33,7 +33,7 @@ RUN \
     gosu \
     libjpeg62-turbo \
     libpq5 \
-    libwebp6 \
+    libwebp7 \
     xmlsec1 \
     libjemalloc2 \
     && rm -rf /var/lib/apt/lists/*
@@ -82,7 +82,7 @@ def generate_config_from_template(
             with open(filename) as handle:
                 value = handle.read()
         else:
-            log("Generating a random secret for {}".format(secret))
+            log(f"Generating a random secret for {secret}")
             value = codecs.encode(os.urandom(32), "hex").decode()
             with open(filename, "w") as handle:
                 handle.write(value)
@@ -239,7 +239,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
         log("Could not find %s, will not use" % (jemallocpath,))

     # if there are no config files passed to synapse, try adding the default file
-    if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
+    if not any(p.startswith(("--config-path", "-c")) for p in args):
         config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
         config_path = environ.get(
             "SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
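The second hunk leans on the fact that `str.startswith` accepts a tuple of prefixes, collapsing the two checks into one:

```python
# str.startswith returns True if the string starts with any prefix in the tuple.
args = ["-c", "/data/homeserver.yaml"]
print(any(p.startswith(("--config-path", "-c")) for p in args))  # True
```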
@@ -97,6 +97,7 @@
 - [Cancellation](development/synapse_architecture/cancellation.md)
 - [Log Contexts](log_contexts.md)
 - [Replication](replication.md)
+- [Streams](development/synapse_architecture/streams.md)
 - [TCP Replication](tcp_replication.md)
 - [Faster remote joins](development/synapse_architecture/faster_joins.md)
 - [Internal Documentation](development/internal_documentation/README.md)
@@ -1,5 +1,7 @@
 # Account validity API

+**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
+
 This API allows a server administrator to manage the validity of an account. To
 use it, you must enable the account validity feature (under
 `account_validity`) in Synapse's configuration.
@@ -1,5 +1,7 @@
 # Shared-Secret Registration

+**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
+
 This API allows for the creation of users in an administrative and
 non-interactive way. This is generally used for bootstrapping a Synapse
 instance with administrator accounts.
@@ -419,7 +419,7 @@ The following query parameters are available:

 * `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
   or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
-* `to` - The token to spot returning events at.
+* `to` - The token to stop returning events at.
 * `limit` - The maximum number of events to return. Defaults to `10`.
 * `filter` - A JSON RoomEventFilter to filter returned events with.
 * `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
@@ -54,7 +54,8 @@ It returns a JSON body like the following:
         "external_id": "<user_id_provider_2>"
       }
     ],
-    "user_type": null
+    "user_type": null,
+    "locked": false
 }
 ```

@@ -103,7 +104,8 @@ with a body of:
     ],
     "admin": false,
     "deactivated": false,
-    "user_type": null
+    "user_type": null,
+    "locked": false
 }
 ```
|
@ -155,6 +157,7 @@ Body parameters:
|
||||||
|
|
||||||
Note: a user cannot be erased with this API. For more details on
|
Note: a user cannot be erased with this API. For more details on
|
||||||
deactivating and erasing users see [Deactivate Account](#deactivate-account).
|
deactivating and erasing users see [Deactivate Account](#deactivate-account).
|
||||||
|
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
|
||||||
- `user_type` - **string** or null, optional. If not provided, the user type will be
|
- `user_type` - **string** or null, optional. If not provided, the user type will be
|
||||||
not be changed. If `null` is given, the user type will be cleared.
|
not be changed. If `null` is given, the user type will be cleared.
|
||||||
Other allowed options are: `bot` and `support`.
|
Other allowed options are: `bot` and `support`.
|
||||||
|
@@ -183,7 +186,8 @@ A response body like the following is returned:
     "shadow_banned": 0,
     "displayname": "<User One>",
     "avatar_url": null,
-    "creation_ts": 1560432668000
+    "creation_ts": 1560432668000,
+    "locked": false
 }, {
     "name": "<user_id2>",
     "is_guest": 0,
@@ -194,7 +198,8 @@ A response body like the following is returned:
     "shadow_banned": 0,
     "displayname": "<User Two>",
     "avatar_url": "<avatar_url>",
-    "creation_ts": 1561550621000
+    "creation_ts": 1561550621000,
+    "locked": false
 }
 ],
 "next_token": "100",
@@ -217,7 +222,9 @@ The following parameters should be set in the URL:
 - `name` - Is optional and filters to only return users with user ID localparts
   **or** displaynames that contain this value.
 - `guests` - string representing a bool - Is optional and if `false` will **exclude** guest users.
-  Defaults to `true` to include guest users.
+  Defaults to `true` to include guest users. This parameter is not supported when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
+- `admins` - Optional flag to filter admins. If `true`, only admins are queried. If `false`, admins are excluded from
+  the query. When the flag is absent (the default), **both** admins and non-admins are included in the search results.
 - `deactivated` - string representing a bool - Is optional and if `true` will **include** deactivated users.
   Defaults to `false` to exclude deactivated users.
 - `limit` - string representing a positive integer - Is optional but is used for pagination,
@@ -239,9 +246,15 @@ The following parameters should be set in the URL:
   - `displayname` - Users are ordered alphabetically by `displayname`.
   - `avatar_url` - Users are ordered alphabetically by avatar URL.
   - `creation_ts` - Users are ordered by when the user was created, in ms.
+  - `last_seen_ts` - Users are ordered by when the user was last seen, in ms.

 - `dir` - Direction of media order. Either `f` for forwards or `b` for backwards.
   Setting this value to `b` will reverse the above sort order. Defaults to `f`.
+- `not_user_type` - Exclude certain user types, such as bot users, from the request.
+  Can be provided multiple times. Possible values are `bot`, `support` or "empty string".
+  "empty string" here means to exclude users without a type.
+- `locked` - string representing a bool - Is optional and if `true` will **include** locked users.
+  Defaults to `false` to exclude locked users. Note: Introduced in v1.93.

 Caution. The database only has indexes on the columns `name` and `creation_ts`.
 This means that if a different sort order is used (`is_guest`, `admin`,
@ -266,10 +279,12 @@ The following fields are returned in the JSON response body:
- `displayname` - string - The user's display name if they have set one.
- `avatar_url` - string - The user's avatar URL if they have set one.
- `creation_ts` - integer - The user's creation timestamp in ms.
- `last_seen_ts` - integer - The user's last activity timestamp in ms.
- `locked` - bool - Whether the user has been marked as locked. Note: Introduced in v1.93.
- `next_token`: string representing a positive integer - Indication for pagination. See above.
- `total` - integer - Total number of users.

*Added in Synapse 1.93:* the `locked` query parameter and response field.
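For orientation, a hedged example of calling the list-users endpoint with some of the parameters above (the homeserver URL and admin token are placeholder assumptions):

```sh
# Placeholder URL and token; sorts newest accounts first and includes locked users.
curl --header "Authorization: Bearer <admin_access_token>" \
  "http://localhost:8008/_synapse/admin/v2/users?order_by=creation_ts&dir=b&locked=true&limit=10"
```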

## Query current sessions for a user
@ -384,6 +399,8 @@ The following actions are **NOT** performed. The list may be incomplete.

## Reset password

**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)

Changes the password of another user. This will automatically log the user out of all their devices.

The api is:
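The endpoint itself is elided from this hunk; as a hedged `curl` sketch of a call (homeserver URL, token and password are placeholders):

```sh
# Placeholder values throughout; `logout_devices` is the optional flag discussed below.
curl -X POST --header "Authorization: Bearer <admin_access_token>" \
  --data '{"new_password": "<new_password>", "logout_devices": true}' \
  "http://localhost:8008/_synapse/admin/v1/reset_password/@user:example.com"
```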
@ -407,6 +424,8 @@ The parameter `logout_devices` is optional and defaults to `true`.

## Get whether a user is a server administrator or not

**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)

The api is:

```
@ -424,6 +443,8 @@ A response body like the following is returned:

## Change whether a user is a server administrator or not

**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)

Note that you cannot demote yourself.

The api is:
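The endpoint itself is elided here; a hedged sketch of promoting a user (placeholder homeserver URL and token; `false` would demote instead):

```sh
# Placeholder values; sets the admin bit for the given user.
curl -X PUT --header "Authorization: Bearer <admin_access_token>" \
  --data '{"admin": true}' \
  "http://localhost:8008/_synapse/admin/v1/users/@user:example.com/admin"
```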
@ -717,6 +738,8 @@ delete largest/smallest or newest/oldest files first.

## Login as a user

**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)

Get an access token that can be used to authenticate as that user. Useful for
when admins wish to do actions on behalf of a user.
@ -729,7 +752,8 @@ POST /_synapse/admin/v1/users/<user_id>/login

An optional `valid_until_ms` field can be specified in the request body as an
integer timestamp that specifies when the token should expire. By default tokens
do not expire.
do not expire. Note that this API does not allow a user to login as themselves
(to create more tokens).
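For orientation, a hedged sketch of requesting such a token against the endpoint named in this hunk (placeholder homeserver URL, token and expiry):

```sh
# Placeholder values; the returned token expires at the given ms timestamp.
curl -X POST --header "Authorization: Bearer <admin_access_token>" \
  --data '{"valid_until_ms": 1698278400000}' \
  "http://localhost:8008/_synapse/admin/v1/users/@user:example.com/login"
```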

A response body like the following is returned:
@ -1180,7 +1204,7 @@ The following parameters should be set in the URL:
- `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
  be local.

### Check username availability
## Check username availability

Checks to see if a username is available, and valid, for the server. See [the client-server
API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available)
@ -1198,7 +1222,7 @@ GET /_synapse/admin/v1/username_available?username=$localpart
The request and response format is the same as the
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
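A hedged sketch of querying the endpoint named in this hunk (placeholder homeserver URL and token):

```sh
# Placeholder values; the localpart to test goes in the query string.
curl --header "Authorization: Bearer <admin_access_token>" \
  "http://localhost:8008/_synapse/admin/v1/username_available?username=foo"
```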

### Find a user based on their ID in an auth provider
## Find a user based on their ID in an auth provider

The API is:
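The endpoint itself is elided from this hunk. Assuming it follows the `auth_providers` form used elsewhere in this admin API, a hedged sketch (every value below is a placeholder):

```sh
# Hypothetical path and values; looks up the MXID mapped to an external auth provider ID.
curl --header "Authorization: Bearer <admin_access_token>" \
  "http://localhost:8008/_synapse/admin/v1/auth_providers/<auth_provider>/users/<external_user_id>"
```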
@ -1237,7 +1261,7 @@ Returns a `404` HTTP status code if no user was found, with a response body like
_Added in Synapse 1.68.0._


### Find a user based on their Third Party ID (ThreePID or 3PID)
## Find a user based on their Third Party ID (ThreePID or 3PID)

The API is:
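The endpoint itself is elided from this hunk. Assuming it follows the `threepid` form used elsewhere in this admin API, a hedged sketch (every value below is a placeholder):

```sh
# Hypothetical path and values; `email` is the medium, the address follows it.
curl --header "Authorization: Bearer <admin_access_token>" \
  "http://localhost:8008/_synapse/admin/v1/threepid/email/users/<address>"
```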
@ -1,7 +1,7 @@
# Version API

This API returns the running Synapse version and the Python version
This API returns the running Synapse version.
on which Synapse is being run. This is useful when a Synapse instance
This is useful when a Synapse instance
is behind a proxy that does not forward the 'Server' header (which also
contains Synapse version information).
@ -15,7 +15,9 @@ It returns a JSON body like the following:

```json
{
    "server_version": "0.99.2rc1 (b=develop, abcdef123)",
    "server_version": "0.99.2rc1 (b=develop, abcdef123)"
    "python_version": "3.7.8"
}
```

*Changed in Synapse 1.94.0:* The `python_version` key was removed from the
response body.
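For orientation, a hedged sketch of querying this API (assuming the documented `/_synapse/admin/v1/server_version` path; the homeserver URL is a placeholder):

```sh
# Placeholder URL; returns the JSON body shown above.
curl "http://localhost:8008/_synapse/admin/v1/server_version"
```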
@ -24,7 +24,7 @@ Server with a domain specific API.
1. **Messaging Layer**

   This is what the rest of the homeserver hits to send messages, join rooms,
   etc. It also allows you to register callbacks for when it get's notified by
   etc. It also allows you to register callbacks for when it gets notified by
   lower levels that e.g. a new message has been received.

   It is responsible for serializing requests to send to the data
@ -164,7 +164,7 @@ Synapse 1.6.0rc2 (2019-11-25)
Bugfixes
--------

- Fix a bug which could cause the background database update hander for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))
- Fix a bug which could cause the background database update handler for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))


Synapse 1.6.0rc1 (2019-11-20)
@ -191,7 +191,7 @@ Bugfixes
- Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306))
- Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307))
- Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313))
- Fix bug which casued rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
- Fix bug which caused rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
- Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335))
- Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338))
- Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359))
@ -232,7 +232,7 @@ Internal Changes
- Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305))
- Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308))
- Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312))
- Add optional python dependencies and dependant binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
- Add optional python dependencies and dependent binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
- Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336))
- Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319))
- Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330))
@ -653,7 +653,7 @@ Internal Changes
- Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810))
- Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826))
- Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836))
- Whitelist history visbility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))
- Whitelist history visibility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))


Synapse 1.2.1 (2019-07-26)
@ -817,7 +817,7 @@ See the [upgrade notes](docs/upgrade.md#upgrading-to-v110) for more details.
Features
--------

- Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
- Added possibility to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
@ -850,7 +850,7 @@ Bugfixes
- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
- Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
- Fixed m.login.jwt using unregistered user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))
@ -251,7 +251,7 @@ Internal Changes

- Optimise `/createRoom` with multiple invited users. ([\#8559](https://github.com/matrix-org/synapse/issues/8559))
- Implement and use an `@lru_cache` decorator. ([\#8595](https://github.com/matrix-org/synapse/issues/8595))
- Don't instansiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614))
- Don't instantiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614))
- Type hints for `RegistrationStore`. ([\#8615](https://github.com/matrix-org/synapse/issues/8615))
- Change schema to support access tokens belonging to one user but granting access to another. ([\#8616](https://github.com/matrix-org/synapse/issues/8616))
- Remove unused OPTIONS handlers. ([\#8621](https://github.com/matrix-org/synapse/issues/8621))
@ -518,7 +518,7 @@ Bugfixes
- Fix a bug which cause the logging system to report errors, if `DEBUG` was enabled and no `context` filter was applied. ([\#8278](https://github.com/matrix-org/synapse/issues/8278))
- Fix edge case where push could get delayed for a user until a later event was pushed. ([\#8287](https://github.com/matrix-org/synapse/issues/8287))
- Fix fetching malformed events from remote servers. ([\#8324](https://github.com/matrix-org/synapse/issues/8324))
- Fix `UnboundLocalError` from occuring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329))
- Fix `UnboundLocalError` from occurring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329))
- Don't send push notifications to expired user accounts. ([\#8353](https://github.com/matrix-org/synapse/issues/8353))
- Fix a regression in v1.19.0 with reactivating users through the admin API. ([\#8362](https://github.com/matrix-org/synapse/issues/8362))
- Fix a bug where during device registration the length of the device name wasn't limited. ([\#8364](https://github.com/matrix-org/synapse/issues/8364))
@ -815,7 +815,7 @@ Bugfixes
- Fix a bug introduced in Synapse v1.7.2 which caused inaccurate membership counts in the room directory. ([\#7977](https://github.com/matrix-org/synapse/issues/7977))
- Fix a long standing bug: 'Duplicate key value violates unique constraint "event_relations_id"' when message retention is configured. ([\#7978](https://github.com/matrix-org/synapse/issues/7978))
- Fix "no create event in auth events" when trying to reject invitation after inviter leaves. Bug introduced in Synapse v1.10.0. ([\#7980](https://github.com/matrix-org/synapse/issues/7980))
- Fix various comments and minor discrepencies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996))
- Fix various comments and minor discrepancies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996))
- Fix a long standing bug where HTTP HEAD requests resulted in a 400 error. ([\#7999](https://github.com/matrix-org/synapse/issues/7999))
- Fix a long-standing bug which caused two copies of some log lines to be written when synctl was used along with a MemoryHandler logger. ([\#8011](https://github.com/matrix-org/synapse/issues/8011), [\#8012](https://github.com/matrix-org/synapse/issues/8012))
@ -1460,7 +1460,7 @@ Bugfixes
- Transfer alias mappings on room upgrade. ([\#6946](https://github.com/matrix-org/synapse/issues/6946))
- Ensure that a user interactive authentication session is tied to a single request. ([\#7068](https://github.com/matrix-org/synapse/issues/7068), [\#7455](https://github.com/matrix-org/synapse/issues/7455))
- Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors. ([\#7089](https://github.com/matrix-org/synapse/issues/7089))
- Return the proper error (`M_BAD_ALIAS`) when a non-existant canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109))
- Return the proper error (`M_BAD_ALIAS`) when a non-existent canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109))
- Fix a bug which meant that groups updates were not correctly replicated between workers. ([\#7117](https://github.com/matrix-org/synapse/issues/7117))
- Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133))
- Ensure `is_verified` is a boolean in responses to `GET /_matrix/client/r0/room_keys/keys`. Also warn the user if they forgot the `version` query param. ([\#7150](https://github.com/matrix-org/synapse/issues/7150))
@ -1482,7 +1482,7 @@ Bugfixes
- Fix bad error handling that would cause Synapse to crash if it's provided with a YAML configuration file that's either empty or doesn't parse into a key-value map. ([\#7341](https://github.com/matrix-org/synapse/issues/7341))
- Fix incorrect metrics reporting for `renew_attestations` background task. ([\#7344](https://github.com/matrix-org/synapse/issues/7344))
- Prevent non-federating rooms from appearing in responses to federated `POST /publicRoom` requests when a filter was included. ([\#7367](https://github.com/matrix-org/synapse/issues/7367))
- Fix a bug which would cause the room durectory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
- Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
- Fix bug in `EventContext.deserialize`. ([\#7393](https://github.com/matrix-org/synapse/issues/7393))
@ -1638,7 +1638,7 @@ Security advisory
-----------------

Synapse may be vulnerable to request-smuggling attacks when it is used with a
reverse-proxy. The vulnerabilties are fixed in Twisted 20.3.0, and are
reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
described in
[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
and
@ -1748,7 +1748,7 @@ Internal Changes
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
- Minor perfermance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
@ -1809,7 +1809,7 @@ Bugfixes
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
- Fix Synapse refusing to start if `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
- Return a 404 instead of 200 for querying information of a non-existant user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))
- Return a 404 instead of 200 for querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))


Updates to the Docker image
@ -1889,7 +1889,7 @@ Bugfixes
Synapse 1.10.0rc4 (2020-02-11)
==============================

This release candidate was built incorrectly and is superceded by 1.10.0rc5.
This release candidate was built incorrectly and is superseded by 1.10.0rc5.

Synapse 1.10.0rc3 (2020-02-10)
==============================
@ -2270,7 +2270,7 @@ Features
Bugfixes
--------

- Fix spurious errors in logs when deleting a non-existant pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
@ -2522,7 +2522,7 @@ Bugfixes
- Fix a long-standing bug where a `m.image` event without a `url` would cause errors on push. ([\#8965](https://github.com/matrix-org/synapse/issues/8965))
- Fix a small bug in v2 state resolution algorithm, which could also cause performance issues for rooms with large numbers of power levels. ([\#8971](https://github.com/matrix-org/synapse/issues/8971))
- Add validation to the `sendToDevice` API to raise a missing parameters error instead of a 500 error. ([\#8975](https://github.com/matrix-org/synapse/issues/8975))
- Add validation of group IDs to raise a 400 error instead of a 500 eror. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))
- Add validation of group IDs to raise a 400 error instead of a 500 error. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))


Improved Documentation
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -8,9 +8,9 @@ to the server until they have.
There are several parts to this functionality; each requires some specific
configuration in `homeserver.yaml` to be enabled.

Note that various parts of the configuation and this document refer to the
Note that various parts of the configuration and this document refer to the
"privacy policy": agreement with a privacy policy is one particular use of this
feature, but of course adminstrators can specify other terms and conditions
feature, but of course administrators can specify other terms and conditions
unrelated to "privacy" per se.

Collecting policy agreement from a user
@ -23,7 +23,7 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/buster/libsqlite3-0) by
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).

Context
@ -22,6 +22,9 @@ on Windows is not officially supported.

The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://www.python.org/downloads/). Your Python also needs support for [virtual environments](https://docs.python.org/3/library/venv.html). This is usually built-in, but some Linux distributions like Debian and Ubuntu split it out into its own package. Running `sudo apt install python3-venv` should be enough.

A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).

Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.

Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
@ -30,9 +33,6 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver

For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).

A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).

# 3. Get the source.
@ -53,6 +53,11 @@ can find many good git tutorials on the web.

# 4. Install the dependencies

Before installing the Python dependencies, make sure you have installed a recent version
of Rust (see the "What do I need?" section above). The easiest way of installing the
latest version is to use [rustup](https://rustup.rs/).

Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
and development environment. Once you have installed Python 3 and added the
source, you should install `poetry`.
@ -76,7 +81,8 @@ cd path/where/you/have/cloned/the/repository
poetry install --extras all
```

This will install the runtime and developer dependencies for the project.
This will install the runtime and developer dependencies for the project. Be sure to check
that the `poetry install` step completed cleanly.

## Running Synapse via poetry
@ -84,14 +90,31 @@ To start a local instance of Synapse in the locked poetry environment, create a

```sh
cp docs/sample_config.yaml homeserver.yaml
cp docs/sample_log_config.yaml log_config.yaml
```

Now edit homeserver.yaml, and run Synapse with:
Now edit `homeserver.yaml`, things you might want to change include:

- Set a `server_name`
- Adjusting paths to be correct for your system like the `log_config` to point to the log config you just copied
- Using a [PostgreSQL database instead of SQLite](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database)
- Adding a [`registration_shared_secret`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration_shared_secret) so you can use [`register_new_matrix_user` command](https://matrix-org.github.io/synapse/latest/setup/installation.html#registering-a-user).

And then run Synapse with the following command:

```sh
poetry run python -m synapse.app.homeserver -c homeserver.yaml
```

If you get an error like the following:

```
importlib.metadata.PackageNotFoundError: matrix-synapse
```

this probably indicates that the `poetry install` step did not complete cleanly - go back and
resolve any issues and re-run until successful.
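Once the server is running, one way to create an initial account is the `register_new_matrix_user` command mentioned above; a sketch, assuming `registration_shared_secret` is configured and the server listens on the default port:

```sh
# Assumes the shared secret is set in homeserver.yaml and Synapse is reachable locally.
poetry run register_new_matrix_user -c homeserver.yaml http://localhost:8008
```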

# 5. Get in touch.

Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!
@ -243,7 +266,7 @@ The easiest way to do so is to run Postgres via a docker container. In one
terminal:

```shell
docker run --rm -e POSTGRES_PASSWORD=mysecretpassword -e POSTGRES_USER=postgres -e POSTGRES_DB=postgress -p 5432:5432 postgres:14
docker run --rm -e POSTGRES_PASSWORD=mysecretpassword -e POSTGRES_USER=postgres -e POSTGRES_DB=postgres -p 5432:5432 postgres:14
```

If you see an error like
@ -299,7 +322,7 @@ The following command will let you run the integration test with the most common
configuration:

```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
@ -347,6 +370,7 @@ The above will run a monolithic (single-process) Synapse with SQLite as the data
See the [worker documentation](../workers.md) for additional information on workers.
- Passing `ASYNCIO_REACTOR=1` as an environment variable to use the Twisted asyncio reactor instead of the default one.
- Passing `PODMAN=1` will use the [podman](https://podman.io/) container runtime, instead of docker.
- Passing `UNIX_SOCKETS=1` will utilise Unix socket functionality for Synapse, Redis, and Postgres (when applicable).

To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`, e.g:
```sh
@ -150,6 +150,67 @@ def run_upgrade(
    ...
```

## Background updates

It is sometimes appropriate to perform database migrations as part of a background
process (instead of blocking Synapse until the migration is done). In particular,
this is useful for migrating data when adding new columns or tables.

Pending background updates are stored in the `background_updates` table and are denoted
by a unique name, the current status (stored in JSON), and some dependency information:

* Whether the update requires a previous update to be complete.
* A rough ordering for which to complete updates.

A new background update needs to be added to the `background_updates` table:

```sql
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES
  (7706, 'my_background_update', 'a_previous_background_update', '{}');
```

And then needs an associated handler in the appropriate datastore:

```python
self.db_pool.updates.register_background_update_handler(
    "my_background_update",
    update_handler=self._my_background_update,
)
```

There are a few types of updates that can be performed, see the `BackgroundUpdater`:

* `register_background_update_handler`: A generic handler for custom SQL
* `register_background_index_update`: Create an index in the background
* `register_background_validate_constraint`: Validate a constraint in the background
  (PostgreSQL-only)
* `register_background_validate_constraint_and_delete_rows`: Similar to
  `register_background_validate_constraint`, but deletes rows which don't fit
  the constraint.

For `register_background_update_handler`, the generic handler must track progress
and then finalize the background update:

```python
async def _my_background_update(self, progress: JsonDict, batch_size: int) -> int:
    def _do_something(txn: LoggingTransaction) -> int:
        ...
        self.db_pool.updates._background_update_progress_txn(
            txn, "my_background_update", {"last_processed": last_processed}
        )
        return last_processed - prev_last_processed

    num_processed = await self.db_pool.runInteraction("_do_something", _do_something)
    await self.db_pool.updates._end_background_update("my_background_update")

    return num_processed
```

Synapse will attempt to rate-limit how often background updates are run via the
given batch-size and the returned number of processed entries (and how long the
function took to run). See
[background update controller callbacks](../modules/background_update_controller_callbacks.md).
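To see what is still pending on a running deployment, you can inspect this table directly; a sketch, assuming a Postgres database named `synapse`:

```sh
# Assumed database name; lists background updates that have not yet completed.
psql synapse -c "SELECT ordering, update_name, progress_json FROM background_updates;"
```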

## Boolean columns

Boolean columns require special treatment, since SQLite treats booleans the
@ -184,3 +245,160 @@ version `3`, that can only happen with a hash collision, which we basically hope
|
||||||
will never happen (SHA256 has a massive big key space).
|
will never happen (SHA256 has a massive big key space).
|
||||||
|
|
||||||
|
|
||||||
|
## Worked examples of gradual migrations
|
||||||
|
|
||||||
|
Some migrations need to be performed gradually. A prime example of this is anything
|
||||||
|
which would need to do a large table scan — including adding columns, indices or
|
||||||
|
`NOT NULL` constraints to non-empty tables — such a migration should be done as a
|
||||||
|
background update where possible, at least on Postgres.
|
||||||
|
We can afford to be more relaxed about SQLite databases since they are usually
|
||||||
|
used on smaller deployments and SQLite does not support the same concurrent
|
||||||
|
DDL operations as Postgres.
|
||||||
|
|
||||||
|
We also typically insist on having at least one Synapse version's worth of
|
||||||
|
backwards compatibility, so that administrators can roll back Synapse if an upgrade
|
||||||
|
did not go smoothly.
|
||||||
|
|
||||||
|
This sometimes results in having to plan a migration across multiple versions
|
||||||
|
of Synapse.
|
||||||
|
|
||||||
|
This section includes an example and may include more in the future.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### Transforming a column into another one, with `NOT NULL` constraints
|
||||||
|
|
||||||
|
This example illustrates how you would introduce a new column, write data into it
|
||||||
|
based on data from an old column and then drop the old column.
|
||||||
|
|
||||||
|
We are aiming for semantic equivalence to:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
ALTER TABLE mytable ADD COLUMN new_column INTEGER;
|
||||||
|
UPDATE mytable SET new_column = old_column * 100;
|
||||||
|
ALTER TABLE mytable ALTER COLUMN new_column ADD CONSTRAINT NOT NULL;
|
||||||
|
ALTER TABLE mytable DROP COLUMN old_column;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Synapse version `N`
|
||||||
|
|
||||||
|
```python
|
||||||
|
SCHEMA_VERSION = S
|
||||||
|
SCHEMA_COMPAT_VERSION = ... # unimportant at this stage
|
||||||
|
```
|
||||||
|
|
||||||
|
**Invariants:**
|
||||||
|
1. `old_column` is read by Synapse and written to by Synapse.
|
||||||
|
|
||||||
|
|
||||||
|
#### Synapse version `N + 1`
|
||||||
|
|
||||||
|
```python
|
||||||
|
SCHEMA_VERSION = S + 1
|
||||||
|
SCHEMA_COMPAT_VERSION = ... # unimportant at this stage
|
||||||
|
```
|
||||||
|
|
||||||
|
**Changes:**
|
||||||
|
1.
|
||||||
|
```sql
|
||||||
|
ALTER TABLE mytable ADD COLUMN new_column INTEGER;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Invariants:**
|
||||||
|
1. `old_column` is read by Synapse and written to by Synapse.
|
||||||
|
2. `new_column` is written to by Synapse.
|
||||||
|
|
||||||
|
**Notes:**
|
||||||
|
1. `new_column` can't have a `NOT NULL NOT VALID` constraint yet, because the previous Synapse version did not write to the new column (since we haven't bumped the `SCHEMA_COMPAT_VERSION` yet, we still need to be compatible with the previous version).
|
||||||
|
|
||||||
|
|
||||||
|
#### Synapse version `N + 2`
|
||||||
|
|
||||||
|
```python
|
||||||
|
SCHEMA_VERSION = S + 2
|
||||||
|
SCHEMA_COMPAT_VERSION = S + 1 # this signals that we can't roll back to a time before new_column existed
|
||||||
|
```
|
||||||
|
|
||||||
|
**Changes:**
|
||||||
|
1. On Postgres, add a `NOT VALID` constraint to ensure new rows are compliant. *SQLite does not have such a construct, but it would be unnecessary anyway since there is no way to concurrently perform this migration on SQLite.*
|
||||||
|
```sql
|
||||||
|
ALTER TABLE mytable ADD CONSTRAINT CHECK new_column_not_null (new_column IS NOT NULL) NOT VALID;
|
||||||
|
```
|
||||||
|
2. Start a background update to perform migration: it should gradually run e.g.
|
||||||
|
```sql
|
||||||
|
UPDATE mytable SET new_column = old_column * 100 WHERE 0 < mytable_id AND mytable_id <= 5;
|
||||||
|
```
|
||||||
|
This background update is technically pointless on SQLite, but you must schedule it anyway so that the `portdb` script to migrate to Postgres still works.
|
||||||
|
3. Upon completion of the background update, you should run `VALIDATE CONSTRAINT` on Postgres to turn the `NOT VALID` constraint into a valid one.
|
||||||
|
```sql
|
||||||
|
ALTER TABLE mytable VALIDATE CONSTRAINT new_column_not_null;
|
||||||
|
```
|
||||||
|
This will take some time but does **NOT** hold an exclusive lock over the table.
|
||||||
|
|
||||||
|
**Invariants:**

1. `old_column` is read by Synapse and written to by Synapse.
2. `new_column` is written to by Synapse and new rows always have a non-`NULL` value in this field.

**Notes:**

1. If you wish, you can convert the `CHECK (new_column IS NOT NULL)` to a `NOT NULL` constraint free of charge in Postgres by adding the `NOT NULL` constraint and then dropping the `CHECK` constraint, because Postgres can statically verify that the `NOT NULL` constraint is implied by the `CHECK` constraint without performing a table scan. (See the sketch after this list.)
2. It might be tempting to make version `N + 2` redundant by moving the background update to `N + 1` and delaying adding the `NOT NULL` constraint to `N + 3`, but that would mean the constraint would always be validated in the foreground in `N + 3`. Whereas if the `N + 2` step is kept, the migration in `N + 3` would be fast in the happy case.

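A sketch of note 1 (Postgres only), written against a plain DB-API cursor; the wrapper function is hypothetical, the two statements are the point:

```python
def tighten_new_column_constraint(cur) -> None:
    """Swap the validated CHECK constraint for a plain NOT NULL (Postgres)."""
    # Postgres proves NOT NULL from the existing, validated CHECK constraint,
    # so this does not need a table scan.
    cur.execute("ALTER TABLE mytable ALTER COLUMN new_column SET NOT NULL")
    # The CHECK constraint is now redundant and can be dropped.
    cur.execute("ALTER TABLE mytable DROP CONSTRAINT new_column_not_null")
```
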
#### Synapse version `N + 3`

```python
SCHEMA_VERSION = S + 3
SCHEMA_COMPAT_VERSION = S + 1 # we can't roll back to a time before new_column existed
```

**Changes:**

1. (Postgres) Update the table to populate values of `new_column` in case the background update had not completed. Additionally, `VALIDATE CONSTRAINT` to make the check fully valid.
   ```sql
   -- you ideally want an index on `new_column` or e.g. `(new_column) WHERE new_column IS NULL` first, or perhaps you can find a way to skip this if the `NOT NULL` constraint has already been validated.
   UPDATE mytable SET new_column = old_column * 100 WHERE new_column IS NULL;

   -- this is a no-op if it already ran as part of the background update
   ALTER TABLE mytable VALIDATE CONSTRAINT new_column_not_null;
   ```
2. (SQLite) Recreate the table by precisely following [the 12-step procedure for SQLite table schema changes](https://www.sqlite.org/lang_altertable.html#otheralter).
   During this table rewrite, you should recreate `new_column` as `NOT NULL` and populate any outstanding `NULL` values at the same time.
   Unfortunately, you can't drop `old_column` yet because it must be present for compatibility with the Postgres schema, as needed by `portdb`.
   (Otherwise you could do this all in one go with SQLite!)

**Invariants:**

1. `old_column` is written to by Synapse (but no longer read by Synapse!).
2. `new_column` is read by Synapse and written to by Synapse. Moreover, all rows have a non-`NULL` value in this field, as guaranteed by a schema constraint.

**Notes:**

1. We can't drop `old_column` yet, or even stop writing to it, because that would break a rollback to the previous version of Synapse.
2. Application code can now rely on `new_column` being populated. The remaining steps are only motivated by the wish to clean up old columns.

#### Synapse version `N + 4`

```python
SCHEMA_VERSION = S + 4
SCHEMA_COMPAT_VERSION = S + 3 # we can't roll back to a time before new_column was entirely non-NULL
```

**Invariants:**

1. `old_column` exists but is not written to or read from by Synapse.
2. `new_column` is read by Synapse and written to by Synapse. Moreover, all rows have a non-`NULL` value in this field, as guaranteed by a schema constraint.

**Notes:**

1. We can't drop `old_column` yet because that would break a rollback to the previous version of Synapse. \
   **TODO:** It may be possible to relax this and drop the column straight away as long as the previous version of Synapse detected a rollback occurred and stopped attempting to write to the column. This could possibly be done by checking whether the database's schema compatibility version was `S + 3`.

#### Synapse version `N + 5`

```python
SCHEMA_VERSION = S + 5
SCHEMA_COMPAT_VERSION = S + 4 # we can't roll back to a time before old_column was no longer being touched
```

**Changes:**

1. ```sql
   ALTER TABLE mytable DROP COLUMN old_column;
   ```

@ -260,15 +260,17 @@ doesn't require poetry. (It's what we use in CI too). However, you could try

## ...handle a Dependabot pull request?

Synapse uses Dependabot to keep the `poetry.lock` file up-to-date. When it
creates a pull request a GitHub Action will run to automatically create a changelog
file. Ensure that:
Synapse uses Dependabot to keep the `poetry.lock` and `Cargo.lock` file
up-to-date with the latest releases of our dependencies. The changelog check is
omitted for Dependabot PRs; the release script will include them in the
changelog.

When reviewing a dependabot PR, ensure that:

* the lockfile changes look reasonable;
* the upstream changelog file (linked in the description) doesn't include any
  breaking changes;
* continuous integration passes (due to permissions, the GitHub Actions run on
  the changelog commit will fail, look at the initial commit of the pull request);
* continuous integration passes.

In particular, any updates to the type hints (usually packages which start with `types-`)
should be safe to merge if linting passes.

@ -12,7 +12,7 @@ Note that this schedule might be modified depending on the availability of the

Synapse team, e.g. releases may be skipped to avoid holidays.

Release announcements can be found in the
[release category of the Matrix blog](https://matrix.org/blog/category/releases).
[release category of the Matrix blog](https://matrix.org/category/releases).

## Bugfix releases

@ -34,4 +34,4 @@ be held to be released together.

In some cases, a pre-disclosure of a security release will be issued as a notice
to Synapse operators that there is an upcoming security release. These can be
found in the [security category of the Matrix blog](https://matrix.org/blog/category/security).
found in the [security category of the Matrix blog](https://matrix.org/category/security).

@ -6,7 +6,7 @@ This is a work-in-progress set of notes with two goals:

See also [MSC3902](https://github.com/matrix-org/matrix-spec-proposals/pull/3902).

The key idea is described by [MSC706](https://github.com/matrix-org/matrix-spec-proposals/pull/3902). This allows servers to
The key idea is described by [MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706). This allows servers to
request a lightweight response to the federation `/send_join` endpoint.
This is called a **faster join**, also known as a **partial join**. In these
notes we'll usually use the word "partial" as it matches the database schema.

@ -264,7 +264,7 @@ But don't want to send out sensitive data in other HS's events in this way.

Suppose we discover after resync that we shouldn't have sent out one of our events (not a prev_event) to a target HS. Not much we can do.
What about if we didn't send them an event but should've?
E.g. what if someone joined from a new HS shortly after you did? We wouldn't talk to them.
Could imagine sending out the "Missed" events after the resync but... painful to work out what they shuld have seen if they joined/left.
Could imagine sending out the "Missed" events after the resync but... painful to work out what they should have seen if they joined/left.
Instead, just send them the latest event (if they're still in the room after resync) and let them backfill.(?)
- Don't do this currently.
- If anyone who has received our messages sends a message to a HS we missed, they can backfill our messages

@ -0,0 +1,157 @@

## Streams

Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
https://github.com/matrix-org/synapse/blob/develop/synapse/storage/util/id_generators.py
).
Generally speaking, streams are a series of notifications that something in Synapse's database has changed that the application might need to respond to.
For example:

- The events stream reports new events (PDUs) that Synapse creates, or that Synapse accepts from another homeserver.
- The account data stream reports changes to users' [account data](https://spec.matrix.org/v1.7/client-server-api/#client-config).
- The to-device stream reports when a device has a new [to-device message](https://spec.matrix.org/v1.7/client-server-api/#send-to-device-messaging).

See [`synapse.replication.tcp.streams`](
https://github.com/matrix-org/synapse/blob/develop/synapse/replication/tcp/streams/__init__.py
) for the full list of streams.

It is very helpful to understand the streams mechanism when working on any part of Synapse that needs to respond to changes, especially if those changes are made by different workers.
To that end, let's describe streams formally, paraphrasing from the docstring of [`AbstractStreamIdGenerator`](
https://github.com/matrix-org/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
).

### Definition

A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
Only "writers" can add facts to a stream, and there may be multiple writers.

Each fact has an ID, called its "stream ID".
Readers should only process facts in ascending stream ID order.

Roughly speaking, each stream is backed by a database table.
It should have a `stream_id` (or similar) bigint column holding stream IDs, plus additional columns as necessary to describe the fact.
Typically, a fact is expressed with a single row in its backing table.[^2]
Within a stream, no two facts may have the same `stream_id`.

> _Aside_. Some additional notes on streams' backing tables.
>
> 1. Rich would like to [ditch the backing tables](https://github.com/matrix-org/synapse/issues/13456).
> 2. The backing tables may have other uses.
>    For example, the events table backs the events stream, and is read when processing new events.
>    But old rows are read from the table all the time, whenever Synapse needs to look up some facts about an event.
> 3. Rich suspects that sometimes the stream is backed by multiple tables, so the stream proper is the union of those tables.

Stream writers can "reserve" a stream ID, and then later mark it as having been completed.
Stream writers need to track the completion of each stream fact.
In the happy case, completion means a fact has been written to the stream table.
But unhappy cases (e.g. transaction rollback due to an error) also count as completion.
Once completed, the rows written with that stream ID are fixed, and no new rows
will be inserted with that ID.

### Current stream ID

For any given stream reader (including writers themselves), we may define a per-writer current stream ID:

> The current stream ID _for a writer W_ is the largest stream ID such that
> all transactions added by W with equal or smaller ID have completed.

Similarly, there is a "linear" notion of current stream ID:

> The "linear" current stream ID is the largest stream ID such that
> all facts (added by any writer) with equal or smaller ID have completed.

Because different stream readers A and B learn about new facts at different times, A and B may disagree about current stream IDs.
Put differently: we should think of stream readers as being independent of each other, proceeding through a stream of facts at different rates.

**NB.** For both senses of "current", note that if a writer opens a transaction that never completes, the current stream ID will never advance beyond that writer's last written stream ID.

For single-writer streams, the per-writer current ID and the linear current ID are the same.
Both senses of current ID are monotonic, but they may "skip" or jump over IDs because facts complete out of order.

_Example_.
Consider a single-writer stream which is initially at ID 1.
(A toy implementation of this bookkeeping follows the table below.)

| Action     | Current stream ID | Notes                                           |
|------------|-------------------|-------------------------------------------------|
|            | 1                 |                                                 |
| Reserve 2  | 1                 |                                                 |
| Reserve 3  | 1                 |                                                 |
| Complete 3 | 1                 | current ID unchanged, waiting for 2 to complete |
| Complete 2 | 3                 | current ID jumps from 1 -> 3                    |
| Reserve 4  | 3                 |                                                 |
| Reserve 5  | 3                 |                                                 |
| Reserve 6  | 3                 |                                                 |
| Complete 5 | 3                 |                                                 |
| Complete 4 | 5                 | current ID jumps 3->5, even though 6 is pending |
| Complete 6 | 6                 |                                                 |

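The bookkeeping in this example fits in a few lines of Python. This is an illustration only, not Synapse's implementation (the real, persistent version lives in `id_generators.py`):

```python
class SingleWriterStream:
    """Toy model of a single-writer stream's current stream ID."""

    def __init__(self, start=1):
        self.current = start      # largest ID with everything <= it completed
        self.next_id = start + 1  # next ID to hand out
        self.pending = set()      # reserved but not yet completed

    def reserve(self):
        stream_id = self.next_id
        self.next_id += 1
        self.pending.add(stream_id)
        return stream_id

    def complete(self, stream_id):
        self.pending.discard(stream_id)
        # Advance over the contiguous run of completed IDs, if any.
        while self.current + 1 < self.next_id and self.current + 1 not in self.pending:
            self.current += 1


stream = SingleWriterStream()
stream.reserve(), stream.reserve()  # reserve 2 and 3
stream.complete(3)
assert stream.current == 1          # still waiting for 2 to complete
stream.complete(2)
assert stream.current == 3          # jumps straight from 1 to 3
```
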
### Multi-writer streams

There are two ways to view a multi-writer stream.

1. Treat it as a collection of distinct single-writer streams, one
   for each writer.
2. Treat it as a single stream.

The single stream (option 2) is conceptually simpler, and easier to represent (a single stream id).
However, it requires each reader to know about the entire set of writers, to ensure that readers don't erroneously advance their current stream position too early and miss a fact from an unknown writer.
In contrast, multiple parallel streams (option 1) are more complex, requiring more state to represent (map from writer to stream id).
The payoff for doing so is that readers can "peek" ahead to facts that completed on one writer no matter the state of the others, reducing latency.

Note that a multi-writer stream can be viewed in both ways.
For example, the events stream is treated as multiple single-writer streams (option 1) by the sync handler, so that events are sent to clients as soon as possible.
But the background process that works through events treats them as a single linear stream.

Another useful example is the cache invalidation stream.
The facts this stream holds are instructions to "you should now invalidate these cache entries".
We only ever treat this as multiple single-writer streams as there is no important ordering between cache invalidations.
(Invalidations are self-contained facts, and they commute/are idempotent.)

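A toy reader-side sketch of the two views: track one position per writer (option 1) and derive the single linear position (option 2) as their minimum. Synapse's real implementation is `MultiWriterIdGenerator` in `id_generators.py`; the writer names here are made up.

```python
# Positions this reader has seen for each writer.
positions = {"persister1": 10, "persister2": 7, "persister3": 12}


def advance(writer, new_position):
    # Per-writer positions are monotonic: never move a writer backwards.
    positions[writer] = max(positions.get(writer, 0), new_position)


def linear_position():
    # Only safe if `positions` covers every writer: a fact from an unknown
    # writer could still be incomplete below this ID.
    return min(positions.values())


advance("persister2", 11)
assert linear_position() == 10  # now limited by persister1
```
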
### Writing to streams

Writers need to track:
- their current position (i.e. their own per-writer stream ID);
- their facts currently awaiting completion.

At startup,
- the current position of that writer can be found by querying the database (which suggests that facts need to be written to the database atomically, in a transaction); and
- there are no facts awaiting completion.

To reserve a stream ID, call [`nextval`](https://www.postgresql.org/docs/current/functions-sequence.html) on the appropriate postgres sequence.

To write a fact to the stream: insert the appropriate rows into the appropriate backing table.

To complete a fact, first remove it from your map of facts currently awaiting completion.
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
Upon doing so it should emit an `RDATA` message[^3] for every fact between the old and the new stream ID.

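Putting the above together, a single write might look roughly like this with a plain DB-API connection (Synapse's storage layer wraps all of this; the table and sequence names are hypothetical):

```python
def write_fact(conn, data):
    """Reserve a stream ID and persist one fact, atomically."""
    with conn.cursor() as cur:
        # 1. Reserve a stream ID from the Postgres sequence.
        cur.execute("SELECT nextval('mystream_sequence')")
        (stream_id,) = cur.fetchone()
        # 2. Write the fact to the backing table in the same transaction, so
        #    the writer's position can be recovered from the database after a
        #    restart.
        cur.execute(
            "INSERT INTO mystream (stream_id, data) VALUES (%s, %s)",
            (stream_id, data),
        )
    conn.commit()
    # 3. The fact is now complete: if no earlier fact is still pending, the
    #    writer advances its current position and emits RDATA for each fact
    #    between the old and new positions.
    return stream_id
```
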
### Subscribing to streams

Readers need to track the current position of every writer.

At startup, they can find this by contacting each writer with a `REPLICATE` message,
requesting that all writers reply describing their current position in their streams.
Writers reply with a `POSITION` message.

To learn about new facts, readers should listen for `RDATA` messages and process them to respond to the new fact.
The `RDATA` itself is not a self-contained representation of the fact;
readers will have to query the stream tables for the full details.
Readers must also advance their record of the writer's current position for that stream.

## Summary

In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.

---

[^1]: We use the word _fact_ here for two reasons.
      Firstly, the word "event" is already heavily overloaded (PDUs, EDUs, account data, ...) and we don't need to make that worse.
      Secondly, "fact" emphasises that the things we append to a stream cannot change after the fact.

[^2]: A fact might be expressed with 0 rows, e.g. if we opened a transaction to persist an event, but failed and rolled the transaction back before marking the fact as completed.
      In principle a fact might be expressed with 2 or more rows; if so, each of those rows should share the fact's stream ID.

[^3]: This communication used to happen directly with the writers [over TCP](../../tcp_replication.md);
      nowadays it's done via Redis's Pubsub.

@ -86,7 +86,7 @@ So we have stopped processing the request (and will probably go on to

start processing the next), without clearing the logcontext.

To circumvent this problem, synapse code assumes that, wherever you have
an awaitable, you will want to `await` it. To that end, whereever
an awaitable, you will want to `await` it. To that end, wherever
functions return awaitables, we adopt the following conventions:

**Rules for functions returning awaitables:**

@ -8,8 +8,7 @@ and allow server and room admins to configure how long messages should

be kept in a homeserver's database before being purged from it.
**Please note that, as this feature isn't part of the Matrix
specification yet, this implementation is to be considered as
experimental. There are known bugs which may cause database corruption.
Proceed with caution.**
experimental.**

A message retention policy is mainly defined by its `max_lifetime`
parameter, which defines how long a message can be kept around after

@ -46,6 +46,9 @@ instead.

If the authentication is unsuccessful, the module must return `None`.

Note that the user is not automatically registered; the `register_user(..)` method of
the [module API](writing_a_module.html) can be used to lazily create users, as sketched below.
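
For instance, an auth checker might lazily create the account on first successful login. A rough sketch, assuming the module holds a `ModuleApi` reference in `self._api`, that `_backend_accepts` is the module's own (hypothetical) credential check, and that `username` arrives as a full user ID; `check_user_exists` and `register_user` are module API methods, but check their exact signatures against your Synapse version:

```python
async def check_my_login(self, username, login_type, login_dict):
    if not await self._backend_accepts(username, login_dict.get("password")):
        return None  # authentication failed

    # Assume `username` is a full user ID, e.g. "@alice:example.com".
    if not await self._api.check_user_exists(username):
        # First successful login: lazily create the account.
        localpart = username[1:].split(":", 1)[0]
        await self._api.register_user(localpart=localpart)

    # Auth checkers return the user ID plus an optional post-login callback.
    return username, None
```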

If multiple modules register an auth checker for the same login type but with different
fields, Synapse will refuse to start.

@ -348,6 +348,42 @@ callback returns `False`, Synapse falls through to the next one. The value of th
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `check_login_for_spam`

_First introduced in Synapse v1.87.0_

```python
async def check_login_for_spam(
    user_id: str,
    device_id: Optional[str],
    initial_display_name: Optional[str],
    request_info: Collection[Tuple[Optional[str], str]],
    auth_provider_id: Optional[str] = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```

Called when a user logs in.

The arguments passed to this callback are:

* `user_id`: The user ID the user is logging in with
* `device_id`: The device ID the user is re-logging into.
* `initial_display_name`: The device display name, if any.
* `request_info`: A collection of tuples, whose first item is a user agent and whose
  second item is an IP address. These user agents and IP addresses are the ones that were
  used during the login process.
* `auth_provider_id`: The identifier of the SSO authentication provider, if any.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
be used. If this happens, Synapse will not call any of the subsequent implementations of
this callback.

*Note:* This will not be called when a user registers.

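Below is a short sketch of a module implementing this callback. `register_spam_checker_callbacks` is the standard registration hook; the banned IP range and class name are invented for illustration:

```python
from typing import Collection, Optional, Tuple, Union

from synapse.module_api import NOT_SPAM, ModuleApi, errors


class IPRangeLoginChecker:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        api.register_spam_checker_callbacks(
            check_login_for_spam=self.check_login_for_spam,
        )

    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union["NOT_SPAM", "errors.Codes"]:
        # Refuse logins coming from a (made-up) banned IP range.
        for _user_agent, ip in request_info:
            if ip.startswith("203.0.113."):
                return errors.Codes.FORBIDDEN
        return NOT_SPAM
```
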
## Example

The example below is a module that implements the spam checker callback

@ -249,7 +249,7 @@ of `COLLATE` and `CTYPE` unless the config flag `allow_unsafe_locale`, found in

underneath the database, or if a different version of the locale is used on any
replicas.

If you have a databse with an unsafe locale, the safest way to fix the issue is to dump the database and recreate it with
If you have a database with an unsafe locale, the safest way to fix the issue is to dump the database and recreate it with
the correct locale parameter (as shown above). It is also possible to change the
parameters on a live database and run a `REINDEX` on the entire database,
however extreme care must be taken to avoid database corruption.

@ -95,7 +95,7 @@ matrix.example.com {

}

example.com:8448 {
    reverse_proxy localhost:8008
    reverse_proxy /_matrix/* localhost:8008
}
```

@ -68,9 +68,7 @@ root:

# Write logs to the `buffer` handler, which will buffer them together in memory,
# then write them to a file.
#
# Replace "buffer" with "console" to log to stderr instead. (Note that you'll
# also need to update the configuration for the `twisted` logger above, in
# this case.)
# Replace "buffer" with "console" to log to stderr instead.
#
handlers: [buffer]

@ -37,7 +37,7 @@ Dockerfile to automate a synapse server in a single Docker image, at

<https://hub.docker.com/r/avhost/docker-matrix/tags/>

Slavi Pantaleev has created an Ansible playbook,
which installs the offical Docker image of Matrix Synapse
which installs the official Docker image of Matrix Synapse
along with many other Matrix-related services (Postgres database, Element, coturn,
ma1sd, SSL support, etc.).
For more details, see

@ -93,7 +93,7 @@ For `bookworm` and `sid`, it can be installed simply with:

sudo apt install matrix-synapse
```

Synapse is also avaliable in `bullseye-backports`. Please
Synapse is also available in `bullseye-backports`. Please
see the [Debian documentation](https://backports.debian.org/Instructions/)
for information on how to use backports.

@ -135,8 +135,8 @@ Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 reposi

#### ArchLinux

The quickest way to get up and running with ArchLinux is probably with the community package
The quickest way to get up and running with ArchLinux is probably with the package provided by ArchLinux
<https://archlinux.org/packages/community/x86_64/matrix-synapse/>, which should pull in most of
<https://archlinux.org/packages/extra/x86_64/matrix-synapse/>, which should pull in most of
the necessary dependencies.

pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):

@ -155,6 +155,14 @@ sudo pip uninstall py-bcrypt

sudo pip install py-bcrypt
```

#### Alpine Linux

6543 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:

```sh
sudo apk add synapse
```

#### Void Linux

Synapse can be found in the void repositories as

|

System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.7 or later, up to Python 3.11.
- Python 3.8 or later, up to Python 3.11.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

If building on an uncommon architecture for which pre-built wheels are

@ -3,7 +3,7 @@

A structured logging system can be useful when your logs are destined for a
machine to parse and process. By maintaining its machine-readable characteristics,
it enables more efficient searching and aggregations when consumed by software
such as the "ELK stack".
such as the [ELK stack](https://opensource.com/article/18/9/open-source-log-aggregation-tools).

Synapse's structured logging system is configured via the file that Synapse's
`log_config` config option points to. The file should include a formatter which

@ -1,8 +1,4 @@

worker_app: synapse.app.generic_worker
worker_name: background_worker

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/background-worker-log.yaml

@ -1,9 +1,5 @@

worker_app: synapse.app.generic_worker
worker_name: event_persister1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http