Codebase list matrix-synapse / f63f4d3
New upstream version 1.38.0 Andrej Shadura 2 years ago
112 changed file(s) with 5829 addition(s) and 3163 deletion(s). Raw diff Collapse all Expand all
66 - develop
77 # For documentation specific to a release
88 - 'release-v*'
9 # stable docs
10 - master
911
1012 workflow_dispatch:
1113
2224 mdbook-version: '0.4.9'
2325
2426 - name: Build the documentation
25 run: mdbook build
27 # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
28 # However, we're using docs/README.md for other purposes and need to pick a new page
29 # as the default. Let's opt for the welcome page instead.
30 run: |
31 mdbook build
32 cp book/welcome_and_overview.html book/index.html
2633
27 # Deploy to the latest documentation directories
28 - name: Deploy latest documentation
34 # Figure out the target directory.
35 #
36 # The target directory depends on the name of the branch
37 #
38 - name: Get the target directory name
39 id: vars
40 run: |
41 # first strip the 'refs/heads/' prefix with some shell foo
42 branch="${GITHUB_REF#refs/heads/}"
43
44 case $branch in
45 release-*)
46 # strip 'release-' from the name for release branches.
47 branch="${branch#release-}"
48 ;;
49 master)
50 # deploy to "latest" for the master branch.
51 branch="latest"
52 ;;
53 esac
54
55 # finally, set the 'branch-version' var.
56 echo "::set-output name=branch-version::$branch"
57
58 # Deploy to the target directory.
59 - name: Deploy to gh pages
2960 uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
3061 with:
3162 github_token: ${{ secrets.GITHUB_TOKEN }}
3263 keep_files: true
3364 publish_dir: ./book
34 destination_dir: ./develop
35
36 - name: Get the current Synapse version
37 id: vars
38 # The $GITHUB_REF value for a branch looks like `refs/heads/release-v1.2`. We do some
39 # shell magic to remove the "refs/heads/release-v" bit from this, to end up with "1.2",
40 # our major/minor version number, and set this to a var called `branch-version`.
41 #
42 # We then use some python to get Synapse's full version string, which may look
43 # like "1.2.3rc4". We set this to a var called `synapse-version`. We use this
44 # to determine if this release is still an RC, and if so block deployment.
45 run: |
46 echo ::set-output name=branch-version::${GITHUB_REF#refs/heads/release-v}
47 echo ::set-output name=synapse-version::`python3 -c 'import synapse; print(synapse.__version__)'`
48
49 # Deploy to the version-specific directory
50 - name: Deploy release-specific documentation
51 # We only carry out this step if we're running on a release branch,
52 # and the current Synapse version does not have "rc" in the name.
53 #
54 # The result is that only full releases are deployed, but can be
55 # updated if the release branch gets retroactive fixes.
56 if: ${{ startsWith( github.ref, 'refs/heads/release-v' ) && !contains( steps.vars.outputs.synapse-version, 'rc') }}
57 uses: peaceiris/actions-gh-pages@v3
58 with:
59 github_token: ${{ secrets.GITHUB_TOKEN }}
60 keep_files: true
61 publish_dir: ./book
62 # The resulting documentation will end up in a directory named `vX.Y`.
63 destination_dir: ./v${{ steps.vars.outputs.branch-version }}
65 destination_dir: ./${{ steps.vars.outputs.branch-version }}
0 # GitHub actions workflow which builds the release artifacts.
1
2 name: Build release artifacts
3
4 on:
5 push:
6 # we build on develop and release branches to (hopefully) get early warning
7 # of things breaking
8 branches: ["develop", "release-*"]
9
10 # we also rebuild on tags, so that we can be sure of picking the artifacts
11 # from the right tag.
12 tags: ["v*"]
13
14 permissions:
15 contents: write
16
17 jobs:
18 # first get the list of distros to build for.
19 get-distros:
20 runs-on: ubuntu-latest
21 steps:
22 - uses: actions/checkout@v2
23 - uses: actions/setup-python@v2
24 - id: set-distros
25 run: |
26 echo "::set-output name=distros::$(scripts-dev/build_debian_packages --show-dists-json)"
27 # map the step outputs to job outputs
28 outputs:
29 distros: ${{ steps.set-distros.outputs.distros }}
30
31 # now build the packages with a matrix build.
32 build-debs:
33 needs: get-distros
34 name: "Build .deb packages"
35 runs-on: ubuntu-latest
36 strategy:
37 matrix:
38 distro: ${{ fromJson(needs.get-distros.outputs.distros) }}
39
40 steps:
41 - uses: actions/checkout@v2
42 with:
43 path: src
44 - uses: actions/setup-python@v2
45 - run: ./src/scripts-dev/build_debian_packages "${{ matrix.distro }}"
46 - uses: actions/upload-artifact@v2
47 with:
48 name: debs
49 path: debs/*
50
51 build-sdist:
52 name: "Build pypi distribution files"
53 runs-on: ubuntu-latest
54 steps:
55 - uses: actions/checkout@v2
56 - uses: actions/setup-python@v2
57 - run: pip install wheel
58 - run: |
59 python setup.py sdist bdist_wheel
60 - uses: actions/upload-artifact@v2
61 with:
62 name: python-dist
63 path: dist/*
64
65 # if it's a tag, create a release and attach the artifacts to it
66 attach-assets:
67 name: "Attach assets to release"
68 if: startsWith(github.ref, 'refs/tags/')
69 needs:
70 - build-debs
71 - build-sdist
72 runs-on: ubuntu-latest
73 steps:
74 - name: Download all workflow run artifacts
75 uses: actions/download-artifact@v2
76 - name: Build a tarball for the debs
77 run: tar -cvJf debs.tar.xz debs
78 - name: Attach to release
79 uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
80 env:
81 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
82 with:
83 files: |
84 python-dist/*
85 debs.tar.xz
86 # if it's not already published, keep the release as a draft.
87 draft: true
88 # mark it as a prerelease if the tag contains 'rc'.
89 prerelease: ${{ contains(github.ref, 'rc') }}
0 Synapse 1.38.0 (2021-07-13)
1 ===========================
2
3 This release includes a database schema update which could result in elevated disk usage. See the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#upgrading-to-v1380) for more information.
4
5 No significant changes since 1.38.0rc3.
6
7
8 Synapse 1.38.0rc3 (2021-07-13)
9 ==============================
10
11 Internal Changes
12 ----------------
13
14 - Build the Debian packages in CI. ([\#10247](https://github.com/matrix-org/synapse/issues/10247), [\#10379](https://github.com/matrix-org/synapse/issues/10379))
15
16
17 Synapse 1.38.0rc2 (2021-07-09)
18 ==============================
19
20 Bugfixes
21 --------
22
23 - Fix bug where inbound federation in a room could be delayed due to not correctly dropping a lock. Introduced in v1.37.1. ([\#10336](https://github.com/matrix-org/synapse/issues/10336))
24
25
26 Improved Documentation
27 ----------------------
28
29 - Update links to documentation in the sample config. Contributed by @dklimpel. ([\#10287](https://github.com/matrix-org/synapse/issues/10287))
30 - Fix broken links in [INSTALL.md](INSTALL.md). Contributed by @dklimpel. ([\#10331](https://github.com/matrix-org/synapse/issues/10331))
31
32
33 Synapse 1.38.0rc1 (2021-07-06)
34 ==============================
35
36 Features
37 --------
38
39 - Implement refresh tokens as specified by [MSC2918](https://github.com/matrix-org/matrix-doc/pull/2918). ([\#9450](https://github.com/matrix-org/synapse/issues/9450))
40 - Add support for evicting cache entries based on last access time. ([\#10205](https://github.com/matrix-org/synapse/issues/10205))
41 - Omit empty fields from the `/sync` response. Contributed by @deepbluev7. ([\#10214](https://github.com/matrix-org/synapse/issues/10214))
42 - Improve validation on federation `send_{join,leave,knock}` endpoints. ([\#10225](https://github.com/matrix-org/synapse/issues/10225), [\#10243](https://github.com/matrix-org/synapse/issues/10243))
43 - Add SSO `external_ids` to the Query User Account admin API. ([\#10261](https://github.com/matrix-org/synapse/issues/10261))
44 - Mark events received over federation which fail a spam check as "soft-failed". ([\#10263](https://github.com/matrix-org/synapse/issues/10263))
45 - Add metrics for new inbound federation staging area. ([\#10284](https://github.com/matrix-org/synapse/issues/10284))
46 - Add script to print information about recently registered users. ([\#10290](https://github.com/matrix-org/synapse/issues/10290))
47
48
49 Bugfixes
50 --------
51
52 - Fix a long-standing bug which meant that invite rejections and knocks were not sent out over federation in a timely manner. ([\#10223](https://github.com/matrix-org/synapse/issues/10223))
53 - Fix a bug introduced in v1.26.0 where only users who have set profile information could be deactivated with erasure enabled. ([\#10252](https://github.com/matrix-org/synapse/issues/10252))
54 - Fix a long-standing bug where Synapse would return errors after 2<sup>31</sup> events were handled by the server. ([\#10264](https://github.com/matrix-org/synapse/issues/10264), [\#10267](https://github.com/matrix-org/synapse/issues/10267), [\#10282](https://github.com/matrix-org/synapse/issues/10282), [\#10286](https://github.com/matrix-org/synapse/issues/10286), [\#10291](https://github.com/matrix-org/synapse/issues/10291), [\#10314](https://github.com/matrix-org/synapse/issues/10314), [\#10326](https://github.com/matrix-org/synapse/issues/10326))
55 - Fix the prometheus `synapse_federation_server_pdu_process_time` metric. Broke in v1.37.1. ([\#10279](https://github.com/matrix-org/synapse/issues/10279))
56 - Ensure that inbound events from federation that were being processed when Synapse was restarted get promptly processed on start up. ([\#10303](https://github.com/matrix-org/synapse/issues/10303))
57
58
59 Improved Documentation
60 ----------------------
61
62 - Move the upgrade notes to [docs/upgrade.md](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md) and convert them to markdown. ([\#10166](https://github.com/matrix-org/synapse/issues/10166))
63 - Choose Welcome & Overview as the default page for synapse documentation website. ([\#10242](https://github.com/matrix-org/synapse/issues/10242))
64 - Adjust the URL in the README.rst file to point to irc.libera.chat. ([\#10258](https://github.com/matrix-org/synapse/issues/10258))
65 - Fix homeserver config option name in presence router documentation. ([\#10288](https://github.com/matrix-org/synapse/issues/10288))
66 - Fix link pointing at the wrong section in the modules documentation page. ([\#10302](https://github.com/matrix-org/synapse/issues/10302))
67
68
69 Internal Changes
70 ----------------
71
72 - Drop `Origin` and `Accept` from the value of the `Access-Control-Allow-Headers` response header. ([\#10114](https://github.com/matrix-org/synapse/issues/10114))
73 - Add type hints to the federation servlets. ([\#10213](https://github.com/matrix-org/synapse/issues/10213))
74 - Improve the reliability of auto-joining remote rooms. ([\#10237](https://github.com/matrix-org/synapse/issues/10237))
75 - Update the release script to use the semver terminology and determine the release branch based on the next version. ([\#10239](https://github.com/matrix-org/synapse/issues/10239))
76 - Fix type hints for computing auth events. ([\#10253](https://github.com/matrix-org/synapse/issues/10253))
77 - Improve the performance of the spaces summary endpoint by only recursing into spaces (and not rooms in general). ([\#10256](https://github.com/matrix-org/synapse/issues/10256))
78 - Move event authentication methods from `Auth` to `EventAuthHandler`. ([\#10268](https://github.com/matrix-org/synapse/issues/10268))
79 - Re-enable a SyTest after it has been fixed. ([\#10292](https://github.com/matrix-org/synapse/issues/10292))
80
81
082 Synapse 1.37.1 (2021-06-30)
183 ===========================
284
774856 Synapse 1.29.0 (2021-03-08)
775857 ===========================
776858
777 Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
859 Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see the [upgrade notes](docs/upgrade.md#upgrading-to-v1290) for more details on this change.
778860
779861
780862 No significant changes.
839921
840922 Note that this release drops support for ARMv7 in the official Docker images, due to repeated problems building for ARMv7 (and the associated maintenance burden this entails).
841923
842 This release also fixes the documentation included in v1.27.0 around the callback URI for SAML2 identity providers. If your server is configured to use single sign-on via a SAML2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
924 This release also fixes the documentation included in v1.27.0 around the callback URI for SAML2 identity providers. If your server is configured to use single sign-on via a SAML2 IdP, you may need to make configuration changes. Please review the [upgrade notes](docs/upgrade.md) for more details on these changes.
843925
844926
845927 Internal Changes
9381020
9391021 Note that this release includes a change in Synapse to use Redis as a cache ─ as well as a pub/sub mechanism ─ if Redis support is enabled for workers. No action is needed by server administrators, and we do not expect resource usage of the Redis instance to change dramatically.
9401022
941 This release also changes the callback URI for OpenID Connect (OIDC) and SAML2 identity providers. If your server is configured to use single sign-on via an OIDC/OAuth2 or SAML2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
942
943 This release also changes escaping of variables in the HTML templates for SSO or email notifications. If you have customised these templates, please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
1023 This release also changes the callback URI for OpenID Connect (OIDC) and SAML2 identity providers. If your server is configured to use single sign-on via an OIDC/OAuth2 or SAML2 IdP, you may need to make configuration changes. Please review the [upgrade notes](docs/upgrade.md) for more details on these changes.
1024
1025 This release also changes escaping of variables in the HTML templates for SSO or email notifications. If you have customised these templates, please review the [upgrade notes](docs/upgrade.md) for more details on these changes.
9441026
9451027
9461028 Bugfixes
10441126 ===========================
10451127
10461128 This release brings a new schema version for Synapse and rolling back to a previous
1047 version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
1129 version is not trivial. Please review the [upgrade notes](docs/upgrade.md) for more details
10481130 on these changes and for general upgrade guidance.
10491131
10501132 No significant changes since 1.26.0rc2.
10711153 ==============================
10721154
10731155 This release brings a new schema version for Synapse and rolling back to a previous
1074 version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
1156 version is not trivial. Please review the [upgrade notes](docs/upgrade.md) for more details
10751157 on these changes and for general upgrade guidance.
10761158
10771159 Features
11741256
11751257 The website https://endoflife.date/ has convenient summaries of the support schedules for projects like [Python](https://endoflife.date/python) and [PostgreSQL](https://endoflife.date/postgresql).
11761258
1177 If you are unable to upgrade your environment to a supported version of Python or Postgres, we encourage you to consider using the [Synapse Docker images](./INSTALL.md#docker-images-and-ansible-playbooks) instead.
1259 If you are unable to upgrade your environment to a supported version of Python or
1260 Postgres, we encourage you to consider using the
1261 [Synapse Docker images](https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks)
1262 instead.
11781263
11791264 ### Transition Period
11801265
13171402 * Administrators using the [`matrix.org` Docker
13181403 image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu
13191404 packages from
1320 `matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages)
1405 `matrix.org`](https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages)
13211406 should ensure that they have version 1.24.0 or 1.23.1 installed: these images include
13221407 the updated packages.
13231408 * Administrators who have [installed Synapse from
1324 source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source)
1409 source](https://matrix-org.github.io/synapse/latest/setup/installation.html#installing-from-source)
13251410 should upgrade the cryptography package within their virtualenv by running:
13261411 ```sh
13271412 <path_to_virtualenv>/bin/pip install 'cryptography>=3.3'
13631448 * Administrators using the [`matrix.org` Docker
13641449 image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu
13651450 packages from
1366 `matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages)
1451 `matrix.org`](https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages)
13671452 should ensure that they have version 1.24.0 or 1.23.1 installed: these images include
13681453 the updated packages.
13691454 * Administrators who have [installed Synapse from
1370 source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source)
1455 source](https://matrix-org.github.io/synapse/latest/setup/installation.html#installing-from-source)
13711456 should upgrade the cryptography package within their virtualenv by running:
13721457 ```sh
13731458 <path_to_virtualenv>/bin/pip install 'cryptography>=3.3'
14771562 Synapse 1.23.0 (2020-11-18)
14781563 ===========================
14791564
1480 This release changes the way structured logging is configured. See the [upgrade notes](UPGRADE.rst#upgrading-to-v1230) for details.
1565 This release changes the way structured logging is configured. See the [upgrade notes](docs/upgrade.md#upgrading-to-v1230) for details.
14811566
14821567 **Note**: We are aware of a trivially exploitable denial of service vulnerability in versions of Synapse prior to 1.20.0. Complete details will be disclosed on Monday, November 23rd. If you have not upgraded recently, please do so.
14831568
20802165 Removal warning
20812166 ---------------
20822167
2083 As outlined in the [previous release](https://github.com/matrix-org/synapse/releases/tag/v1.18.0), we are no longer publishing Docker images with the `-py3` tag suffix. On top of that, we have also removed the `latest-py3` tag. Please see [the announcement in the upgrade notes for 1.18.0](https://github.com/matrix-org/synapse/blob/develop/UPGRADE.rst#upgrading-to-v1180).
2168 As outlined in the [previous release](https://github.com/matrix-org/synapse/releases/tag/v1.18.0),
2169 we are no longer publishing Docker images with the `-py3` tag suffix. On top of that, we have also removed the
2170 `latest-py3` tag. Please see
2171 [the announcement in the upgrade notes for 1.18.0](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1180).
20842172
20852173
20862174 Synapse 1.19.0rc1 (2020-08-13)
21112199 Updates to the Docker image
21122200 ---------------------------
21132201
2114 - We no longer publish Docker images with the `-py3` tag suffix, as [announced in the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/UPGRADE.rst#upgrading-to-v1180). ([\#8056](https://github.com/matrix-org/synapse/issues/8056))
2202 - We no longer publish Docker images with the `-py3` tag suffix, as [announced in the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1180). ([\#8056](https://github.com/matrix-org/synapse/issues/8056))
21152203
21162204
21172205 Improved Documentation
26692757 to be incomplete or empty if Synapse was upgraded directly from v1.2.1 or
26702758 earlier, to versions between v1.4.0 and v1.12.x.
26712759
2672 Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes
2760 Please review the [upgrade notes](docs/upgrade.md) for more details on these changes
26732761 and for general upgrade guidance.
26742762
26752763
27702858 - Fix bad error handling that would cause Synapse to crash if it's provided with a YAML configuration file that's either empty or doesn't parse into a key-value map. ([\#7341](https://github.com/matrix-org/synapse/issues/7341))
27712859 - Fix incorrect metrics reporting for `renew_attestations` background task. ([\#7344](https://github.com/matrix-org/synapse/issues/7344))
27722860 - Prevent non-federating rooms from appearing in responses to federated `POST /publicRoom` requests when a filter was included. ([\#7367](https://github.com/matrix-org/synapse/issues/7367))
2773 - Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](UPGRADE.rst#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
2861 - Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
27742862 - Fix bug in `EventContext.deserialize`. ([\#7393](https://github.com/matrix-org/synapse/issues/7393))
27752863
27762864
29203008 and which may take some time (several hours in the case of a large
29213009 server). Synapse will not respond to HTTP requests while this update is taking
29223010 place. For information on seeing if you are affected, and a workaround if you
2923 are, see the [upgrade notes](UPGRADE.rst#upgrading-to-v1120).
3011 are, see the [upgrade notes](docs/upgrade.md#upgrading-to-v1120).
29243012
29253013 Security advisory
29263014 -----------------
29433031 * Administrators using the [`matrix.org` Docker
29443032 image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu
29453033 packages from
2946 `matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages)
3034 `matrix.org`](https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages)
29473035 should ensure that they have version 1.12.0 installed: these images include
29483036 Twisted 20.3.0.
29493037 * Administrators who have [installed Synapse from
2950 source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source)
3038 source](https://matrix-org.github.io/synapse/latest/setup/installation.html#installing-from-source)
29513039 should upgrade Twisted within their virtualenv by running:
29523040 ```sh
29533041 <path_to_virtualenv>/bin/pip install 'Twisted>=20.3.0'
34733561 Synapse 1.7.0 (2019-12-13)
34743562 ==========================
34753563
3476 This release changes the default settings so that only local authenticated users can query the server's room directory. See the [upgrade notes](UPGRADE.rst#upgrading-to-v170) for details.
3564 This release changes the default settings so that only local authenticated users can query the server's room directory. See the [upgrade notes](docs/upgrade.md#upgrading-to-v170) for details.
34773565
34783566 Support for SQLite versions before 3.11 is now deprecated. A future release will refuse to start if used with an SQLite version before 3.11.
34793567
38373925 =============================
38383926
38393927 Note that this release includes significant changes around 3pid
3840 verification. Administrators are reminded to review the [upgrade notes](UPGRADE.rst#upgrading-to-v140).
3928 verification. Administrators are reminded to review the [upgrade notes](docs/upgrade.md#upgrading-to-v140).
38413929
38423930 Features
38433931 --------
42134301 ==========================
42144302
42154303 As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
4216 See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
4304 See the [upgrade notes](docs/upgrade.md#upgrading-to-v110) for more details.
42174305
42184306 This release also deprecates the use of environment variables to configure the
42194307 docker image. See the [docker README](https://github.com/matrix-org/synapse/blob/release-v1.1.0/docker/README.md#legacy-dynamic-configuration-file-support)
42434331 =============================
42444332
42454333 As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
4246 See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
4334 See the [upgrade notes](docs/upgrade.md#upgrading-to-v110) for more details.
42474335
42484336 Features
42494337 --------
50155103 remains experimental.
50165104
50175105 We recommend upgrading to Python 3, but make sure to read the [upgrade
5018 notes](UPGRADE.rst#upgrading-to-v0340) when doing so.
5106 notes](docs/upgrade.md#upgrading-to-v0340) when doing so.
50195107
50205108 Features
50215109 --------
00 # Installation Instructions
11
2 There are 3 steps to follow under **Installation Instructions**.
2 This document has moved to the
3 [Synapse documentation website](https://matrix-org.github.io/synapse/latest/setup/installation.html).
4 Please update your links.
35
4 - [Installation Instructions](#installation-instructions)
5 - [Choosing your server name](#choosing-your-server-name)
6 - [Installing Synapse](#installing-synapse)
7 - [Installing from source](#installing-from-source)
8 - [Platform-specific prerequisites](#platform-specific-prerequisites)
9 - [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
10 - [ArchLinux](#archlinux)
11 - [CentOS/Fedora](#centosfedora)
12 - [macOS](#macos)
13 - [OpenSUSE](#opensuse)
14 - [OpenBSD](#openbsd)
15 - [Windows](#windows)
16 - [Prebuilt packages](#prebuilt-packages)
17 - [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
18 - [Debian/Ubuntu](#debianubuntu)
19 - [Matrix.org packages](#matrixorg-packages)
20 - [Downstream Debian packages](#downstream-debian-packages)
21 - [Downstream Ubuntu packages](#downstream-ubuntu-packages)
22 - [Fedora](#fedora)
23 - [OpenSUSE](#opensuse-1)
24 - [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
25 - [ArchLinux](#archlinux-1)
26 - [Void Linux](#void-linux)
27 - [FreeBSD](#freebsd)
28 - [OpenBSD](#openbsd-1)
29 - [NixOS](#nixos)
30 - [Setting up Synapse](#setting-up-synapse)
31 - [Using PostgreSQL](#using-postgresql)
32 - [TLS certificates](#tls-certificates)
33 - [Client Well-Known URI](#client-well-known-uri)
34 - [Email](#email)
35 - [Registering a user](#registering-a-user)
36 - [Setting up a TURN server](#setting-up-a-turn-server)
37 - [URL previews](#url-previews)
38 - [Troubleshooting Installation](#troubleshooting-installation)
39
40
41 ## Choosing your server name
42
43 It is important to choose the name for your server before you install Synapse,
44 because it cannot be changed later.
45
46 The server name determines the "domain" part of user-ids for users on your
47 server: these will all be of the format `@user:my.domain.name`. It also
48 determines how other matrix servers will reach yours for federation.
49
50 For a test configuration, set this to the hostname of your server. For a more
51 production-ready setup, you will probably want to specify your domain
52 (`example.com`) rather than a matrix-specific hostname here (in the same way
53 that your email address is probably `user@example.com` rather than
54 `user@email.example.com`) - but doing so may require more advanced setup: see
55 [Setting up Federation](docs/federate.md).
56
57 ## Installing Synapse
58
59 ### Installing from source
60
61 (Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
62
63 When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
64
65 System requirements:
66
67 - POSIX-compliant system (tested on Linux & OS X)
68 - Python 3.5.2 or later, up to Python 3.9.
69 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
70
71
72 To install the Synapse homeserver run:
73
74 ```sh
75 mkdir -p ~/synapse
76 virtualenv -p python3 ~/synapse/env
77 source ~/synapse/env/bin/activate
78 pip install --upgrade pip
79 pip install --upgrade setuptools
80 pip install matrix-synapse
81 ```
82
83 This will download Synapse from [PyPI](https://pypi.org/project/matrix-synapse)
84 and install it, along with the python libraries it uses, into a virtual environment
85 under `~/synapse/env`. Feel free to pick a different directory if you
86 prefer.
87
88 This Synapse installation can then be later upgraded by using pip again with the
89 update flag:
90
91 ```sh
92 source ~/synapse/env/bin/activate
93 pip install -U matrix-synapse
94 ```
95
96 Before you can start Synapse, you will need to generate a configuration
97 file. To do this, run (in your virtualenv, as before):
98
99 ```sh
100 cd ~/synapse
101 python -m synapse.app.homeserver \
102 --server-name my.domain.name \
103 --config-path homeserver.yaml \
104 --generate-config \
105 --report-stats=[yes|no]
106 ```
107
108 ... substituting an appropriate value for `--server-name`.
109
110 This command will generate you a config file that you can then customise, but it will
111 also generate a set of keys for you. These keys will allow your homeserver to
112 identify itself to other homeservers, so don't lose or delete them. It would be
113 wise to back them up somewhere safe. (If, for whatever reason, you do need to
114 change your homeserver's keys, you may find that other homeservers have the
115 old key cached. If you update the signing key, you should change the name of the
116 key in the `<server name>.signing.key` file (the second word) to something
117 different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
118
119 To actually run your new homeserver, pick a working directory for Synapse to
120 run (e.g. `~/synapse`), and:
121
122 ```sh
123 cd ~/synapse
124 source env/bin/activate
125 synctl start
126 ```
127
128 #### Platform-specific prerequisites
129
130 Synapse is written in Python but some of the libraries it uses are written in
131 C. So before we can install Synapse itself we need a working C compiler and the
132 header files for Python C extensions.
133
134 ##### Debian/Ubuntu/Raspbian
135
136 Installing prerequisites on Ubuntu or Debian:
137
138 ```sh
139 sudo apt install build-essential python3-dev libffi-dev \
140 python3-pip python3-setuptools sqlite3 \
141 libssl-dev virtualenv libjpeg-dev libxslt1-dev
142 ```
143
144 ##### ArchLinux
145
146 Installing prerequisites on ArchLinux:
147
148 ```sh
149 sudo pacman -S base-devel python python-pip \
150 python-setuptools python-virtualenv sqlite3
151 ```
152
153 ##### CentOS/Fedora
154
155 Installing prerequisites on CentOS or Fedora Linux:
156
157 ```sh
158 sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
159 libwebp-devel libxml2-devel libxslt-devel libpq-devel \
160 python3-virtualenv libffi-devel openssl-devel python3-devel
161 sudo dnf groupinstall "Development Tools"
162 ```
163
164 ##### macOS
165
166 Installing prerequisites on macOS:
167
168 ```sh
169 xcode-select --install
170 sudo easy_install pip
171 sudo pip install virtualenv
172 brew install pkg-config libffi
173 ```
174
175 On macOS Catalina (10.15) you may need to explicitly install OpenSSL
176 via brew and inform `pip` about it so that `psycopg2` builds:
177
178 ```sh
179 brew install openssl@1.1
180 export LDFLAGS="-L/usr/local/opt/openssl/lib"
181 export CPPFLAGS="-I/usr/local/opt/openssl/include"
182 ```
183
184 ##### OpenSUSE
185
186 Installing prerequisites on openSUSE:
187
188 ```sh
189 sudo zypper in -t pattern devel_basis
190 sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
191 python-devel libffi-devel libopenssl-devel libjpeg62-devel
192 ```
193
194 ##### OpenBSD
195
196 A port of Synapse is available under `net/synapse`. The filesystem
197 underlying the homeserver directory (defaults to `/var/synapse`) has to be
198 mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
199 and mounting it to `/var/synapse` should be taken into consideration.
200
201 To be able to build Synapse's dependency on python the `WRKOBJDIR`
202 (cf. `bsd.port.mk(5)`) for building python, too, needs to be on a filesystem
203 mounted with `wxallowed` (cf. `mount(8)`).
204
205 Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
206 default OpenBSD installation is mounted with `wxallowed`):
207
208 ```sh
209 doas mkdir /usr/local/pobj_wxallowed
210 ```
211
212 Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
213 configured in `/etc/mk.conf`:
214
215 ```sh
216 doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
217 ```
218
219 Setting the `WRKOBJDIR` for building python:
220
221 ```sh
222 echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
223 ```
224
225 Building Synapse:
226
227 ```sh
228 cd /usr/ports/net/synapse
229 make install
230 ```
231
232 ##### Windows
233
234 If you wish to run or develop Synapse on Windows, the Windows Subsystem For
235 Linux provides a Linux environment on Windows 10 which is capable of using the
236 Debian, Fedora, or source installation methods. More information about WSL can
237 be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
238 Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
239 for Windows Server.
240
241 ### Prebuilt packages
242
243 As an alternative to installing from source, prebuilt packages are available
244 for a number of platforms.
245
246 #### Docker images and Ansible playbooks
247
248 There is an official synapse image available at
249 <https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
250 the docker-compose file available at [contrib/docker](contrib/docker). Further
251 information on this including configuration options is available in the README
252 on hub.docker.com.
253
254 Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
255 Dockerfile to automate a synapse server in a single Docker image, at
256 <https://hub.docker.com/r/avhost/docker-matrix/tags/>
257
258 Slavi Pantaleev has created an Ansible playbook,
259 which installs the official Docker image of Matrix Synapse
260 along with many other Matrix-related services (Postgres database, Element, coturn,
261 ma1sd, SSL support, etc.).
262 For more details, see
263 <https://github.com/spantaleev/matrix-docker-ansible-deploy>
264
265 #### Debian/Ubuntu
266
267 ##### Matrix.org packages
268
269 Matrix.org provides Debian/Ubuntu packages of the latest stable version of
270 Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
271 9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
272
273 ```sh
274 sudo apt install -y lsb-release wget apt-transport-https
275 sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
276 echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
277 sudo tee /etc/apt/sources.list.d/matrix-org.list
278 sudo apt update
279 sudo apt install matrix-synapse-py3
280 ```
281
282 **Note**: if you followed a previous version of these instructions which
283 recommended using `apt-key add` to add an old key from
284 `https://matrix.org/packages/debian/`, you should note that this key has been
285 revoked. You should remove the old key with `sudo apt-key remove
286 C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to
287 update your configuration.
288
289 The fingerprint of the repository signing key (as shown by `gpg
290 /usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
291 `AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
292
293 ##### Downstream Debian packages
294
295 We do not recommend using the packages from the default Debian `buster`
296 repository at this time, as they are old and suffer from known security
297 vulnerabilities. You can install the latest version of Synapse from
298 [our repository](#matrixorg-packages) or from `buster-backports`. Please
299 see the [Debian documentation](https://backports.debian.org/Instructions/)
300 for information on how to use backports.
301
302 If you are using Debian `sid` or testing, Synapse is available in the default
303 repositories and it should be possible to install it simply with:
304
305 ```sh
306 sudo apt install matrix-synapse
307 ```
308
309 ##### Downstream Ubuntu packages
310
311 We do not recommend using the packages in the default Ubuntu repository
312 at this time, as they are old and suffer from known security vulnerabilities.
313 The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
314
315 #### Fedora
316
317 Synapse is in the Fedora repositories as `matrix-synapse`:
318
319 ```sh
320 sudo dnf install matrix-synapse
321 ```
322
323 Oleg Girko provides Fedora RPMs at
324 <https://obs.infoserver.lv/project/monitor/matrix-synapse>
325
326 #### OpenSUSE
327
328 Synapse is in the OpenSUSE repositories as `matrix-synapse`:
329
330 ```sh
331 sudo zypper install matrix-synapse
332 ```
333
334 #### SUSE Linux Enterprise Server
335
336 Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
337 <https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
338
339 #### ArchLinux
340
341 The quickest way to get up and running with ArchLinux is probably with the community package
342 <https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
343 the necessary dependencies.
344
345 pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
346
347 ```sh
348 sudo pip install --upgrade pip
349 ```
350
351 If you encounter an error with lib bcrypt causing a Wrong ELF Class:
352 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
353 compile it under the right architecture. (This should not be needed if
354 installing under virtualenv):
355
356 ```sh
357 sudo pip uninstall py-bcrypt
358 sudo pip install py-bcrypt
359 ```
360
361 #### Void Linux
362
363 Synapse can be found in the void repositories as 'synapse':
364
365 ```sh
366 xbps-install -Su
367 xbps-install -S synapse
368 ```
369
370 #### FreeBSD
371
372 Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
373
374 - Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
375 - Packages: `pkg install py37-matrix-synapse`
376
377 #### OpenBSD
378
379 As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
380 underlying the homeserver directory (defaults to `/var/synapse`) has to be
381 mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
382 and mounting it to `/var/synapse` should be taken into consideration.
383
384 Installing Synapse:
385
386 ```sh
387 doas pkg_add synapse
388 ```
389
390 #### NixOS
391
392 Robin Lambertz has packaged Synapse for NixOS at:
393 <https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
394
395 ## Setting up Synapse
396
397 Once you have installed synapse as above, you will need to configure it.
398
399 ### Using PostgreSQL
400
401 By default Synapse uses an [SQLite](https://sqlite.org/) database and in doing so trades
402 performance for convenience. Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org)
403 instead. Advantages include:
404
405 - significant performance improvements due to the superior threading and
406 caching model, smarter query optimiser
407 - allowing the DB to be run on separate hardware
408
409 For information on how to install and use PostgreSQL in Synapse, please see
410 [docs/postgres.md](docs/postgres.md)
411
412 SQLite is only acceptable for testing purposes. SQLite should not be used in
413 a production server. Synapse will perform poorly when using
414 SQLite, especially when participating in large rooms.
415
416 ### TLS certificates
417
418 The default configuration exposes a single HTTP port on the local
419 interface: `http://localhost:8008`. It is suitable for local testing,
420 but for any practical use, you will need Synapse's APIs to be served
421 over HTTPS.
422
423 The recommended way to do so is to set up a reverse proxy on port
424 `8448`. You can find documentation on doing so in
425 [docs/reverse_proxy.md](docs/reverse_proxy.md).
426
427 Alternatively, you can configure Synapse to expose an HTTPS port. To do
428 so, you will need to edit `homeserver.yaml`, as follows:
429
430 - First, under the `listeners` section, uncomment the configuration for the
431 TLS-enabled listener. (Remove the hash sign (`#`) at the start of
432 each line). The relevant lines are like this:
433
434 ```yaml
435 - port: 8448
436 type: http
437 tls: true
438 resources:
439 - names: [client, federation]
440 ```
441
442 - You will also need to uncomment the `tls_certificate_path` and
443 `tls_private_key_path` lines under the `TLS` section. You will need to manage
444 provisioning of these certificates yourself.
445
446 If you are using your own certificate, be sure to use a `.pem` file that
447 includes the full certificate chain including any intermediate certificates
448 (for instance, if using certbot, use `fullchain.pem` as your certificate, not
449 `cert.pem`).
450
451 For a more detailed guide to configuring your server for federation, see
452 [federate.md](docs/federate.md).
453
454 ### Client Well-Known URI
455
456 Setting up the client Well-Known URI is optional but if you set it up, it will
457 allow users to enter their full username (e.g. `@user:<server_name>`) into clients
458 which support well-known lookup to automatically configure the homeserver and
459 identity server URLs. This is useful so that users don't have to memorize or think
460 about the actual homeserver URL you are using.
461
462 The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
463 the following format.
464
465 ```json
466 {
467 "m.homeserver": {
468 "base_url": "https://<matrix.example.com>"
469 }
470 }
471 ```
472
473 It can optionally contain identity server information as well.
474
475 ```json
476 {
477 "m.homeserver": {
478 "base_url": "https://<matrix.example.com>"
479 },
480 "m.identity_server": {
481 "base_url": "https://<identity.example.com>"
482 }
483 }
484 ```
485
486 To work in browser based clients, the file must be served with the appropriate
487 Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
488 `Access-Control-Allow-Origin: *` which would allow all browser based clients to
489 view it.
490
491 In nginx this would be something like:
492
493 ```nginx
494 location /.well-known/matrix/client {
495 return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
496 default_type application/json;
497 add_header Access-Control-Allow-Origin *;
498 }
499 ```
500
501 You should also ensure the `public_baseurl` option in `homeserver.yaml` is set
502 correctly. `public_baseurl` should be set to the URL that clients will use to
503 connect to your server. This is the same URL you put for the `m.homeserver`
504 `base_url` above.
505
506 ```yaml
507 public_baseurl: "https://<matrix.example.com>"
508 ```
509
510 ### Email
511
512 It is desirable for Synapse to have the capability to send email. This allows
513 Synapse to send password reset emails, send verifications when an email address
514 is added to a user's account, and send email notifications to users when they
515 receive new messages.
516
517 To configure an SMTP server for Synapse, modify the configuration section
518 headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
519 and `notif_from` fields filled out. You may also need to set `smtp_user`,
520 `smtp_pass`, and `require_transport_security`.
521
522 If email is not configured, password reset, registration and notifications via
523 email will be disabled.
524
525 ### Registering a user
526
527 The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
528
529 Alternatively, you can do so from the command line. This can be done as follows:
530
531 1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
532 installed via a prebuilt package, `register_new_matrix_user` should already be
533 on the search path):
534 ```sh
535 cd ~/synapse
536 source env/bin/activate
537 synctl start # if not already running
538 ```
539 2. Run the following command:
540 ```sh
541 register_new_matrix_user -c homeserver.yaml http://localhost:8008
542 ```
543
544 This will prompt you to add details for the new user, and will then connect to
545 the running Synapse to create the new user. For example:
546 ```
547 New user localpart: erikj
548 Password:
549 Confirm password:
550 Make admin [no]:
551 Success!
552 ```
553
554 This process uses a setting `registration_shared_secret` in
555 `homeserver.yaml`, which is shared between Synapse itself and the
556 `register_new_matrix_user` script. It doesn't matter what it is (a random
557 value is generated by `--generate-config`), but it should be kept secret, as
558 anyone with knowledge of it can register users, including admin accounts,
559 on your server even if `enable_registration` is `false`.
560
561 ### Setting up a TURN server
562
563 For reliable VoIP calls to be routed via this homeserver, you MUST configure
564 a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
565
566 ### URL previews
567
568 Synapse includes support for previewing URLs, which is disabled by default. To
569 turn it on you must enable the `url_preview_enabled: true` config parameter
570 and explicitly specify the IP ranges that Synapse is not allowed to spider for
571 previewing in the `url_preview_ip_range_blacklist` configuration parameter.
572 This is critical from a security perspective to stop arbitrary Matrix users
573 spidering 'internal' URLs on your network. At the very least we recommend that
574 your loopback and RFC1918 IP addresses are blacklisted.
575
576 This also requires the optional `lxml` python dependency to be installed. This
577 in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
578 means `apt-get install libxml2-dev`, or equivalent for your OS.
579
580 ### Troubleshooting Installation
581
582 `pip` seems to leak *lots* of memory during installation. For instance, a Linux
583 host with 512MB of RAM may run out of memory whilst installing Twisted. If this
584 happens, you will have to individually install the dependencies which are
585 failing, e.g.:
586
587 ```sh
588 pip install twisted
589 ```
590
591 If you have any other problems, feel free to ask in
592 [#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
6 The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).
2424
2525 ``#matrix:matrix.org`` is the official support room for Matrix, and can be
2626 accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
27 via IRC bridge at irc://irc.freenode.net/matrix.
27 via IRC bridge at irc://irc.libera.chat/matrix.
2828
2929 Synapse is currently in rapid development, but as of version 0.5 we believe it
3030 is sufficiently stable to be run as an internet-facing service for real usage!
9393
9494 .. _federation:
9595
96 * For details on how to install synapse, see `<INSTALL.md>`_.
96 * For details on how to install synapse, see
97 `Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
9798 * For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_
9899
99100
105106
106107 Unless you are running a test instance of Synapse on your local machine, in
107108 general, you will need to enable TLS support before you can successfully
108 connect from a client: see `<INSTALL.md#tls-certificates>`_.
109 connect from a client: see
110 `TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
109111
110112 An easy way to get started is to login or register via Element at
111113 https://app.element.io/#/login or https://app.element.io/#/register respectively.
185187 Upgrading an existing Synapse
186188 =============================
187189
188 The instructions for upgrading synapse are in `UPGRADE.rst`_.
190 The instructions for upgrading synapse are in `the upgrade notes`_.
189191 Please check these instructions as upgrading may require extra steps for some
190192 versions of synapse.
191193
192 .. _UPGRADE.rst: UPGRADE.rst
194 .. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
193195
194196 .. _reverse-proxy:
195197
264266
265267 Before setting up a development environment for synapse, make sure you have the
266268 system dependencies (such as the python header files) installed - see
267 `Installing from source <INSTALL.md#installing-from-source>`_.
269 `Installing from source <https://matrix-org.github.io/synapse/latest/setup/installation.html#installing-from-source>`_.
268270
269271 To check out a synapse for development, clone the git repo into a working
270272 directory of your choice::
332334 the source tree, so installation of the server is not required.
333335
334336 Testing with SyTest is recommended for verifying that changes related to the
335 Client-Server API are functioning correctly. See the `installation instructions
336 <https://github.com/matrix-org/sytest#installing>`_ for details.
337 Client-Server API are functioning correctly. See the `SyTest installation
338 instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
337339
338340
339341 Platform dependencies
00 Upgrading Synapse
11 =================
22
3 Before upgrading check if any special steps are required to upgrade from the
4 version you currently have installed to the current version of Synapse. The extra
5 instructions that may be required are listed later in this document.
3 This document has moved to the `Synapse documentation website <https://matrix-org.github.io/synapse/latest/upgrading>`_.
4 Please update your links.
65
7 * Check that your versions of Python and PostgreSQL are still supported.
8
9 Synapse follows upstream lifecycles for `Python`_ and `PostgreSQL`_, and
10 removes support for versions which are no longer maintained.
11
12 The website https://endoflife.date also offers convenient summaries.
13
14 .. _Python: https://devguide.python.org/devcycle/#end-of-life-branches
15 .. _PostgreSQL: https://www.postgresql.org/support/versioning/
16
17 * If Synapse was installed using `prebuilt packages
18 <INSTALL.md#prebuilt-packages>`_, you will need to follow the normal process
19 for upgrading those packages.
20
21 * If Synapse was installed from source, then:
22
23 1. Activate the virtualenv before upgrading. For example, if Synapse is
24 installed in a virtualenv in ``~/synapse/env`` then run:
25
26 .. code:: bash
27
28 source ~/synapse/env/bin/activate
29
30 2. If Synapse was installed using pip then upgrade to the latest version by
31 running:
32
33 .. code:: bash
34
35 pip install --upgrade matrix-synapse
36
37 If Synapse was installed using git then upgrade to the latest version by
38 running:
39
40 .. code:: bash
41
42 git pull
43 pip install --upgrade .
44
45 3. Restart Synapse:
46
47 .. code:: bash
48
49 ./synctl restart
50
51 To check whether your update was successful, you can check the running server
52 version with:
53
54 .. code:: bash
55
56 # you may need to replace 'localhost:8008' if synapse is not configured
57 # to listen on port 8008.
58
59 curl http://localhost:8008/_synapse/admin/v1/server_version
60
61 Rolling back to older versions
62 ------------------------------
63
64 Rolling back to previous releases can be difficult, due to database schema
65 changes between releases. Where we have been able to test the rollback process,
66 this will be noted below.
67
68 In general, you will need to undo any changes made during the upgrade process,
69 for example:
70
71 * pip:
72
73 .. code:: bash
74
75 source env/bin/activate
76 # replace `1.3.0` accordingly:
77 pip install matrix-synapse==1.3.0
78
79 * Debian:
80
81 .. code:: bash
82
83 # replace `1.3.0` and `stretch` accordingly:
84 wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
85 dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
86
87 Upgrading to v1.37.0
88 ====================
89
90 Deprecation of the current spam checker interface
91 -------------------------------------------------
92
93 The current spam checker interface is deprecated in favour of a new generic modules system.
94 Authors of spam checker modules can refer to `this documentation <https://matrix-org.github.io/synapse/develop/modules.html#porting-an-existing-module-that-uses-the-old-interface>`_
95 to update their modules. Synapse administrators can refer to `this documentation <https://matrix-org.github.io/synapse/develop/modules.html#using-modules>`_
96 to update their configuration once the modules they are using have been updated.
97
98 We plan to remove support for the current spam checker interface in August 2021.
99
100 More module interfaces will be ported over to this new generic system in future versions
101 of Synapse.
102
103
104 Upgrading to v1.34.0
105 ====================
106
107 ``room_invite_state_types`` configuration setting
108 -----------------------------------------------
109
110 The ``room_invite_state_types`` configuration setting has been deprecated and
111 replaced with ``room_prejoin_state``. See the `sample configuration file <https://github.com/matrix-org/synapse/blob/v1.34.0/docs/sample_config.yaml#L1515>`_.
112
113 If you have set ``room_invite_state_types`` to the default value you should simply
114 remove it from your configuration file. The default value used to be:
115
116 .. code:: yaml
117
118 room_invite_state_types:
119 - "m.room.join_rules"
120 - "m.room.canonical_alias"
121 - "m.room.avatar"
122 - "m.room.encryption"
123 - "m.room.name"
124
125 If you have customised this value, you should remove ``room_invite_state_types`` and
126 configure ``room_prejoin_state`` instead.
127
128
129
130 Upgrading to v1.33.0
131 ====================
132
133 Account Validity HTML templates can now display a user's expiration date
134 ------------------------------------------------------------------------
135
136 This may affect you if you have enabled the account validity feature, and have made use of a
137 custom HTML template specified by the ``account_validity.template_dir`` or ``account_validity.account_renewed_html_path``
138 Synapse config options.
139
140 The template can now accept an ``expiration_ts`` variable, which represents the unix timestamp in milliseconds for the
141 future date of which their account has been renewed until. See the
142 `default template <https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_renewed.html>`_
143 for an example of usage.
144
145 Also note that a new HTML template, ``account_previously_renewed.html``, has been added. This is shown to users
146 when they attempt to renew their account with a valid renewal token that has already been used before. The default
147 template contents can be found
148 `here <https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_previously_renewed.html>`_,
149 and can also accept an ``expiration_ts`` variable. This template replaces the error message users would previously see
150 upon attempting to use a valid renewal token more than once.
151
152
153 Upgrading to v1.32.0
154 ====================
155
156 Regression causing connected Prometheus instances to become overwhelmed
157 -----------------------------------------------------------------------
158
159 This release introduces `a regression <https://github.com/matrix-org/synapse/issues/9853>`_
160 that can overwhelm connected Prometheus instances. This issue is not present in
161 Synapse v1.32.0rc1.
162
163 If you have been affected, please downgrade to 1.31.0. You then may need to
164 remove excess writeahead logs in order for Prometheus to recover. Instructions
165 for doing so are provided
166 `here <https://github.com/matrix-org/synapse/pull/9854#issuecomment-823472183>`_.
167
168 Dropping support for old Python, Postgres and SQLite versions
169 -------------------------------------------------------------
170
171 In line with our `deprecation policy <https://github.com/matrix-org/synapse/blob/release-v1.32.0/docs/deprecation_policy.md>`_,
172 we've dropped support for Python 3.5 and PostgreSQL 9.5, as they are no longer supported upstream.
173
174 This release of Synapse requires Python 3.6+ and PostgreSQL 9.6+ or SQLite 3.22+.
175
176 Removal of old List Accounts Admin API
177 --------------------------------------
178
179 The deprecated v1 "list accounts" admin API (``GET /_synapse/admin/v1/users/<user_id>``) has been removed in this version.
180
181 The `v2 list accounts API <https://github.com/matrix-org/synapse/blob/master/docs/admin_api/user_admin_api.rst#list-accounts>`_
182 has been available since Synapse 1.7.0 (2019-12-13), and is accessible under ``GET /_synapse/admin/v2/users``.
183
184 The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
185
186 Application Services must use type ``m.login.application_service`` when registering users
187 -----------------------------------------------------------------------------------------
188
189 In compliance with the
190 `Application Service spec <https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions>`_,
191 Application Services are now required to use the ``m.login.application_service`` type when registering users via the
192 ``/_matrix/client/r0/register`` endpoint. This behaviour was deprecated in Synapse v1.30.0.
193
194 Please ensure your Application Services are up to date.
195
196 Upgrading to v1.29.0
197 ====================
198
199 Requirement for X-Forwarded-Proto header
200 ----------------------------------------
201
202 When using Synapse with a reverse proxy (in particular, when using the
203 `x_forwarded` option on an HTTP listener), Synapse now expects to receive an
204 `X-Forwarded-Proto` header on incoming HTTP requests. If it is not set, Synapse
205 will log a warning on each received request.
206
207 To avoid the warning, administrators using a reverse proxy should ensure that
208 the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
209 indicate the protocol used by the client.
210
211 Synapse also requires the `Host` header to be preserved.
212
213 See the `reverse proxy documentation <docs/reverse_proxy.md>`_, where the
214 example configurations have been updated to show how to set these headers.
215
216 (Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
217 sets `X-Forwarded-Proto` by default.)
218
219 Upgrading to v1.27.0
220 ====================
221
222 Changes to callback URI for OAuth2 / OpenID Connect and SAML2
223 -------------------------------------------------------------
224
225 This version changes the URI used for callbacks from OAuth2 and SAML2 identity providers:
226
227 * If your server is configured for single sign-on via an OpenID Connect or OAuth2 identity
228 provider, you will need to add ``[synapse public baseurl]/_synapse/client/oidc/callback``
229 to the list of permitted "redirect URIs" at the identity provider.
230
231 See `docs/openid.md <docs/openid.md>`_ for more information on setting up OpenID
232 Connect.
233
234 * If your server is configured for single sign-on via a SAML2 identity provider, you will
235 need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
236 "ACS location" (also known as "allowed callback URLs") at the identity provider.
237
238 The "Issuer" in the "AuthnRequest" to the SAML2 identity provider is also updated to
239 ``[synapse public baseurl]/_synapse/client/saml2/metadata.xml``. If your SAML2 identity
240 provider uses this property to validate or otherwise identify Synapse, its configuration
241 will need to be updated to use the new URL. Alternatively you could create a new, separate
242 "EntityDescriptor" in your SAML2 identity provider with the new URLs and leave the URLs in
243 the existing "EntityDescriptor" as they were.
244
245 Changes to HTML templates
246 -------------------------
247
248 The HTML templates for SSO and email notifications now have `Jinja2's autoescape <https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping>`_
249 enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you have customised
250 these templates and see issues when viewing them you might need to update them.
251 It is expected that most configurations will need no changes.
252
253 If you have customised the templates *names* for these templates, it is recommended
254 to verify they end in ``.html`` to ensure autoescape is enabled.
255
256 The above applies to the following templates:
257
258 * ``add_threepid.html``
259 * ``add_threepid_failure.html``
260 * ``add_threepid_success.html``
261 * ``notice_expiry.html``
262 * ``notice_expiry.html``
263 * ``notif_mail.html`` (which, by default, includes ``room.html`` and ``notif.html``)
264 * ``password_reset.html``
265 * ``password_reset_confirmation.html``
266 * ``password_reset_failure.html``
267 * ``password_reset_success.html``
268 * ``registration.html``
269 * ``registration_failure.html``
270 * ``registration_success.html``
271 * ``sso_account_deactivated.html``
272 * ``sso_auth_bad_user.html``
273 * ``sso_auth_confirm.html``
274 * ``sso_auth_success.html``
275 * ``sso_error.html``
276 * ``sso_login_idp_picker.html``
277 * ``sso_redirect_confirm.html``
278
279 Upgrading to v1.26.0
280 ====================
281
282 Rolling back to v1.25.0 after a failed upgrade
283 ----------------------------------------------
284
285 v1.26.0 includes a lot of large changes. If something problematic occurs, you
286 may want to roll-back to a previous version of Synapse. Because v1.26.0 also
287 includes a new database schema version, reverting that version is also required
288 alongside the generic rollback instructions mentioned above. In short, to roll
289 back to v1.25.0 you need to:
290
291 1. Stop the server
292 2. Decrease the schema version in the database:
293
294 .. code:: sql
295
296 UPDATE schema_version SET version = 58;
297
298 3. Delete the ignored users & chain cover data:
299
300 .. code:: sql
301
302 DROP TABLE IF EXISTS ignored_users;
303 UPDATE rooms SET has_auth_chain_index = false;
304
305 For PostgreSQL run:
306
307 .. code:: sql
308
309 TRUNCATE event_auth_chain_links;
310 TRUNCATE event_auth_chains;
311
312 For SQLite run:
313
314 .. code:: sql
315
316 DELETE FROM event_auth_chain_links;
317 DELETE FROM event_auth_chains;
318
319 4. Mark the deltas as not run (so they will re-run on upgrade).
320
321 .. code:: sql
322
323 DELETE FROM applied_schema_deltas WHERE version = 59 AND file = '59/01ignored_user.py';
324 DELETE FROM applied_schema_deltas WHERE version = 59 AND file = '59/06chain_cover_index.sql';
325
326 5. Downgrade Synapse by following the instructions for your installation method
327 in the "Rolling back to older versions" section above.
328
329 Upgrading to v1.25.0
330 ====================
331
332 Last release supporting Python 3.5
333 ----------------------------------
334
335 This is the last release of Synapse which guarantees support with Python 3.5,
336 which passed its upstream End of Life date several months ago.
337
338 We will attempt to maintain support through March 2021, but without guarantees.
339
340 In the future, Synapse will follow upstream schedules for ending support of
341 older versions of Python and PostgreSQL. Please upgrade to at least Python 3.6
342 and PostgreSQL 9.6 as soon as possible.
343
344 Blacklisting IP ranges
345 ----------------------
346
347 Synapse v1.25.0 includes new settings, ``ip_range_blacklist`` and
348 ``ip_range_whitelist``, for controlling outgoing requests from Synapse for federation,
349 identity servers, push, and for checking key validity for third-party invite events.
350 The previous setting, ``federation_ip_range_blacklist``, is deprecated. The new
351 ``ip_range_blacklist`` defaults to private IP ranges if it is not defined.
352
353 If you have never customised ``federation_ip_range_blacklist`` it is recommended
354 that you remove that setting.
355
356 If you have customised ``federation_ip_range_blacklist`` you should update the
357 setting name to ``ip_range_blacklist``.
358
359 If you have a custom push server that is reached via private IP space you may
360 need to customise ``ip_range_blacklist`` or ``ip_range_whitelist``.
361
362 Upgrading to v1.24.0
363 ====================
364
365 Custom OpenID Connect mapping provider breaking change
366 ------------------------------------------------------
367
368 This release allows the OpenID Connect mapping provider to perform normalisation
369 of the localpart of the Matrix ID. This allows for the mapping provider to
370 specify different algorithms, instead of the `default way <https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets>`_.
371
372 If your Synapse configuration uses a custom mapping provider
373 (`oidc_config.user_mapping_provider.module` is specified and not equal to
374 `synapse.handlers.oidc_handler.JinjaOidcMappingProvider`) then you *must* ensure
375 that `map_user_attributes` of the mapping provider performs some normalisation
376 of the `localpart` returned. To match previous behaviour you can use the
377 `map_username_to_mxid_localpart` function provided by Synapse. An example is
378 shown below:
379
380 .. code-block:: python
381
382 from synapse.types import map_username_to_mxid_localpart
383
384 class MyMappingProvider:
385 def map_user_attributes(self, userinfo, token):
386 # ... your custom logic ...
387 sso_user_id = ...
388 localpart = map_username_to_mxid_localpart(sso_user_id)
389
390 return {"localpart": localpart}
391
392 Removal of historical Synapse Admin API
393 ---------------------------------------
394
395 Historically, the Synapse Admin API has been accessible under:
396
397 * ``/_matrix/client/api/v1/admin``
398 * ``/_matrix/client/unstable/admin``
399 * ``/_matrix/client/r0/admin``
400 * ``/_synapse/admin/v1``
401
402 The endpoints with ``/_matrix/client/*`` prefixes have been removed as of v1.24.0.
403 The Admin API is now only accessible under:
404
405 * ``/_synapse/admin/v1``
406
407 The only exception is the `/admin/whois` endpoint, which is
408 `also available via the client-server API <https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid>`_.
409
410 The deprecation of the old endpoints was announced with Synapse 1.20.0 (released
411 on 2020-09-22) and makes it easier for homeserver admins to lock down external
412 access to the Admin API endpoints.
413
414 Upgrading to v1.23.0
415 ====================
416
417 Structured logging configuration breaking changes
418 -------------------------------------------------
419
420 This release deprecates use of the ``structured: true`` logging configuration for
421 structured logging. If your logging configuration contains ``structured: true``
422 then it should be modified based on the `structured logging documentation
423 <https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md>`_.
424
425 The ``structured`` and ``drains`` logging options are now deprecated and should
426 be replaced by standard logging configuration of ``handlers`` and ``formatters``.
427
428 A future release of Synapse will make using ``structured: true`` an error.
429
430 Upgrading to v1.22.0
431 ====================
432
433 ThirdPartyEventRules breaking changes
434 -------------------------------------
435
436 This release introduces a backwards-incompatible change to modules making use of
437 ``ThirdPartyEventRules`` in Synapse. If you make use of a module defined under the
438 ``third_party_event_rules`` config option, please make sure it is updated to handle
439 the below change:
440
441 The ``http_client`` argument is no longer passed to modules as they are initialised. Instead,
442 modules are expected to make use of the ``http_client`` property on the ``ModuleApi`` class.
443 Modules are now passed a ``module_api`` argument during initialisation, which is an instance of
444 ``ModuleApi``. ``ModuleApi`` instances have a ``http_client`` property which acts the same as
445 the ``http_client`` argument previously passed to ``ThirdPartyEventRules`` modules.
446
447 Upgrading to v1.21.0
448 ====================
449
450 Forwarding ``/_synapse/client`` through your reverse proxy
451 ----------------------------------------------------------
452
453 The `reverse proxy documentation
454 <https://github.com/matrix-org/synapse/blob/develop/docs/reverse_proxy.md>`_ has been updated
455 to include reverse proxy directives for ``/_synapse/client/*`` endpoints. As the user password
456 reset flow now uses endpoints under this prefix, **you must update your reverse proxy
457 configurations for user password reset to work**.
458
459 Additionally, note that the `Synapse worker documentation
460 <https://github.com/matrix-org/synapse/blob/develop/docs/workers.md>`_ has been updated to
461 state that the ``/_synapse/client/password_reset/email/submit_token`` endpoint can be handled
462 by all workers. If you make use of Synapse's worker feature, please update your reverse proxy
463 configuration to reflect this change.
464
465 New HTML templates
466 ------------------
467
468 A new HTML template,
469 `password_reset_confirmation.html <https://github.com/matrix-org/synapse/blob/develop/synapse/res/templates/password_reset_confirmation.html>`_,
470 has been added to the ``synapse/res/templates`` directory. If you are using a
471 custom template directory, you may want to copy the template over and modify it.
472
473 Note that as of v1.20.0, templates do not need to be included in custom template
474 directories for Synapse to start. The default templates will be used if a custom
475 template cannot be found.
476
477 This page will appear to the user after clicking a password reset link that has
478 been emailed to them.
479
480 To complete password reset, the page must include a way to make a `POST`
481 request to
482 ``/_synapse/client/password_reset/{medium}/submit_token``
483 with the query parameters from the original link, presented as a URL-encoded form. See the file
484 itself for more details.
485
486 Updated Single Sign-on HTML Templates
487 -------------------------------------
488
489 The ``saml_error.html`` template was removed from Synapse and replaced with the
490 ``sso_error.html`` template. If your Synapse is configured to use SAML and a
491 custom ``sso_redirect_confirm_template_dir`` configuration then any customisations
492 of the ``saml_error.html`` template will need to be merged into the ``sso_error.html``
493 template. These templates are similar, but the parameters are slightly different:
494
495 * The ``msg`` parameter should be renamed to ``error_description``.
496 * There is no longer a ``code`` parameter for the response code.
497 * A string ``error`` parameter is available that includes a short hint of why a
498 user is seeing the error page.
499
500 Upgrading to v1.18.0
501 ====================
502
503 Docker `-py3` suffix will be removed in future versions
504 -------------------------------------------------------
505
506 From 10th August 2020, we will no longer publish Docker images with the `-py3` tag suffix. The images tagged with the `-py3` suffix have been identical to the non-suffixed tags since release 0.99.0, and the suffix is obsolete.
507
508 On 10th August, we will remove the `latest-py3` tag. Existing per-release tags (such as `v1.18.0-py3`) will not be removed, but no new `-py3` tags will be added.
509
510 Scripts relying on the `-py3` suffix will need to be updated.
511
512 Redis replication is now recommended in lieu of TCP replication
513 ---------------------------------------------------------------
514
515 When setting up worker processes, we now recommend the use of a Redis server for replication. **The old direct TCP connection method is deprecated and will be removed in a future release.**
516 See `docs/workers.md <docs/workers.md>`_ for more details.
517
518 Upgrading to v1.14.0
519 ====================
520
521 This version includes a database update which is run as part of the upgrade,
522 and which may take a couple of minutes in the case of a large server. Synapse
523 will not respond to HTTP requests while this update is taking place.
524
525 Upgrading to v1.13.0
526 ====================
527
528 Incorrect database migration in old synapse versions
529 ----------------------------------------------------
530
531 A bug was introduced in Synapse 1.4.0 which could cause the room directory to
532 be incomplete or empty if Synapse was upgraded directly from v1.2.1 or
533 earlier, to versions between v1.4.0 and v1.12.x.
534
535 This will *not* be a problem for Synapse installations which were:
536 * created at v1.4.0 or later,
537 * upgraded via v1.3.x, or
538 * upgraded straight from v1.2.1 or earlier to v1.13.0 or later.
539
540 If completeness of the room directory is a concern, installations which are
541 affected can be repaired as follows:
542
543 1. Run the following sql from a `psql` or `sqlite3` console:
544
545 .. code:: sql
546
547 INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
548 ('populate_stats_process_rooms', '{}', 'current_state_events_membership');
549
550 INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
551 ('populate_stats_process_users', '{}', 'populate_stats_process_rooms');
552
553 2. Restart synapse.
554
555 New Single Sign-on HTML Templates
556 ---------------------------------
557
558 New templates (``sso_auth_confirm.html``, ``sso_auth_success.html``, and
559 ``sso_account_deactivated.html``) were added to Synapse. If your Synapse is
560 configured to use SSO and a custom ``sso_redirect_confirm_template_dir``
561 configuration then these templates will need to be copied from
562 `synapse/res/templates <synapse/res/templates>`_ into that directory.
563
564 Synapse SSO Plugins Method Deprecation
565 --------------------------------------
566
567 Plugins using the ``complete_sso_login`` method of
568 ``synapse.module_api.ModuleApi`` should update to using the async/await
569 version ``complete_sso_login_async`` which includes additional checks. The
570 non-async version is considered deprecated.
571
572 Rolling back to v1.12.4 after a failed upgrade
573 ----------------------------------------------
574
575 v1.13.0 includes a lot of large changes. If something problematic occurs, you
576 may want to roll-back to a previous version of Synapse. Because v1.13.0 also
577 includes a new database schema version, reverting that version is also required
578 alongside the generic rollback instructions mentioned above. In short, to roll
579 back to v1.12.4 you need to:
580
581 1. Stop the server
582 2. Decrease the schema version in the database:
583
584 .. code:: sql
585
586 UPDATE schema_version SET version = 57;
587
588 3. Downgrade Synapse by following the instructions for your installation method
589 in the "Rolling back to older versions" section above.
590
591
592 Upgrading to v1.12.0
593 ====================
594
595 This version includes a database update which is run as part of the upgrade,
596 and which may take some time (several hours in the case of a large
597 server). Synapse will not respond to HTTP requests while this update is taking
598 place.
599
600 This is only likely to be a problem in the case of a server which is
601 participating in many rooms.
602
603 0. As with all upgrades, it is recommended that you have a recent backup of
604 your database which can be used for recovery in the event of any problems.
605
606 1. As an initial check to see if you will be affected, you can try running the
607 following query from the `psql` or `sqlite3` console. It is safe to run it
608 while Synapse is still running.
609
610 .. code:: sql
611
612 SELECT MAX(q.v) FROM (
613 SELECT (
614 SELECT ej.json AS v
615 FROM state_events se INNER JOIN event_json ej USING (event_id)
616 WHERE se.room_id=rooms.room_id AND se.type='m.room.create' AND se.state_key=''
617 LIMIT 1
618 ) FROM rooms WHERE rooms.room_version IS NULL
619 ) q;
620
621 This query will take about the same amount of time as the upgrade process: ie,
622 if it takes 5 minutes, then it is likely that Synapse will be unresponsive for
623 5 minutes during the upgrade.
624
625 If you consider an outage of this duration to be acceptable, no further
626 action is necessary and you can simply start Synapse 1.12.0.
627
628 If you would prefer to reduce the downtime, continue with the steps below.
629
630 2. The easiest workaround for this issue is to manually
631 create a new index before upgrading. On PostgreSQL, this can be done as follows:
632
633 .. code:: sql
634
635 CREATE INDEX CONCURRENTLY tmp_upgrade_1_12_0_index
636 ON state_events(room_id) WHERE type = 'm.room.create';
637
638 The above query may take some time, but is also safe to run while Synapse is
639 running.
640
641 We assume that no SQLite users have databases large enough to be
642 affected. If you *are* affected, you can run a similar query, omitting the
643 ``CONCURRENTLY`` keyword. Note however that this operation may in itself cause
644 Synapse to stop running for some time. Synapse admins are reminded that
645 `SQLite is not recommended for use outside a test
646 environment <https://github.com/matrix-org/synapse/blob/master/README.rst#using-postgresql>`_.
647
648 3. Once the index has been created, the ``SELECT`` query in step 1 above should
649 complete quickly. It is therefore safe to upgrade to Synapse 1.12.0.
650
651 4. Once Synapse 1.12.0 has successfully started and is responding to HTTP
652 requests, the temporary index can be removed:
653
654 .. code:: sql
655
656 DROP INDEX tmp_upgrade_1_12_0_index;
657
658 Upgrading to v1.10.0
659 ====================
660
661 Synapse will now log a warning on start up if used with a PostgreSQL database
662 that has a non-recommended locale set.
663
664 See `docs/postgres.md <docs/postgres.md>`_ for details.
665
666
667 Upgrading to v1.8.0
668 ===================
669
670 Specifying a ``log_file`` config option will now cause Synapse to refuse to
671 start, and should be replaced with the ``log_config`` option. Support for
672 the ``log_file`` option was removed in v1.3.0 and has since had no effect.
673
674
675 Upgrading to v1.7.0
676 ===================
677
678 In an attempt to configure Synapse in a privacy preserving way, the default
679 behaviours of ``allow_public_rooms_without_auth`` and
680 ``allow_public_rooms_over_federation`` have been inverted. This means that by
681 default, only authenticated users querying the Client/Server API will be able
682 to query the room directory, and relatedly that the server will not share
683 room directory information with other servers over federation.
684
685 If your installation does not explicitly set these settings one way or the other
686 and you want either setting to be ``true`` then it will be necessary to update
687 your homeserver configuration file accordingly.
688
689 For more details on the surrounding context see our `explainer
690 <https://matrix.org/blog/2019/11/09/avoiding-unwelcome-visitors-on-private-matrix-servers>`_.
691
692
693 Upgrading to v1.5.0
694 ===================
695
696 This release includes a database migration which may take several minutes to
697 complete if there are a large number (more than a million or so) of entries in
698 the ``devices`` table. This is only likely to be a problem on very large
699 installations.
700
701
702 Upgrading to v1.4.0
703 ===================
704
705 New custom templates
706 --------------------
707
708 If you have configured a custom template directory with the
709 ``email.template_dir`` option, be aware that there are new templates regarding
710 registration and threepid management (see below) that must be included.
711
712 * ``registration.html`` and ``registration.txt``
713 * ``registration_success.html`` and ``registration_failure.html``
714 * ``add_threepid.html`` and ``add_threepid.txt``
715 * ``add_threepid_failure.html`` and ``add_threepid_success.html``
716
717 Synapse will expect these files to exist inside the configured template
718 directory, and **will fail to start** if they are absent.
719 To view the default templates, see `synapse/res/templates
720 <https://github.com/matrix-org/synapse/tree/master/synapse/res/templates>`_.
721
722 3pid verification changes
723 -------------------------
724
725 **Note: As of this release, users will be unable to add phone numbers or email
726 addresses to their accounts, without changes to the Synapse configuration. This
727 includes adding an email address during registration.**
728
729 It is possible for a user to associate an email address or phone number
730 with their account, for a number of reasons:
731
732 * for use when logging in, as an alternative to the user id.
733 * in the case of email, as an alternative contact to help with account recovery.
734 * in the case of email, to receive notifications of missed messages.
735
736 Before an email address or phone number can be added to a user's account,
737 or before such an address is used to carry out a password-reset, Synapse must
738 confirm the operation with the owner of the email address or phone number.
739 It does this by sending an email or text giving the user a link or token to confirm
740 receipt. This process is known as '3pid verification'. ('3pid', or 'threepid',
741 stands for third-party identifier, and we use it to refer to external
742 identifiers such as email addresses and phone numbers.)
743
744 Previous versions of Synapse delegated the task of 3pid verification to an
745 identity server by default. In most cases this server is ``vector.im`` or
746 ``matrix.org``.
747
748 In Synapse 1.4.0, for security and privacy reasons, the homeserver will no
749 longer delegate this task to an identity server by default. Instead,
750 the server administrator will need to explicitly decide how they would like the
751 verification messages to be sent.
752
753 In the medium term, the ``vector.im`` and ``matrix.org`` identity servers will
754 disable support for delegated 3pid verification entirely. However, in order to
755 ease the transition, they will retain the capability for a limited
756 period. Delegated email verification will be disabled on Monday 2nd December
757 2019 (giving roughly 2 months notice). Disabling delegated SMS verification
758 will follow some time after that once SMS verification support lands in
759 Synapse.
760
761 Once delegated 3pid verification support has been disabled in the ``vector.im`` and
762 ``matrix.org`` identity servers, all Synapse versions that depend on those
763 instances will be unable to verify email and phone numbers through them. There
764 are no imminent plans to remove delegated 3pid verification from Sydent
765 generally. (Sydent is the identity server project that backs the ``vector.im`` and
766 ``matrix.org`` instances).
767
768 Email
769 ~~~~~
770 Following upgrade, to continue verifying email (e.g. as part of the
771 registration process), admins can either:-
772
773 * Configure Synapse to use an email server.
774 * Run or choose an identity server which allows delegated email verification
775 and delegate to it.
776
777 Configure SMTP in Synapse
778 +++++++++++++++++++++++++
779
780 To configure an SMTP server for Synapse, modify the configuration section
781 headed ``email``, and be sure to have at least the ``smtp_host, smtp_port``
782 and ``notif_from`` fields filled out.
783
784 You may also need to set ``smtp_user``, ``smtp_pass``, and
785 ``require_transport_security``.
786
787 See the `sample configuration file <docs/sample_config.yaml>`_ for more details
788 on these settings.
789
790 Delegate email to an identity server
791 ++++++++++++++++++++++++++++++++++++
792
793 Some admins will wish to continue using email verification as part of the
794 registration process, but will not immediately have an appropriate SMTP server
795 at hand.
796
797 To this end, we will continue to support email verification delegation via the
798 ``vector.im`` and ``matrix.org`` identity servers for two months. Support for
799 delegated email verification will be disabled on Monday 2nd December.
800
801 The ``account_threepid_delegates`` dictionary defines whether the homeserver
802 should delegate an external server (typically an `identity server
803 <https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
804 confirmation messages via email and SMS.
805
806 So to delegate email verification, in ``homeserver.yaml``, set
807 ``account_threepid_delegates.email`` to the base URL of an identity server. For
808 example:
809
810 .. code:: yaml
811
812 account_threepid_delegates:
813 email: https://example.com # Delegate email sending to example.com
814
815 Note that ``account_threepid_delegates.email`` replaces the deprecated
816 ``email.trust_identity_server_for_password_resets``: if
817 ``email.trust_identity_server_for_password_resets`` is set to ``true``, and
818 ``account_threepid_delegates.email`` is not set, then the first entry in
819 ``trusted_third_party_id_servers`` will be used as the
820 ``account_threepid_delegate`` for email. This is to ensure compatibility with
821 existing Synapse installs that set up external server handling for these tasks
822 before v1.4.0. If ``email.trust_identity_server_for_password_resets`` is
823 ``true`` and no trusted identity server domains are configured, Synapse will
824 report an error and refuse to start.
825
826 If ``email.trust_identity_server_for_password_resets`` is ``false`` or absent
827 and no ``email`` delegate is configured in ``account_threepid_delegates``,
828 then Synapse will send email verification messages itself, using the configured
829 SMTP server (see above).
831
832 Phone numbers
833 ~~~~~~~~~~~~~
834
835 Synapse does not support phone-number verification itself, so the only way to
836 maintain the ability for users to add phone numbers to their accounts will be
837 by continuing to delegate phone number verification to the ``matrix.org`` and
838 ``vector.im`` identity servers (or another identity server that supports SMS
839 sending).
840
841 The ``account_threepid_delegates`` dictionary defines whether the homeserver
842 should delegate an external server (typically an `identity server
843 <https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
844 confirmation messages via email and SMS.
845
846 So to delegate phone number verification, in ``homeserver.yaml``, set
847 ``account_threepid_delegates.msisdn`` to the base URL of an identity
848 server. For example:
849
850 .. code:: yaml
851
852 account_threepid_delegates:
853 msisdn: https://example.com # Delegate sms sending to example.com
854
855 The ``matrix.org`` and ``vector.im`` identity servers will continue to support
856 delegated phone number verification via SMS until such time as it is possible
857 for admins to configure their servers to perform phone number verification
858 directly. More details will follow in a future release.
859
860 Rolling back to v1.3.1
861 ----------------------
862
863 If you encounter problems with v1.4.0, it should be possible to roll back to
864 v1.3.1, subject to the following:
865
866 * The 'room statistics' engine was heavily reworked in this release (see
867 `#5971 <https://github.com/matrix-org/synapse/pull/5971>`_), including
868 significant changes to the database schema, which are not easily
869 reverted. This will cause the room statistics engine to stop updating when
870 you downgrade.
871
872 The room statistics are essentially unused in v1.3.1 (in future versions of
873 Synapse, they will be used to populate the room directory), so there should
874 be no loss of functionality. However, the statistics engine will write errors
875 to the logs, which can be avoided by setting the following in
876 `homeserver.yaml`:
877
878 .. code:: yaml
879
880 stats:
881 enabled: false
882
883 Don't forget to re-enable it when you upgrade again, in preparation for its
884 use in the room directory!
885
886 Upgrading to v1.2.0
887 ===================
888
889 Some counter metrics have been renamed, with the old names deprecated. See
890 `the metrics documentation <docs/metrics-howto.md#renaming-of-metrics--deprecation-of-old-names-in-12>`_
891 for details.
892
893 Upgrading to v1.1.0
894 ===================
895
896 Synapse v1.1.0 removes support for older Python and PostgreSQL versions, as
897 outlined in `our deprecation notice <https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x>`_.
898
899 Minimum Python Version
900 ----------------------
901
902 Synapse v1.1.0 has a minimum Python requirement of Python 3.5. Python 3.6 or
903 Python 3.7 are recommended as they have improved internal string handling,
904 significantly reducing memory usage.
905
906 If you use current versions of the Matrix.org-distributed Debian packages or
907 Docker images, action is not required.
908
909 If you install Synapse in a Python virtual environment, please see "Upgrading to
910 v0.34.0" for notes on setting up a new virtualenv under Python 3.
911
912 Minimum PostgreSQL Version
913 --------------------------
914
915 If using PostgreSQL under Synapse, you will need to use PostgreSQL 9.5 or above.
916 Please see the
917 `PostgreSQL documentation <https://www.postgresql.org/docs/11/upgrading.html>`_
918 for more details on upgrading your database.
919
920 Upgrading to v1.0
921 =================
922
923 Validation of TLS certificates
924 ------------------------------
925
926 Synapse v1.0 is the first release to enforce
927 validation of TLS certificates for the federation API. It is therefore
928 essential that your certificates are correctly configured. See the `FAQ
929 <docs/MSC1711_certificates_FAQ.md>`_ for more information.
930
931 Note, v1.0 installations will also no longer be able to federate with servers
932 that have not correctly configured their certificates.
933
934 In rare cases, it may be desirable to disable certificate checking: for
935 example, it might be essential to be able to federate with a given legacy
936 server in a closed federation. This can be done in one of two ways:-
937
938 * Configure the global switch ``federation_verify_certificates`` to ``false``.
939 * Configure a whitelist of server domains to trust via ``federation_certificate_verification_whitelist``.
940
941 See the `sample configuration file <docs/sample_config.yaml>`_
942 for more details on these settings.
943
944 Email
945 -----
946 When a user requests a password reset, Synapse will send an email to the
947 user to confirm the request.
948
949 Previous versions of Synapse delegated the job of sending this email to an
950 identity server. If the identity server was somehow malicious or became
951 compromised, it would be theoretically possible to hijack an account through
952 this means.
953
954 Therefore, by default, Synapse v1.0 will send the confirmation email itself. If
955 Synapse is not configured with an SMTP server, password reset via email will be
956 disabled.
957
958 To configure an SMTP server for Synapse, modify the configuration section
959 headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
960 and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
961 ``smtp_pass``, and ``require_transport_security``.
962
963 If you are absolutely certain that you wish to continue using an identity
964 server for password resets, set ``trust_identity_server_for_password_resets`` to ``true``.
965
966 See the `sample configuration file <docs/sample_config.yaml>`_
967 for more details on these settings.
968
969 New email templates
970 -------------------
971 Some new templates have been added to the default template directory for the purpose of the
972 homeserver sending its own password reset emails. If you have configured a custom
973 ``template_dir`` in your Synapse config, these files will need to be added.
974
975 ``password_reset.html`` and ``password_reset.txt`` are HTML and plain text templates
976 respectively that contain the contents of what will be emailed to the user upon attempting to
977 reset their password via email. ``password_reset_success.html`` and
978 ``password_reset_failure.html`` are HTML files that the content of which (assuming no redirect
979 URL is set) will be shown to the user after they attempt to click the link in the email sent
980 to them.
981
982 Upgrading to v0.99.0
983 ====================
984
985 Please be aware that, before Synapse v1.0 is released around March 2019, you
986 will need to replace any self-signed certificates with those verified by a
987 root CA. Information on how to do so can be found at `the ACME docs
988 <docs/ACME.md>`_.
989
990 For more information on configuring TLS certificates see the `FAQ <docs/MSC1711_certificates_FAQ.md>`_.
991
992 Upgrading to v0.34.0
993 ====================
994
995 1. This release is the first to fully support Python 3. Synapse will now run on
996 Python versions 3.5, or 3.6 (as well as 2.7). We recommend switching to
997 Python 3, as it has been shown to give performance improvements.
998
999 For users who have installed Synapse into a virtualenv, we recommend doing
1000 this by creating a new virtualenv. For example::
1001
1002 virtualenv -p python3 ~/synapse/env3
1003 source ~/synapse/env3/bin/activate
1004 pip install matrix-synapse
1005
1006 You can then start synapse as normal, having activated the new virtualenv::
1007
1008 cd ~/synapse
1009 source env3/bin/activate
1010 synctl start
1011
1012 Users who have installed from distribution packages should see the relevant
1013 package documentation. See below for notes on Debian packages.
1014
1015 * When upgrading to Python 3, you **must** make sure that your log files are
1016 configured as UTF-8, by adding ``encoding: utf8`` to the
1017 ``RotatingFileHandler`` configuration (if you have one) in your
1018 ``<server>.log.config`` file. For example, if your ``log.config`` file
1019 contains::
1020
1021 handlers:
1022 file:
1023 class: logging.handlers.RotatingFileHandler
1024 formatter: precise
1025 filename: homeserver.log
1026 maxBytes: 104857600
1027 backupCount: 10
1028 filters: [context]
1029 console:
1030 class: logging.StreamHandler
1031 formatter: precise
1032 filters: [context]
1033
1034 Then you should update this to be::
1035
1036 handlers:
1037 file:
1038 class: logging.handlers.RotatingFileHandler
1039 formatter: precise
1040 filename: homeserver.log
1041 maxBytes: 104857600
1042 backupCount: 10
1043 filters: [context]
1044 encoding: utf8
1045 console:
1046 class: logging.StreamHandler
1047 formatter: precise
1048 filters: [context]
1049
1050 There is no need to revert this change if downgrading to Python 2.
1051
1052 We are also making available Debian packages which will run Synapse on
1053 Python 3. You can switch to these packages with ``apt-get install
1054 matrix-synapse-py3``, however, please read `debian/NEWS
1055 <https://github.com/matrix-org/synapse/blob/release-v0.34.0/debian/NEWS>`_
1056 before doing so. The existing ``matrix-synapse`` packages will continue to
1057 use Python 2 for the time being.
1058
1059 2. This release removes the ``riot.im`` from the default list of trusted
1060 identity servers.
1061
1062 If ``riot.im`` is in your homeserver's list of
1063 ``trusted_third_party_id_servers``, you should remove it. It was added in
1064 case a hypothetical future identity server was put there. If you don't
1065 remove it, users may be unable to deactivate their accounts.
1066
1067 3. This release no longer installs the (unmaintained) Matrix Console web client
1068 as part of the default installation. It is possible to re-enable it by
1069 installing it separately and setting the ``web_client_location`` config
1070 option, but please consider switching to another client.
1071
1072 Upgrading to v0.33.7
1073 ====================
1074
1075 This release removes the example email notification templates from
1076 ``res/templates`` (they are now internal to the python package). This should
1077 only affect you if you (a) deploy your Synapse instance from a git checkout or
1078 a github snapshot URL, and (b) have email notifications enabled.
1079
1080 If you have email notifications enabled, you should ensure that
1081 ``email.template_dir`` is either configured to point at a directory where you
1082 have installed customised templates, or leave it unset to use the default
1083 templates.
1084
1085 Upgrading to v0.27.3
1086 ====================
1087
1088 This release expands the anonymous usage stats sent if the opt-in
1089 ``report_stats`` configuration is set to ``true``. We now capture RSS memory
1090 and cpu use at a very coarse level. This requires administrators to install
1091 the optional ``psutil`` python module.
1092
1093 We would appreciate it if you could assist by ensuring this module is available
1094 and ``report_stats`` is enabled. This will let us see if performance changes to
1095 synapse are having an impact to the general community.
1096
1097 Upgrading to v0.15.0
1098 ====================
1099
1100 If you want to use the new URL previewing API (/_matrix/media/r0/preview_url)
1101 then you have to explicitly enable it in the config and update your
1102 dependencies. See README.rst for details.
1103
1104
1105 Upgrading to v0.11.0
1106 ====================
1107
1108 This release includes the option to send anonymous usage stats to matrix.org,
1109 and requires that administrators explicitly opt in or out by setting the
1110 ``report_stats`` option to either ``true`` or ``false``.
1111
1112 We would really appreciate it if you could help our project out by reporting
1113 anonymized usage statistics from your homeserver. Only very basic aggregate
1114 data (e.g. number of users) will be reported, but it helps us to track the
1115 growth of the Matrix community, and helps us to make Matrix a success, as well
1116 as to convince other networks that they should peer with us.
1117
1118
1119 Upgrading to v0.9.0
1120 ===================
1121
1122 Application services have had a breaking API change in this version.
1123
1124 They can no longer register themselves with a home server using the AS HTTP API. This
1125 decision was made because a compromised application service with free rein to register
1126 any regex in effect grants full read/write access to the home server if a regex of ``.*``
1127 is used. An attack where a compromised AS re-registers itself with ``.*`` was deemed too
1128 big of a security risk to ignore, and so the ability to register with the HS remotely has
1129 been removed.
1130
1131 It has been replaced by specifying a list of application service registrations in
1132 ``homeserver.yaml``::
1133
1134 app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]
1135
1136 Where ``registration-01.yaml`` looks like::
1137
1138 url: <String> # e.g. "https://my.application.service.com"
1139 as_token: <String>
1140 hs_token: <String>
1141 sender_localpart: <String> # This is a new field which denotes the user_id localpart when using the AS token
1142 namespaces:
1143 users:
1144 - exclusive: <Boolean>
1145 regex: <String> # e.g. "@prefix_.*"
1146 aliases:
1147 - exclusive: <Boolean>
1148 regex: <String>
1149 rooms:
1150 - exclusive: <Boolean>
1151 regex: <String>
1152
1153 Upgrading to v0.8.0
1154 ===================
1155
1156 Servers which use captchas will need to add their public key to::
1157
1158 static/client/register/register_config.js
1159
1160 window.matrixRegistrationConfig = {
1161 recaptcha_public_key: "YOUR_PUBLIC_KEY"
1162 };
1163
1164 This is required in order to support registration fallback (typically used on
1165 mobile devices).
1166
1167
1168 Upgrading to v0.7.0
1169 ===================
1170
1171 New dependencies are:
1172
1173 - pydenticon
1174 - simplejson
1175 - syutil
1176 - matrix-angular-sdk
1177
1178 To pull in these dependencies in a virtual env, run::
1179
1180 python synapse/python_dependencies.py | xargs -n 1 pip install
1181
1182 Upgrading to v0.6.0
1183 ===================
1184
1185 To pull in new dependencies, run::
1186
1187 python setup.py develop --user
1188
1189 This update includes a change to the database schema. To upgrade you first need
1190 to upgrade the database by running::
1191
1192 python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>
1193
1194 Where `<db>` is the location of the database, `<server_name>` is the
1195 server name as specified in the synapse configuration, and `<signing_key>` is
1196 the location of the signing key as specified in the synapse configuration.
1197
1198 This may take some time to complete. Failures of signatures and content hashes
1199 can safely be ignored.
1200
1201
1202 Upgrading to v0.5.1
1203 ===================
1204
1205 Depending on precisely when you installed v0.5.0 you may have ended up with
1206 a stale release of the reference matrix webclient installed as a python module.
1207 To uninstall it and ensure you are depending on the latest module, please run::
1208
1209 $ pip uninstall syweb
1210
1211 Upgrading to v0.5.0
1212 ===================
1213
1214 The webclient has been split out into a separate repository/package in this
1215 release. Before you restart your homeserver you will need to pull in the
1216 webclient package by running::
1217
1218 python setup.py develop --user
1219
1220 This release completely changes the database schema and so requires upgrading
1221 it before starting the new version of the homeserver.
1222
1223 The script "database-prepare-for-0.5.0.sh" should be used to upgrade the
1224 database. This will save all user information, such as logins and profiles,
1225 but will otherwise purge the database. This includes messages, which
1226 rooms the home server was a member of and room alias mappings.
1227
1228 If you would like to keep your history, please take a copy of your database
1229 file and ask for help in #matrix:matrix.org. The upgrade process is,
1230 unfortunately, non trivial and requires human intervention to resolve any
1231 resulting conflicts during the upgrade process.
1232
1233 Before running the command the homeserver should be first completely
1234 shutdown. To run it, simply specify the location of the database, e.g.:
1235
1236 ./scripts/database-prepare-for-0.5.0.sh "homeserver.db"
1237
1238 Once this has successfully completed it will be safe to restart the
1239 homeserver. You may notice that the homeserver takes a few seconds longer to
1240 restart than usual as it reinitializes the database.
1241
1242 On startup of the new version, users can either rejoin remote rooms using room
1243 aliases or by being reinvited. Alternatively, if any other homeserver sends a
1244 message to a room that the homeserver was previously in, the local HS will
1245 automatically rejoin the room.
1246
1247 Upgrading to v0.4.0
1248 ===================
1249
1250 This release needs an updated syutil version. Run::
1251
1252 python setup.py develop
1253
1254 You will also need to upgrade your configuration as the signing key format has
1255 changed. Run::
1256
1257 python -m synapse.app.homeserver --config-path <CONFIG> --generate-config
1258
1259
1260 Upgrading to v0.3.0
1261 ===================
1262
1263 This registration API now closely matches the login API. This introduces a bit
1264 more backwards and forwards between the HS and the client, but this improves
1265 the overall flexibility of the API. You can now GET on /register to retrieve a list
1266 of valid registration flows. Upon choosing one, they are submitted in the same
1267 way as login, e.g::
1268
1269 {
1270 type: m.login.password,
1271 user: foo,
1272 password: bar
1273 }
1274
1275 The default HS supports 2 flows, with and without Identity Server email
1276 authentication. Enabling captcha on the HS will add in an extra step to all
1277 flows: ``m.login.recaptcha`` which must be completed before you can transition
1278 to the next stage. There is a new login type: ``m.login.email.identity`` which
1279 contains the ``threepidCreds`` key which were previously sent in the original
1280 register request. For more information on this, see the specification.
1281
1282 Web Client
1283 ----------
1284
1285 The VoIP specification has changed between v0.2.0 and v0.3.0. Users should
1286 refresh any browser tabs to get the latest web client code. Users on
1287 v0.2.0 of the web client will not be able to call those on v0.3.0 and
1288 vice versa.
1289
1290
1291 Upgrading to v0.2.0
1292 ===================
1293
1294 The home server now requires setting up of SSL config before it can run. To
1295 automatically generate default config use::
1296
1297 $ python synapse/app/homeserver.py \
1298 --server-name machine.my.domain.name \
1299 --bind-port 8448 \
1300 --config-path homeserver.config \
1301 --generate-config
1302
1303 This config can be edited if desired, for example to specify a different SSL
1304 certificate to use. Once done you can run the home server using::
1305
1306 $ python synapse/app/homeserver.py --config-path homeserver.config
1307
1308 See the README.rst for more information.
1309
1310 Also note that some config options have been renamed, including:
1311
1312 - "host" to "server-name"
1313 - "database" to "database-path"
1314 - "port" to "bind-port" and "unsecure-port"
1315
1316
1317 Upgrading to v0.0.1
1318 ===================
1319
1320 This release completely changes the database schema and so requires upgrading
1321 it before starting the new version of the homeserver.
1322
1323 The script "database-prepare-for-0.0.1.sh" should be used to upgrade the
1324 database. This will save all user information, such as logins and profiles,
1325 but will otherwise purge the database. This includes messages, which
1326 rooms the home server was a member of and room alias mappings.
1327
1328 Before running the command the homeserver should be first completely
1329 shutdown. To run it, simply specify the location of the database, e.g.:
1330
1331 ./scripts/database-prepare-for-0.0.1.sh "homeserver.db"
1332
1333 Once this has successfully completed it will be safe to restart the
1334 homeserver. You may notice that the homeserver takes a few seconds longer to
1335 restart than usual as it reinitializes the database.
1336
1337 On startup of the new version, users can either rejoin remote rooms using room
1338 aliases or by being reinvited. Alternatively, if any other homeserver sends a
1339 message to a room that the homeserver was previously in, the local HS will
1340 automatically rejoin the room.
6 The markdown source is available in `docs/upgrade.md <docs/upgrade.md>`_.
11 This is a setup for managing synapse with a user contributed systemd unit
22 file. It provides a `matrix-synapse` systemd unit file that should be tailored
33 to accommodate your installation in accordance with the installation
4 instructions provided in [installation instructions](../../INSTALL.md).
4 instructions provided in
5 [installation instructions](https://matrix-org.github.io/synapse/latest/setup/installation.html).
56
67 ## Setup
78 1. Under the service section, ensure the `User` variable matches which user
0 matrix-synapse-py3 (1.38.0) stable; urgency=medium
1
2 * New synapse release 1.38.0.
3
4 -- Synapse Packaging team <packages@matrix.org> Tue, 13 Jul 2021 13:20:56 +0100
5
6 matrix-synapse-py3 (1.38.0rc3) prerelease; urgency=medium
7
8 [ Erik Johnston ]
9 * Add synapse_review_recent_signups script
10
11 [ Synapse Packaging team ]
12 * New synapse release 1.38.0rc3.
13
14 -- Synapse Packaging team <packages@matrix.org> Tue, 13 Jul 2021 11:53:56 +0100
15
016 matrix-synapse-py3 (1.37.1) stable; urgency=medium
117
218 * New synapse release 1.37.1.
0 .\" generated with Ronn/v0.7.3
1 .\" http://github.com/rtomayko/ronn/tree/0.7.3
2 .
3 .TH "HASH_PASSWORD" "1" "February 2017" "" ""
4 .
0 .\" generated with Ronn-NG/v0.8.0
1 .\" http://github.com/apjanke/ronn-ng/tree/0.8.0
2 .TH "HASH_PASSWORD" "1" "July 2021" "" ""
53 .SH "NAME"
64 \fBhash_password\fR \- Calculate the hash of a new password, so that passwords can be reset
7 .
85 .SH "SYNOPSIS"
96 \fBhash_password\fR [\fB\-p\fR|\fB\-\-password\fR [password]] [\fB\-c\fR|\fB\-\-config\fR \fIfile\fR]
10 .
117 .SH "DESCRIPTION"
128 \fBhash_password\fR calculates the hash of a supplied password using bcrypt\.
13 .
149 .P
1510 \fBhash_password\fR takes a password as a parameter either on the command line or the \fBSTDIN\fR if not supplied\.
16 .
1711 .P
1812 It accepts a YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB10\fR\.
19 .
2013 .P
2114 The hashed password is written on the \fBSTDOUT\fR\.
22 .
2315 .SH "FILES"
2416 A sample YAML file accepted by \fBhash_password\fR is described below:
25 .
2617 .P
2718 bcrypt_rounds: 17 password_config: pepper: "random hashing pepper"
28 .
2919 .SH "OPTIONS"
30 .
3120 .TP
3221 \fB\-p\fR, \fB\-\-password\fR
3322 Read the password from the command line if [password] is supplied\. If not, prompt the user and read the password from the \fBSTDIN\fR\. It is not recommended to type the password on the command line directly\. Use the STDIN instead\.
34 .
3523 .TP
3624 \fB\-c\fR, \fB\-\-config\fR
3725 Read the supplied YAML \fIfile\fR containing the options \fBbcrypt_rounds\fR and the \fBpassword_config\fR section containing the \fBpepper\fR value\.
38 .
3926 .SH "EXAMPLES"
4027 Hash from the command line:
41 .
4228 .IP "" 4
43 .
4429 .nf
45
4630 $ hash_password \-p "p@ssw0rd"
4731 $2b$12$VJNqWQYfsWTEwcELfoSi4Oa8eA17movHqqi8\.X8fWFpum7SxZ9MFe
48 .
4932 .fi
50 .
5133 .IP "" 0
52 .
5334 .P
5435 Hash from the STDIN:
55 .
5636 .IP "" 4
57 .
5837 .nf
59
6038 $ hash_password
6139 Password:
6240 Confirm password:
6341 $2b$12$AszlvfmJl2esnyhmn8m/kuR2tdXgROWtWxnX\.rcuAbM8ErLoUhybG
64 .
6542 .fi
66 .
6743 .IP "" 0
68 .
6944 .P
7045 Using a config file:
71 .
7246 .IP "" 4
73 .
7447 .nf
75
7648 $ hash_password \-c config\.yml
7749 Password:
7850 Confirm password:
7951 $2b$12$CwI\.wBNr\.w3kmiUlV3T5s\.GT2wH7uebDCovDrCOh18dFedlANK99O
80 .
8152 .fi
82 .
8353 .IP "" 0
84 .
8554 .SH "COPYRIGHT"
86 This man page was written by Rahul De <\fIrahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
87 .
55 This man page was written by Rahul De <\fI\%mailto:rahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
8856 .SH "SEE ALSO"
89 synctl(1), synapse_port_db(1), register_new_matrix_user(1)
57 synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
6565
6666 ## SEE ALSO
6767
68 synctl(1), synapse_port_db(1), register_new_matrix_user(1)
68 synctl(1), synapse_port_db(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
00 debian/hash_password.1
11 debian/register_new_matrix_user.1
22 debian/synapse_port_db.1
3 debian/synapse_review_recent_signups.1
34 debian/synctl.1
00 opt/venvs/matrix-synapse/bin/hash_password usr/bin/hash_password
11 opt/venvs/matrix-synapse/bin/register_new_matrix_user usr/bin/register_new_matrix_user
22 opt/venvs/matrix-synapse/bin/synapse_port_db usr/bin/synapse_port_db
3 opt/venvs/matrix-synapse/bin/synapse_review_recent_signups usr/bin/synapse_review_recent_signups
34 opt/venvs/matrix-synapse/bin/synctl usr/bin/synctl
0 .\" generated with Ronn/v0.7.3
1 .\" http://github.com/rtomayko/ronn/tree/0.7.3
2 .
3 .TH "REGISTER_NEW_MATRIX_USER" "1" "February 2017" "" ""
4 .
0 .\" generated with Ronn-NG/v0.8.0
1 .\" http://github.com/apjanke/ronn-ng/tree/0.8.0
2 .TH "REGISTER_NEW_MATRIX_USER" "1" "July 2021" "" ""
53 .SH "NAME"
64 \fBregister_new_matrix_user\fR \- Used to register new users with a given home server when registration has been disabled
7 .
85 .SH "SYNOPSIS"
9 \fBregister_new_matrix_user\fR options\.\.\.
10 .
6 \fBregister_new_matrix_user\fR options\|\.\|\.\|\.
117 .SH "DESCRIPTION"
128 \fBregister_new_matrix_user\fR registers new users with a given home server when registration has been disabled\. For this to work, the home server must be configured with the \'registration_shared_secret\' option set\.
13 .
149 .P
1510 This accepts the user credentials such as the username and password, and whether the user is an admin or not, and registers the user onto the homeserver database\. Also, a YAML file containing the shared secret can be provided\. If not, the shared secret can be provided via the command line\.
16 .
1711 .P
1812 By default it assumes the home server URL to be \fBhttps://localhost:8448\fR\. This can be changed via the \fBserver_url\fR command line option\.
19 .
2013 .SH "FILES"
2114 A sample YAML file accepted by \fBregister_new_matrix_user\fR is described below:
22 .
2315 .IP "" 4
24 .
2516 .nf
26
2717 registration_shared_secret: "s3cr3t"
28 .
2918 .fi
30 .
3119 .IP "" 0
32 .
3320 .SH "OPTIONS"
34 .
3521 .TP
3622 \fB\-u\fR, \fB\-\-user\fR
3723 Local part of the new user\. Will prompt if omitted\.
38 .
3924 .TP
4025 \fB\-p\fR, \fB\-\-password\fR
4126 New password for user\. Will prompt if omitted\. Supplying the password on the command line is not recommended\. Use the STDIN instead\.
42 .
4327 .TP
4428 \fB\-a\fR, \fB\-\-admin\fR
4529 Register new user as an admin\. Will prompt if omitted\.
46 .
4730 .TP
4831 \fB\-c\fR, \fB\-\-config\fR
4932 Path to server config file containing the shared secret\.
50 .
5133 .TP
5234 \fB\-k\fR, \fB\-\-shared\-secret\fR
5335 Shared secret as defined in server config file\. This is an optional parameter as it can be also supplied via the YAML file\.
54 .
5536 .TP
5637 \fBserver_url\fR
5738 URL of the home server\. Defaults to \'https://localhost:8448\'\.
58 .
5939 .SH "EXAMPLES"
60 .
6140 .nf
62
6341 $ register_new_matrix_user \-u user1 \-p p@ssword \-a \-c config\.yaml
64 .
6542 .fi
66 .
6743 .SH "COPYRIGHT"
68 This man page was written by Rahul De <\fIrahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
69 .
44 This man page was written by Rahul De <\fI\%mailto:rahulde@swecha\.net\fR> for Debian GNU/Linux distribution\.
7045 .SH "SEE ALSO"
71 synctl(1), synapse_port_db(1), hash_password(1)
46 synctl(1), synapse_port_db(1), hash_password(1), synapse_review_recent_signups(1)
5757
5858 ## SEE ALSO
5959
60 synctl(1), synapse_port_db(1), hash_password(1)
60 synctl(1), synapse_port_db(1), hash_password(1), synapse_review_recent_signups(1)
0 .\" generated with Ronn/v0.7.3
1 .\" http://github.com/rtomayko/ronn/tree/0.7.3
2 .
3 .TH "SYNAPSE_PORT_DB" "1" "February 2017" "" ""
4 .
0 .\" generated with Ronn-NG/v0.8.0
1 .\" http://github.com/apjanke/ronn-ng/tree/0.8.0
2 .TH "SYNAPSE_PORT_DB" "1" "July 2021" "" ""
53 .SH "NAME"
64 \fBsynapse_port_db\fR \- A script to port an existing synapse SQLite database to a new PostgreSQL database\.
7 .
85 .SH "SYNOPSIS"
96 \fBsynapse_port_db\fR [\-v] \-\-sqlite\-database=\fIdbfile\fR \-\-postgres\-config=\fIyamlconfig\fR [\-\-curses] [\-\-batch\-size=\fIbatch\-size\fR]
10 .
117 .SH "DESCRIPTION"
128 \fBsynapse_port_db\fR ports an existing synapse SQLite database to a new PostgreSQL database\.
13 .
149 .P
1510 SQLite database is specified with \fB\-\-sqlite\-database\fR option and PostgreSQL configuration required to connect to PostgreSQL database is provided using \fB\-\-postgres\-config\fR configuration\. The configuration is specified in YAML format\.
16 .
1711 .SH "OPTIONS"
18 .
1912 .TP
2013 \fB\-v\fR
2114 Print log messages in \fBdebug\fR level instead of \fBinfo\fR level\.
22 .
2315 .TP
2416 \fB\-\-sqlite\-database\fR
2517 The snapshot of the SQLite database file\. This must not be currently used by a running synapse server\.
26 .
2718 .TP
2819 \fB\-\-postgres\-config\fR
2920 The database config file for the PostgreSQL database\.
30 .
3121 .TP
3222 \fB\-\-curses\fR
3323 Display a curses based progress UI\.
34 .
3524 .SH "CONFIG FILE"
3625 The postgres configuration file must be a valid YAML file with the following options\.
37 .
38 .IP "\(bu" 4
26 .IP "\[ci]" 4
3927 \fBdatabase\fR: Database configuration section\. This section header can be ignored and the options below may be specified as top level keys\.
40 .
41 .IP "\(bu" 4
28 .IP "\[ci]" 4
4229 \fBname\fR: Connector to use when connecting to the database\. This value must be \fBpsycopg2\fR\.
43 .
44 .IP "\(bu" 4
30 .IP "\[ci]" 4
4531 \fBargs\fR: DB API 2\.0 compatible arguments to send to the \fBpsycopg2\fR module\.
46 .
47 .IP "\(bu" 4
32 .IP "\[ci]" 4
4833 \fBdbname\fR \- the database name
49 .
50 .IP "\(bu" 4
34 .IP "\[ci]" 4
5135 \fBuser\fR \- user name used to authenticate
52 .
53 .IP "\(bu" 4
36 .IP "\[ci]" 4
5437 \fBpassword\fR \- password used to authenticate
55 .
56 .IP "\(bu" 4
38 .IP "\[ci]" 4
5739 \fBhost\fR \- database host address (defaults to UNIX socket if not provided)
58 .
59 .IP "\(bu" 4
40 .IP "\[ci]" 4
6041 \fBport\fR \- connection port number (defaults to 5432 if not provided)
61 .
6242 .IP "" 0
6343
64 .
65 .IP "\(bu" 4
44 .IP "\[ci]" 4
6645 \fBsynchronous_commit\fR: Optional\. Default is True\. If the value is \fBFalse\fR, enable asynchronous commit and don\'t wait for the server to call fsync before ending the transaction\. See: https://www\.postgresql\.org/docs/current/static/wal\-async\-commit\.html
67 .
6846 .IP "" 0
6947
70 .
7148 .IP "" 0
72 .
7349 .P
7450 Following example illustrates the configuration file format\.
75 .
7651 .IP "" 4
77 .
7852 .nf
79
8053 database:
8154 name: psycopg2
8255 args:
8558 password: ORohmi9Eet=ohphi
8659 host: localhost
8760 synchronous_commit: false
88 .
8961 .fi
90 .
9162 .IP "" 0
92 .
9363 .SH "COPYRIGHT"
94 This man page was written by Sunil Mohan Adapa <\fIsunil@medhas\.org\fR> for Debian GNU/Linux distribution\.
95 .
64 This man page was written by Sunil Mohan Adapa <\fI\%mailto:sunil@medhas\.org\fR> for Debian GNU/Linux distribution\.
9665 .SH "SEE ALSO"
97 synctl(1), hash_password(1), register_new_matrix_user(1)
66 synctl(1), hash_password(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
4646 * `args`:
4747 DB API 2.0 compatible arguments to send to the `psycopg2` module.
4848
49 * `dbname` - the database name
49 * `dbname` - the database name
5050
5151 * `user` - user name used to authenticate
5252
5757
5858 * `port` - connection port number (defaults to 5432 if not
5959 provided)
60
60
6161
6262 * `synchronous_commit`:
6363 Optional. Default is True. If the value is `False`, enable
7575 password: ORohmi9Eet=ohphi
7676 host: localhost
7777 synchronous_commit: false
78
78
7979 ## COPYRIGHT
8080
8181 This man page was written by Sunil Mohan Adapa <<sunil@medhas.org>> for
8383
8484 ## SEE ALSO
8585
86 synctl(1), hash_password(1), register_new_matrix_user(1)
86 synctl(1), hash_password(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
0 .\" generated with Ronn-NG/v0.8.0
1 .\" http://github.com/apjanke/ronn-ng/tree/0.8.0
2 .TH "SYNAPSE_REVIEW_RECENT_SIGNUPS" "1" "July 2021" "" ""
3 .SH "NAME"
4 \fBsynapse_review_recent_signups\fR \- Print users that have recently registered on Synapse
5 .SH "SYNOPSIS"
6 \fBsynapse_review_recent_signups\fR \fB\-c\fR|\fB\-\-config\fR \fIfile\fR [\fB\-s\fR|\fB\-\-since\fR \fIperiod\fR] [\fB\-e\fR|\fB\-\-exclude\-emails\fR] [\fB\-u\fR|\fB\-\-only\-users\fR]
7 .SH "DESCRIPTION"
8 \fBsynapse_review_recent_signups\fR prints out recently registered users on a Synapse server, as well as some basic information about the user\.
9 .P
10 \fBsynapse_review_recent_signups\fR must be supplied with the config of the Synapse server, so that it can fetch the database config and connect to the database\.
11 .SH "OPTIONS"
12 .TP
13 \fB\-c\fR, \fB\-\-config\fR
14 The config file(s) used by the Synapse server\.
15 .TP
16 \fB\-s\fR, \fB\-\-since\fR
17 How far back to search for newly registered users\. Defaults to 7d, i\.e\. up to seven days in the past\. Valid units are \'s\', \'m\', \'h\', \'d\', \'w\', or \'y\'\.
18 .TP
19 \fB\-e\fR, \fB\-\-exclude\-emails\fR
20 Do not print out users that have validated emails associated with their account\.
21 .TP
22 \fB\-u\fR, \fB\-\-only\-users\fR
23 Only print out the user IDs of recently registered users, without any additional information
24 .SH "SEE ALSO"
25 synctl(1), synapse_port_db(1), register_new_matrix_user(1), hash_password(1)
0 synapse_review_recent_signups(1) -- Print users that have recently registered on Synapse
1 ========================================================================================
2
3 ## SYNOPSIS
4
5 `synapse_review_recent_signups` `-c`|`--config` <file> [`-s`|`--since` <period>] [`-e`|`--exclude-emails`] [`-u`|`--only-users`]
6
7 ## DESCRIPTION
8
9 **synapse_review_recent_signups** prints out recently registered users on a
10 Synapse server, as well as some basic information about the user.
11
12 `synapse_review_recent_signups` must be supplied with the config of the Synapse
13 server, so that it can fetch the database config and connect to the database.
14
15
16 ## OPTIONS
17
18 * `-c`, `--config`:
19 The config file(s) used by the Synapse server.
20
21 * `-s`, `--since`:
22 How far back to search for newly registered users. Defaults to 7d, i.e. up
23 to seven days in the past. Valid units are 's', 'm', 'h', 'd', 'w', or 'y'.
24
25 * `-e`, `--exclude-emails`:
26 Do not print out users that have validated emails associated with their
27 account.
28
29 * `-u`, `--only-users`:
30 Only print out the user IDs of recently registered users, without any
31 additional information
32
33
34 ## SEE ALSO
35
36 synctl(1), synapse_port_db(1), register_new_matrix_user(1), hash_password(1)
0 .\" generated with Ronn/v0.7.3
1 .\" http://github.com/rtomayko/ronn/tree/0.7.3
2 .
3 .TH "SYNCTL" "1" "February 2017" "" ""
4 .
0 .\" generated with Ronn-NG/v0.8.0
1 .\" http://github.com/apjanke/ronn-ng/tree/0.8.0
2 .TH "SYNCTL" "1" "July 2021" "" ""
53 .SH "NAME"
64 \fBsynctl\fR \- Synapse server control interface
7 .
85 .SH "SYNOPSIS"
96 Start, stop or restart synapse server\.
10 .
117 .P
128 \fBsynctl\fR {start|stop|restart} [configfile] [\-w|\-\-worker=\fIWORKERCONFIG\fR] [\-a|\-\-all\-processes=\fIWORKERCONFIGDIR\fR]
13 .
149 .SH "DESCRIPTION"
1510 \fBsynctl\fR can be used to start, stop or restart Synapse server\. The control operation can be done on all processes or a single worker process\.
16 .
1711 .SH "OPTIONS"
18 .
1912 .TP
2013 \fBaction\fR
2114 The value of action should be one of \fBstart\fR, \fBstop\fR or \fBrestart\fR\.
22 .
2315 .TP
2416 \fBconfigfile\fR
2517 Optional path of the configuration file to use\. Default value is \fBhomeserver\.yaml\fR\. The configuration file must exist for the operation to succeed\.
26 .
2718 .TP
2819 \fB\-w\fR, \fB\-\-worker\fR:
29 .
30 .IP
31 Perform start, stop or restart operations on a single worker\. Incompatible with \fB\-a\fR|\fB\-\-all\-processes\fR\. Value passed must be a valid worker\'s configuration file\.
32 .
20
3321 .TP
3422 \fB\-a\fR, \fB\-\-all\-processes\fR:
35 .
36 .IP
37 Perform start, stop or restart operations on all the workers in the given directory and the main synapse process\. Incompatible with \fB\-w\fR|\fB\-\-worker\fR\. Value passed must be a directory containing valid work configuration files\. All files ending with \fB\.yaml\fR extension shall be considered as configuration files and all other files in the directory are ignored\.
38 .
23
3924 .SH "CONFIGURATION FILE"
4025 Configuration file may be generated as follows:
41 .
4226 .IP "" 4
43 .
4427 .nf
45
4628 $ python \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
47 .
4829 .fi
49 .
5030 .IP "" 0
51 .
5231 .SH "ENVIRONMENT"
53 .
5432 .TP
5533 \fBSYNAPSE_CACHE_FACTOR\fR
56 Synapse\'s architecture is quite RAM hungry currently \- a lot of recent room data and metadata is deliberately cached in RAM in order to speed up common requests\. This will be improved in future, but for now the easiest way to either reduce the RAM usage (at the risk of slowing things down) is to set the SYNAPSE_CACHE_FACTOR environment variable\. Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1\.0 will max out at around 3\-4GB of resident memory \- this is what we currently run the matrix\.org on\. The default setting is currently 0\.1, which is probably around a ~700MB footprint\. You can dial it down further to 0\.02 if desired, which targets roughly ~512MB\. Conversely you can dial it up if you need performance for lots of users and have a box with a lot of RAM\.
57 .
34 Synapse\'s architecture is quite RAM hungry currently \- we deliberately cache a lot of recent room data and metadata in RAM in order to speed up common requests\. We\'ll improve this in the future, but for now the easiest way to either reduce the RAM usage (at the risk of slowing things down) is to set the almost\-undocumented \fBSYNAPSE_CACHE_FACTOR\fR environment variable\. The default is 0\.5, which can be decreased to reduce RAM usage in memory constrained environments, or increased if performance starts to degrade\.
35 .IP
36 However, degraded performance due to a low cache factor, common on machines with slow disks, often leads to explosions in memory use due to backlogged requests\. In this case, reducing the cache factor will make things worse\. Instead, try increasing it drastically\. 2\.0 is a good starting value\.
5837 .SH "COPYRIGHT"
59 This man page was written by Sunil Mohan Adapa <\fIsunil@medhas\.org\fR> for Debian GNU/Linux distribution\.
60 .
38 This man page was written by Sunil Mohan Adapa <\fI\%mailto:sunil@medhas\.org\fR> for Debian GNU/Linux distribution\.
6139 .SH "SEE ALSO"
62 synapse_port_db(1), hash_password(1), register_new_matrix_user(1)
40 synapse_port_db(1), hash_password(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
6767
6868 ## SEE ALSO
6969
70 synapse_port_db(1), hash_password(1), register_new_matrix_user(1)
70 synapse_port_db(1), hash_password(1), register_new_matrix_user(1), synapse_review_recent_signups(1)
4444 ```
4545
4646 For information on picking a suitable server name, see
47 https://github.com/matrix-org/synapse/blob/master/INSTALL.md.
47 https://matrix-org.github.io/synapse/latest/setup/installation.html.
4848
4949 The above command will generate a `homeserver.yaml` in (typically)
5050 `/var/lib/docker/volumes/synapse-data/_data`. You should check this file, and
138138 https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
139139
140140 For more information on enabling TLS support in synapse itself, see
141 https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of
141 https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates. Of
142142 course, you will need to expose the TLS port from the container with a `-p`
143143 argument to `docker run`.
144144
77 #
88 # It is *not* intended to be copied and used as the basis for a real
99 # homeserver.yaml. Instead, if you are starting from scratch, please generate
10 # a fresh config using Synapse by following the instructions in INSTALL.md.
10 # a fresh config using Synapse by following the instructions in
11 # https://matrix-org.github.io/synapse/latest/setup/installation.html.
1112
1213 # Configuration options that take a time period can be set using a number
1314 # followed by a letter. Letters have the following meanings:
1313 project.
1414
1515 If you are setting up a server from scratch you almost certainly should look at
16 the [installation guide](../INSTALL.md) instead.
16 the [installation guide](setup/installation.md) instead.
1717
1818 ## Introduction
1919 The goal of Synapse 0.99.0 is to act as a stepping stone to Synapse 1.0.0. It
1010 - [Delegation](delegate.md)
1111
1212 # Upgrading
13 - [Upgrading between Synapse Versions](upgrading/README.md)
13 - [Upgrading between Synapse Versions](upgrade.md)
1414 - [Upgrading from pre-Synapse 1.0](MSC1711_certificates_FAQ.md)
1515
1616 # Usage
3535 "creation_ts": 1560432506,
3636 "appservice_id": null,
3737 "consent_server_notice_sent": null,
38 "consent_version": null
38 "consent_version": null,
39 "external_ids": [
40 {
41 "auth_provider": "<provider1>",
42 "external_id": "<user_id_provider_1>"
43 },
44 {
45 "auth_provider": "<provider2>",
46 "external_id": "<user_id_provider_2>"
47 }
48 ]
3949 }
4050 ```
4151
193193
194194 * ensure the module's callbacks are all asynchronous.
195195 * register their callbacks using one or more of the `register_[...]_callbacks` methods
196 from the `ModuleApi` class in the module's `__init__` method (see [this section](#registering-a-web-resource)
196 from the `ModuleApi` class in the module's `__init__` method (see [this section](#registering-a-callback)
197197 for more info).
198198
199199 Additionally, if the module is packaged with an additional web resource, the module
77 connect to a postgres database.
88
99 - If you are using the [matrix.org debian/ubuntu
10 packages](../INSTALL.md#matrixorg-packages), the necessary python
10 packages](setup/installation.md#matrixorg-packages), the necessary python
1111 library will already be installed, but you will need to ensure the
1212 low-level postgres library is installed, which you can do with
1313 `apt install libpq5`.
1414 - For other pre-built packages, please consult the documentation from
1515 the relevant package.
1616 - If you installed synapse [in a
17 virtualenv](../INSTALL.md#installing-from-source), you can install
17 virtualenv](setup/installation.md#installing-from-source), you can install
1818 the library with:
1919
2020 ~/synapse/env/bin/pip install "matrix-synapse[postgres]"
221221
222222 ```yaml
223223 presence:
224 routing_module:
224 enabled: true
225
226 presence_router:
225227 module: my_module.ExamplePresenceRouter
226228 config:
227229 # Any configuration options for your module. The below is an example.
77 #
88 # It is *not* intended to be copied and used as the basis for a real
99 # homeserver.yaml. Instead, if you are starting from scratch, please generate
10 # a fresh config using Synapse by following the instructions in INSTALL.md.
10 # a fresh config using Synapse by following the instructions in
11 # https://matrix-org.github.io/synapse/latest/setup/installation.html.
1112
1213 # Configuration options that take a time period can be set using a number
1314 # followed by a letter. Letters have the following meanings:
3536
3637 # Server admins can expand Synapse's functionality with external modules.
3738 #
38 # See https://matrix-org.github.io/synapse/develop/modules.html for more
39 # See https://matrix-org.github.io/synapse/latest/modules.html for more
3940 # documentation on how to configure or create custom modules for Synapse.
4041 #
4142 modules:
5758 # In most cases you should avoid using a matrix specific subdomain such as
5859 # matrix.example.com or synapse.example.com as the server_name for the same
5960 # reasons you wouldn't use user@email.example.com as your email address.
60 # See https://github.com/matrix-org/synapse/blob/master/docs/delegate.md
61 # See https://matrix-org.github.io/synapse/latest/delegate.html
6162 # for information on how to host Synapse on a subdomain while preserving
6263 # a clean server_name.
6364 #
252253 # 'all local interfaces'.
253254 #
254255 # type: the type of listener. Normally 'http', but other valid options are:
255 # 'manhole' (see docs/manhole.md),
256 # 'metrics' (see docs/metrics-howto.md),
257 # 'replication' (see docs/workers.md).
256 # 'manhole' (see https://matrix-org.github.io/synapse/latest/manhole.html),
257 # 'metrics' (see https://matrix-org.github.io/synapse/latest/metrics-howto.html),
258 # 'replication' (see https://matrix-org.github.io/synapse/latest/workers.html).
258259 #
259260 # tls: set to true to enable TLS for this listener. Will use the TLS
260261 # key/cert specified in tls_private_key_path / tls_certificate_path.
279280 # client: the client-server API (/_matrix/client), and the synapse admin
280281 # API (/_synapse/admin). Also implies 'media' and 'static'.
281282 #
282 # consent: user consent forms (/_matrix/consent). See
283 # docs/consent_tracking.md.
283 # consent: user consent forms (/_matrix/consent).
284 # See https://matrix-org.github.io/synapse/latest/consent_tracking.html.
284285 #
285286 # federation: the server-server API (/_matrix/federation). Also implies
286287 # 'media', 'keys', 'openid'
289290 #
290291 # media: the media API (/_matrix/media).
291292 #
292 # metrics: the metrics interface. See docs/metrics-howto.md.
293 # metrics: the metrics interface.
294 # See https://matrix-org.github.io/synapse/latest/metrics-howto.html.
293295 #
294296 # openid: OpenID authentication.
295297 #
296 # replication: the HTTP replication API (/_synapse/replication). See
297 # docs/workers.md.
298 # replication: the HTTP replication API (/_synapse/replication).
299 # See https://matrix-org.github.io/synapse/latest/workers.html.
298300 #
299301 # static: static resources under synapse/static (/_matrix/static). (Mostly
300302 # useful for 'fallback authentication'.)
318320 # that unwraps TLS.
319321 #
320322 # If you plan to use a reverse proxy, please see
321 # https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
323 # https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
322324 #
323325 - port: 8008
324326 tls: false
672674 #event_cache_size: 10K
673675
674676 caches:
675 # Controls the global cache factor, which is the default cache factor
676 # for all caches if a specific factor for that cache is not otherwise
677 # set.
678 #
679 # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment
680 # variable. Setting by environment variable takes priority over
681 # setting through the config file.
682 #
683 # Defaults to 0.5, which will half the size of all caches.
684 #
685 #global_factor: 1.0
686
687 # A dictionary of cache name to cache factor for that individual
688 # cache. Overrides the global cache factor for a given cache.
689 #
690 # These can also be set through environment variables comprised
691 # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital
692 # letters and underscores. Setting by environment variable
693 # takes priority over setting through the config file.
694 # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0
695 #
696 # Some caches have '*' and other characters that are not
697 # alphanumeric or underscores. These caches can be named with or
698 # without the special characters stripped. For example, to specify
699 # the cache factor for `*stateGroupCache*` via an environment
700 # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`.
701 #
702 per_cache_factors:
703 #get_users_who_share_room_with_user: 2.0
677 # Controls the global cache factor, which is the default cache factor
678 # for all caches if a specific factor for that cache is not otherwise
679 # set.
680 #
681 # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment
682 # variable. Setting by environment variable takes priority over
683 # setting through the config file.
684 #
685 # Defaults to 0.5, which will halve the size of all caches.
686 #
687 #global_factor: 1.0
688
689 # A dictionary of cache name to cache factor for that individual
690 # cache. Overrides the global cache factor for a given cache.
691 #
692 # These can also be set through environment variables comprised
693 # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital
694 # letters and underscores. Setting by environment variable
695 # takes priority over setting through the config file.
696 # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0
697 #
698 # Some caches have '*' and other characters that are not
699 # alphanumeric or underscores. These caches can be named with or
700 # without the special characters stripped. For example, to specify
701 # the cache factor for `*stateGroupCache*` via an environment
702 # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`.
703 #
704 per_cache_factors:
705 #get_users_who_share_room_with_user: 2.0
706
707 # Controls how long an entry can be in a cache without having been
708 # accessed before being evicted. Defaults to None, which means
709 # entries are never evicted based on time.
710 #
711 #expiry_time: 30m
704712
705713
706714 ## Database ##
740748 # cp_min: 5
741749 # cp_max: 10
742750 #
743 # For more information on using Synapse with Postgres, see `docs/postgres.md`.
751 # For more information on using Synapse with Postgres,
752 # see https://matrix-org.github.io/synapse/latest/postgres.html.
744753 #
745754 database:
746755 name: sqlite3
893902 #
894903 # If you are using a reverse proxy you may also need to set this value in
895904 # your reverse proxy's config. Notably Nginx has a small max body size by default.
896 # See https://matrix-org.github.io/synapse/develop/reverse_proxy.html.
905 # See https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
897906 #
898907 #max_upload_size: 50M
899908
18331842 #
18341843 # module: The class name of a custom mapping module. Default is
18351844 # 'synapse.handlers.oidc.JinjaOidcMappingProvider'.
1836 # See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
1845 # See https://matrix-org.github.io/synapse/latest/sso_mapping_providers.html#openid-mapping-providers
18371846 # for information on implementing a custom mapping provider.
18381847 #
18391848 # config: Configuration for the mapping provider module. This section will
18841893 # - attribute: groups
18851894 # value: "admin"
18861895 #
1887 # See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
1896 # See https://matrix-org.github.io/synapse/latest/openid.html
18881897 # for information on how to configure these options.
18891898 #
18901899 # For backwards compatibility, it is also possible to configure a single OIDC
21622171 # Note that this is a non-standard login type and client support is
21632172 # expected to be non-existent.
21642173 #
2165 # See https://github.com/matrix-org/synapse/blob/master/docs/jwt.md.
2174 # See https://matrix-org.github.io/synapse/latest/jwt.html.
21662175 #
21672176 #jwt_config:
21682177 # Uncomment the following to enable authorization using JSON web
24622471 # ex. LDAP, external tokens, etc.
24632472 #
24642473 # For more information and known implementations, please see
2465 # https://github.com/matrix-org/synapse/blob/master/docs/password_auth_providers.md
2474 # https://matrix-org.github.io/synapse/latest/password_auth_providers.html
24662475 #
24672476 # Note: instances wishing to use SAML or CAS authentication should
24682477 # instead use the `saml2_config` or `cas_config` options,
25642573 #
25652574 # If you set it true, you'll have to rebuild the user_directory search
25662575 # indexes, see:
2567 # https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
2576 # https://matrix-org.github.io/synapse/latest/user_directory.html
25682577 #
25692578 # Uncomment to return search results containing all known users, even if that
25702579 # user does not share a room with the requester.
25842593 # User Consent configuration
25852594 #
25862595 # for detailed instructions, see
2587 # https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md
2596 # https://matrix-org.github.io/synapse/latest/consent_tracking.html
25882597 #
25892598 # Parts of this section are required if enabling the 'consent' resource under
25902599 # 'listeners', in particular 'template_dir' and 'version'.
26342643
26352644
26362645 # Settings for local room and user statistics collection. See
2637 # docs/room_and_user_statistics.md.
2646 # https://matrix-org.github.io/synapse/latest/room_and_user_statistics.html.
26382647 #
26392648 stats:
26402649 # Uncomment the following to disable room and user statistics. Note that doing
27612770 #enabled: true
27622771
27632772 # The list of homeservers we wish to send and receive span contexts and span baggage.
2764 # See docs/opentracing.rst.
2773 # See https://matrix-org.github.io/synapse/latest/opentracing.html.
27652774 #
27662775 # This is a list of regexes which are matched against the server_name of the
27672776 # homeserver.
66 # be ingested by ELK stacks. See [2] for details.
77 #
88 # [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
9 # [2]: https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md
9 # [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html
1010
1111 version: 1
1212
0 <!--
1 Include the contents of INSTALL.md from the project root without moving it, which may
2 break links around the internet. Additionally, note that SUMMARY.md is unable to
3 directly link to content outside of the docs/ directory. So we use this file as a
4 redirection.
5 -->
6 {{#include ../../INSTALL.md}}
0 # Installation Instructions
1
2 There are 3 steps to follow under **Installation Instructions**.
3
4 - [Installation Instructions](#installation-instructions)
5 - [Choosing your server name](#choosing-your-server-name)
6 - [Installing Synapse](#installing-synapse)
7 - [Installing from source](#installing-from-source)
8 - [Platform-specific prerequisites](#platform-specific-prerequisites)
9 - [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
10 - [ArchLinux](#archlinux)
11 - [CentOS/Fedora](#centosfedora)
12 - [macOS](#macos)
13 - [OpenSUSE](#opensuse)
14 - [OpenBSD](#openbsd)
15 - [Windows](#windows)
16 - [Prebuilt packages](#prebuilt-packages)
17 - [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
18 - [Debian/Ubuntu](#debianubuntu)
19 - [Matrix.org packages](#matrixorg-packages)
20 - [Downstream Debian packages](#downstream-debian-packages)
21 - [Downstream Ubuntu packages](#downstream-ubuntu-packages)
22 - [Fedora](#fedora)
23 - [OpenSUSE](#opensuse-1)
24 - [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
25 - [ArchLinux](#archlinux-1)
26 - [Void Linux](#void-linux)
27 - [FreeBSD](#freebsd)
28 - [OpenBSD](#openbsd-1)
29 - [NixOS](#nixos)
30 - [Setting up Synapse](#setting-up-synapse)
31 - [Using PostgreSQL](#using-postgresql)
32 - [TLS certificates](#tls-certificates)
33 - [Client Well-Known URI](#client-well-known-uri)
34 - [Email](#email)
35 - [Registering a user](#registering-a-user)
36 - [Setting up a TURN server](#setting-up-a-turn-server)
37 - [URL previews](#url-previews)
38 - [Troubleshooting Installation](#troubleshooting-installation)
39
40
41 ## Choosing your server name
42
43 It is important to choose the name for your server before you install Synapse,
44 because it cannot be changed later.
45
46 The server name determines the "domain" part of user-ids for users on your
47 server: these will all be of the format `@user:my.domain.name`. It also
48 determines how other matrix servers will reach yours for federation.
49
50 For a test configuration, set this to the hostname of your server. For a more
51 production-ready setup, you will probably want to specify your domain
52 (`example.com`) rather than a matrix-specific hostname here (in the same way
53 that your email address is probably `user@example.com` rather than
54 `user@email.example.com`) - but doing so may require more advanced setup: see
55 [Setting up Federation](../federate.md).
56
57 ## Installing Synapse
58
59 ### Installing from source
60
61 (Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
62
63 When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
64
65 System requirements:
66
67 - POSIX-compliant system (tested on Linux & OS X)
68 - Python 3.5.2 or later, up to Python 3.9.
69 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
70
71
72 To install the Synapse homeserver run:
73
74 ```sh
75 mkdir -p ~/synapse
76 virtualenv -p python3 ~/synapse/env
77 source ~/synapse/env/bin/activate
78 pip install --upgrade pip
79 pip install --upgrade setuptools
80 pip install matrix-synapse
81 ```
82
83 This will download Synapse from [PyPI](https://pypi.org/project/matrix-synapse)
84 and install it, along with the python libraries it uses, into a virtual environment
85 under `~/synapse/env`. Feel free to pick a different directory if you
86 prefer.
87
88 This Synapse installation can then be later upgraded by using pip again with the
89 update flag:
90
91 ```sh
92 source ~/synapse/env/bin/activate
93 pip install -U matrix-synapse
94 ```
95
96 Before you can start Synapse, you will need to generate a configuration
97 file. To do this, run (in your virtualenv, as before):
98
99 ```sh
100 cd ~/synapse
101 python -m synapse.app.homeserver \
102 --server-name my.domain.name \
103 --config-path homeserver.yaml \
104 --generate-config \
105 --report-stats=[yes|no]
106 ```
107
108 ... substituting an appropriate value for `--server-name`.
109
110 This command will generate you a config file that you can then customise, but it will
111 also generate a set of keys for you. These keys will allow your homeserver to
112 identify itself to other homeservers, so don't lose or delete them. It would be
113 wise to back them up somewhere safe. (If, for whatever reason, you do need to
114 change your homeserver's keys, you may find that other homeservers have the
115 old key cached. If you update the signing key, you should change the name of the
116 key in the `<server name>.signing.key` file (the second word) to something
117 different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
118
119 To actually run your new homeserver, pick a working directory for Synapse to
120 run (e.g. `~/synapse`), and:
121
122 ```sh
123 cd ~/synapse
124 source env/bin/activate
125 synctl start
126 ```
127
128 #### Platform-specific prerequisites
129
130 Synapse is written in Python but some of the libraries it uses are written in
131 C. So before we can install Synapse itself we need a working C compiler and the
132 header files for Python C extensions.
133
134 ##### Debian/Ubuntu/Raspbian
135
136 Installing prerequisites on Ubuntu or Debian:
137
138 ```sh
139 sudo apt install build-essential python3-dev libffi-dev \
140 python3-pip python3-setuptools sqlite3 \
141 libssl-dev virtualenv libjpeg-dev libxslt1-dev
142 ```
143
144 ##### ArchLinux
145
146 Installing prerequisites on ArchLinux:
147
148 ```sh
149 sudo pacman -S base-devel python python-pip \
150 python-setuptools python-virtualenv sqlite3
151 ```
152
153 ##### CentOS/Fedora
154
155 Installing prerequisites on CentOS or Fedora Linux:
156
157 ```sh
158 sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
159 libwebp-devel libxml2-devel libxslt-devel libpq-devel \
160 python3-virtualenv libffi-devel openssl-devel python3-devel
161 sudo dnf groupinstall "Development Tools"
162 ```
163
164 ##### macOS
165
166 Installing prerequisites on macOS:
167
168 ```sh
169 xcode-select --install
170 sudo easy_install pip
171 sudo pip install virtualenv
172 brew install pkg-config libffi
173 ```
174
175 On macOS Catalina (10.15) you may need to explicitly install OpenSSL
176 via brew and inform `pip` about it so that `psycopg2` builds:
177
178 ```sh
179 brew install openssl@1.1
180 export LDFLAGS="-L/usr/local/opt/openssl/lib"
181 export CPPFLAGS="-I/usr/local/opt/openssl/include"
182 ```
183
184 ##### OpenSUSE
185
186 Installing prerequisites on openSUSE:
187
188 ```sh
189 sudo zypper in -t pattern devel_basis
190 sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
191 python-devel libffi-devel libopenssl-devel libjpeg62-devel
192 ```
193
194 ##### OpenBSD
195
196 A port of Synapse is available under `net/synapse`. The filesystem
197 underlying the homeserver directory (defaults to `/var/synapse`) has to be
198 mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
199 and mounting it to `/var/synapse` should be taken into consideration.
200
201 To be able to build Synapse's dependency on python the `WRKOBJDIR`
202 (cf. `bsd.port.mk(5)`) for building python, too, needs to be on a filesystem
203 mounted with `wxallowed` (cf. `mount(8)`).
204
205 Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
206 default OpenBSD installation is mounted with `wxallowed`):
207
208 ```sh
209 doas mkdir /usr/local/pobj_wxallowed
210 ```
211
212 Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
213 configured in `/etc/mk.conf`:
214
215 ```sh
216 doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
217 ```
218
219 Setting the `WRKOBJDIR` for building python:
220
221 ```sh
222 echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
223 ```
224
225 Building Synapse:
226
227 ```sh
228 cd /usr/ports/net/synapse
229 make install
230 ```
231
232 ##### Windows
233
234 If you wish to run or develop Synapse on Windows, the Windows Subsystem For
235 Linux provides a Linux environment on Windows 10 which is capable of using the
236 Debian, Fedora, or source installation methods. More information about WSL can
237 be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
238 Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
239 for Windows Server.
240
241 ### Prebuilt packages
242
243 As an alternative to installing from source, prebuilt packages are available
244 for a number of platforms.
245
246 #### Docker images and Ansible playbooks
247
248 There is an official synapse image available at
249 <https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
250 the docker-compose file available at
251 [contrib/docker](https://github.com/matrix-org/synapse/tree/develop/contrib/docker).
252 Further information on this including configuration options is available in the README
253 on hub.docker.com.
254
255 Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
256 Dockerfile to automate a synapse server in a single Docker image, at
257 <https://hub.docker.com/r/avhost/docker-matrix/tags/>
258
259 Slavi Pantaleev has created an Ansible playbook,
260 which installs the official Docker image of Matrix Synapse
261 along with many other Matrix-related services (Postgres database, Element, coturn,
262 ma1sd, SSL support, etc.).
263 For more details, see
264 <https://github.com/spantaleev/matrix-docker-ansible-deploy>
265
266 #### Debian/Ubuntu
267
268 ##### Matrix.org packages
269
270 Matrix.org provides Debian/Ubuntu packages of the latest stable version of
271 Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
272 9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
273
274 ```sh
275 sudo apt install -y lsb-release wget apt-transport-https
276 sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
277 echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
278 sudo tee /etc/apt/sources.list.d/matrix-org.list
279 sudo apt update
280 sudo apt install matrix-synapse-py3
281 ```
282
283 **Note**: if you followed a previous version of these instructions which
284 recommended using `apt-key add` to add an old key from
285 `https://matrix.org/packages/debian/`, you should note that this key has been
286 revoked. You should remove the old key with `sudo apt-key remove
287 C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to
288 update your configuration.
289
290 The fingerprint of the repository signing key (as shown by `gpg
291 /usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
292 `AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
293
294 ##### Downstream Debian packages
295
296 We do not recommend using the packages from the default Debian `buster`
297 repository at this time, as they are old and suffer from known security
298 vulnerabilities. You can install the latest version of Synapse from
299 [our repository](#matrixorg-packages) or from `buster-backports`. Please
300 see the [Debian documentation](https://backports.debian.org/Instructions/)
301 for information on how to use backports.
302
303 If you are using Debian `sid` or testing, Synapse is available in the default
304 repositories and it should be possible to install it simply with:
305
306 ```sh
307 sudo apt install matrix-synapse
308 ```
309
310 ##### Downstream Ubuntu packages
311
312 We do not recommend using the packages in the default Ubuntu repository
313 at this time, as they are old and suffer from known security vulnerabilities.
314 The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
315
316 #### Fedora
317
318 Synapse is in the Fedora repositories as `matrix-synapse`:
319
320 ```sh
321 sudo dnf install matrix-synapse
322 ```
323
324 Oleg Girko provides Fedora RPMs at
325 <https://obs.infoserver.lv/project/monitor/matrix-synapse>
326
327 #### OpenSUSE
328
329 Synapse is in the OpenSUSE repositories as `matrix-synapse`:
330
331 ```sh
332 sudo zypper install matrix-synapse
333 ```
334
335 #### SUSE Linux Enterprise Server
336
337 Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
338 <https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
339
340 #### ArchLinux
341
342 The quickest way to get up and running with ArchLinux is probably with the community package
343 <https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
344 the necessary dependencies.
345
346 pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
347
348 ```sh
349 sudo pip install --upgrade pip
350 ```
351
352 If you encounter an error with lib bcrypt causing a Wrong ELF Class:
353 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
354 compile it under the right architecture. (This should not be needed if
355 installing under virtualenv):
356
357 ```sh
358 sudo pip uninstall py-bcrypt
359 sudo pip install py-bcrypt
360 ```
361
362 #### Void Linux
363
364 Synapse can be found in the void repositories as 'synapse':
365
366 ```sh
367 xbps-install -Su
368 xbps-install -S synapse
369 ```
370
371 #### FreeBSD
372
373 Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
374
375 - Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
376 - Packages: `pkg install py37-matrix-synapse`
377
378 #### OpenBSD
379
380 As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
381 underlying the homeserver directory (defaults to `/var/synapse`) has to be
382 mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
383 and mounting it to `/var/synapse` should be taken into consideration.
384
385 Installing Synapse:
386
387 ```sh
388 doas pkg_add synapse
389 ```
390
391 #### NixOS
392
393 Robin Lambertz has packaged Synapse for NixOS at:
394 <https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
395
396 ## Setting up Synapse
397
398 Once you have installed synapse as above, you will need to configure it.
399
400 ### Using PostgreSQL
401
402 By default Synapse uses an [SQLite](https://sqlite.org/) database and in doing so trades
403 performance for convenience. Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org)
404 instead. Advantages include:
405
406 - significant performance improvements due to the superior threading and
407 caching model, smarter query optimiser
408 - allowing the DB to be run on separate hardware
409
410 For information on how to install and use PostgreSQL in Synapse, please see
411 [docs/postgres.md](../postgres.md)
412
413 SQLite is only acceptable for testing purposes. SQLite should not be used in
414 a production server. Synapse will perform poorly when using
415 SQLite, especially when participating in large rooms.
416
417 ### TLS certificates
418
419 The default configuration exposes a single HTTP port on the local
420 interface: `http://localhost:8008`. It is suitable for local testing,
421 but for any practical use, you will need Synapse's APIs to be served
422 over HTTPS.
423
424 The recommended way to do so is to set up a reverse proxy on port
425 `8448`. You can find documentation on doing so in
426 [docs/reverse_proxy.md](../reverse_proxy.md).
427
428 Alternatively, you can configure Synapse to expose an HTTPS port. To do
429 so, you will need to edit `homeserver.yaml`, as follows:
430
431 - First, under the `listeners` section, uncomment the configuration for the
432 TLS-enabled listener. (Remove the hash sign (`#`) at the start of
433 each line). The relevant lines are like this:
434
435 ```yaml
436 - port: 8448
437 type: http
438 tls: true
439 resources:
440 - names: [client, federation]
441 ```
442
443 - You will also need to uncomment the `tls_certificate_path` and
444 `tls_private_key_path` lines under the `TLS` section. You will need to manage
445 provisioning of these certificates yourself.
446
447 If you are using your own certificate, be sure to use a `.pem` file that
448 includes the full certificate chain including any intermediate certificates
449 (for instance, if using certbot, use `fullchain.pem` as your certificate, not
450 `cert.pem`).
451
452 For a more detailed guide to configuring your server for federation, see
453 [federate.md](../federate.md).
454
455 ### Client Well-Known URI
456
457 Setting up the client Well-Known URI is optional but if you set it up, it will
458 allow users to enter their full username (e.g. `@user:<server_name>`) into clients
459 which support well-known lookup to automatically configure the homeserver and
460 identity server URLs. This is useful so that users don't have to memorize or think
461 about the actual homeserver URL you are using.
462
463 The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
464 the following format.
465
466 ```json
467 {
468 "m.homeserver": {
469 "base_url": "https://<matrix.example.com>"
470 }
471 }
472 ```
473
474 It can optionally contain identity server information as well.
475
476 ```json
477 {
478 "m.homeserver": {
479 "base_url": "https://<matrix.example.com>"
480 },
481 "m.identity_server": {
482 "base_url": "https://<identity.example.com>"
483 }
484 }
485 ```
486
487 To work in browser based clients, the file must be served with the appropriate
488 Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
489 `Access-Control-Allow-Origin: *` which would allow all browser based clients to
490 view it.
491
492 In nginx this would be something like:
493
494 ```nginx
495 location /.well-known/matrix/client {
496 return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
497 default_type application/json;
498 add_header Access-Control-Allow-Origin *;
499 }
500 ```
501
502 You should also ensure the `public_baseurl` option in `homeserver.yaml` is set
503 correctly. `public_baseurl` should be set to the URL that clients will use to
504 connect to your server. This is the same URL you put for the `m.homeserver`
505 `base_url` above.
506
507 ```yaml
508 public_baseurl: "https://<matrix.example.com>"
509 ```
510
511 ### Email
512
513 It is desirable for Synapse to have the capability to send email. This allows
514 Synapse to send password reset emails, send verifications when an email address
515 is added to a user's account, and send email notifications to users when they
516 receive new messages.
517
518 To configure an SMTP server for Synapse, modify the configuration section
519 headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
520 and `notif_from` fields filled out. You may also need to set `smtp_user`,
521 `smtp_pass`, and `require_transport_security`.
522
523 If email is not configured, password reset, registration and notifications via
524 email will be disabled.
525
526 ### Registering a user
527
528 The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
529
530 Alternatively, you can do so from the command line. This can be done as follows:
531
532 1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
533 installed via a prebuilt package, `register_new_matrix_user` should already be
534 on the search path):
535 ```sh
536 cd ~/synapse
537 source env/bin/activate
538 synctl start # if not already running
539 ```
540 2. Run the following command:
541 ```sh
542 register_new_matrix_user -c homeserver.yaml http://localhost:8008
543 ```
544
545 This will prompt you to add details for the new user, and will then connect to
546 the running Synapse to create the new user. For example:
547 ```
548 New user localpart: erikj
549 Password:
550 Confirm password:
551 Make admin [no]:
552 Success!
553 ```
554
555 This process uses a setting `registration_shared_secret` in
556 `homeserver.yaml`, which is shared between Synapse itself and the
557 `register_new_matrix_user` script. It doesn't matter what it is (a random
558 value is generated by `--generate-config`), but it should be kept secret, as
559 anyone with knowledge of it can register users, including admin accounts,
560 on your server even if `enable_registration` is `false`.
561
562 ### Setting up a TURN server
563
564 For reliable VoIP calls to be routed via this homeserver, you MUST configure
565 a TURN server. See
566 [docs/turn-howto.md](../turn-howto.md)
567 for details.
568
569 ### URL previews
570
571 Synapse includes support for previewing URLs, which is disabled by default. To
572 turn it on you must enable the `url_preview_enabled: true` config parameter
573 and explicitly specify the IP ranges that Synapse is not allowed to spider for
574 previewing in the `url_preview_ip_range_blacklist` configuration parameter.
575 This is critical from a security perspective to stop arbitrary Matrix users
576 spidering 'internal' URLs on your network. At the very least we recommend that
577 your loopback and RFC1918 IP addresses are blacklisted.
578
579 This also requires the optional `lxml` python dependency to be installed. This
580 in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
581 means `apt-get install libxml2-dev`, or equivalent for your OS.
582
583 ### Troubleshooting Installation
584
585 `pip` seems to leak *lots* of memory during installation. For instance, a Linux
586 host with 512MB of RAM may run out of memory whilst installing Twisted. If this
587 happens, you will have to individually install the dependencies which are
588 failing, e.g.:
589
590 ```sh
591 pip install twisted
592 ```
593
594 If you have any other problems, feel free to ask in
595 [#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
0 # Upgrading Synapse
1
2 Before upgrading check if any special steps are required to upgrade from
3 the version you currently have installed to the current version of
4 Synapse. The extra instructions that may be required are listed later in
5 this document.
6
7 - Check that your versions of Python and PostgreSQL are still
8 supported.
9
10 Synapse follows upstream lifecycles for [Python](https://endoflife.date/python) and
11 [PostgreSQL](https://endoflife.date/postgresql), and removes support for versions
12 which are no longer maintained.
13
14 The website <https://endoflife.date> also offers convenient
15 summaries.
16
17 - If Synapse was installed using [prebuilt
18 packages](setup/installation.md#prebuilt-packages), you will need to follow the
19 normal process for upgrading those packages.
20
21 - If Synapse was installed from source, then:
22
23 1. Activate the virtualenv before upgrading. For example, if
24 Synapse is installed in a virtualenv in `~/synapse/env` then
25 run:
26
27 ```bash
28 source ~/synapse/env/bin/activate
29 ```
30
31 2. If Synapse was installed using pip then upgrade to the latest
32 version by running:
33
34 ```bash
35 pip install --upgrade matrix-synapse
36 ```
37
38 If Synapse was installed using git then upgrade to the latest
39 version by running:
40
41 ```bash
42 git pull
43 pip install --upgrade .
44 ```
45
46 3. Restart Synapse:
47
48 ```bash
49 ./synctl restart
50 ```
51
52 To check whether your update was successful, you can check the running
53 server version with:
54
55 ```bash
56 # you may need to replace 'localhost:8008' if synapse is not configured
57 # to listen on port 8008.
58
59 curl http://localhost:8008/_synapse/admin/v1/server_version
60 ```
61
62 ## Rolling back to older versions
63
64 Rolling back to previous releases can be difficult, due to database
65 schema changes between releases. Where we have been able to test the
66 rollback process, this will be noted below.
67
68 In general, you will need to undo any changes made during the upgrade
69 process, for example:
70
71 - pip:
72
73 ```bash
74 source env/bin/activate
75 # replace `1.3.0` accordingly:
76 pip install matrix-synapse==1.3.0
77 ```
78
79 - Debian:
80
81 ```bash
82 # replace `1.3.0` and `stretch` accordingly:
83 wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
84 dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
85 ```
86
87
88 # Upgrading to v1.38.0
89
90 ## Re-indexing of `events` table on Postgres databases
91
92 This release includes a database schema update which requires re-indexing one of
93 the larger tables in the database, `events`. This could result in increased
94 disk I/O for several hours or days after upgrading while the migration
95 completes. Furthermore, because we have to keep the old indexes until the new
96 indexes are ready, it could result in a significant, temporary, increase in
97 disk space.
98
99 To get a rough idea of the disk space required, check the current size of one
100 of the indexes. For example, from a `psql` shell, run the following sql:
101
102 ```sql
103 SELECT pg_size_pretty(pg_relation_size('events_order_room'));
104 ```
105
106 We need to rebuild **four** indexes, so you will need to multiply this result
107 by four to give an estimate of the disk space required. For example, on one
108 particular server:
109
110 ```
111 synapse=# select pg_size_pretty(pg_relation_size('events_order_room'));
112 pg_size_pretty
113 ----------------
114 288 MB
115 (1 row)
116 ```
117
118 On this server, it would be wise to ensure that at least 1152MB are free.
119
120 The additional disk space will be freed once the migration completes.
121
122 SQLite databases are unaffected by this change.
123
124
125 # Upgrading to v1.37.0
126
127 ## Deprecation of the current spam checker interface
128
129 The current spam checker interface is deprecated in favour of a new generic modules system.
130 Authors of spam checker modules can refer to [this
131 documentation](https://matrix-org.github.io/synapse/develop/modules.html#porting-an-existing-module-that-uses-the-old-interface)
132 to update their modules. Synapse administrators can refer to [this
133 documentation](https://matrix-org.github.io/synapse/develop/modules.html#using-modules)
134 to update their configuration once the modules they are using have been updated.
135
136 We plan to remove support for the current spam checker interface in August 2021.
137
138 More module interfaces will be ported over to this new generic system in future versions
139 of Synapse.
140
141
142 # Upgrading to v1.34.0
143
144 ## `room_invite_state_types` configuration setting
145
146 The `room_invite_state_types` configuration setting has been deprecated
147 and replaced with `room_prejoin_state`. See the [sample configuration
148 file](https://github.com/matrix-org/synapse/blob/v1.34.0/docs/sample_config.yaml#L1515).
149
150 If you have set `room_invite_state_types` to the default value you
151 should simply remove it from your configuration file. The default value
152 used to be:
153
154 ```yaml
155 room_invite_state_types:
156 - "m.room.join_rules"
157 - "m.room.canonical_alias"
158 - "m.room.avatar"
159 - "m.room.encryption"
160 - "m.room.name"
161 ```
162
163 If you have customised this value, you should remove
164 `room_invite_state_types` and configure `room_prejoin_state` instead.
165
166 # Upgrading to v1.33.0
167
168 ## Account Validity HTML templates can now display a user's expiration date
169
170 This may affect you if you have enabled the account validity feature,
171 and have made use of a custom HTML template specified by the
172 `account_validity.template_dir` or
173 `account_validity.account_renewed_html_path` Synapse config options.
174
175 The template can now accept an `expiration_ts` variable, which
176 represents the unix timestamp in milliseconds of the future date until
177 which their account has been renewed. See the [default
178 template](https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_renewed.html)
179 for an example of usage.
180
181 Also note that a new HTML template, `account_previously_renewed.html`,
182 has been added. This is shown to users when they attempt to renew
183 their account with a valid renewal token that has already been used
184 before. The default template contents can be found
185 [here](https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_previously_renewed.html),
186 and can also accept an `expiration_ts` variable. This template replaces
187 the error message users would previously see upon attempting to use a
188 valid renewal token more than once.
189
190 # Upgrading to v1.32.0
191
192 ## Regression causing connected Prometheus instances to become overwhelmed
193
194 This release introduces [a
195 regression](https://github.com/matrix-org/synapse/issues/9853) that can
196 overwhelm connected Prometheus instances. This issue is not present in
197 Synapse v1.32.0rc1.
198
199 If you have been affected, please downgrade to 1.31.0. You then may need
200 to remove excess writeahead logs in order for Prometheus to recover.
201 Instructions for doing so are provided
202 [here](https://github.com/matrix-org/synapse/pull/9854#issuecomment-823472183).
203
204 ## Dropping support for old Python, Postgres and SQLite versions
205
206 In line with our [deprecation
207 policy](https://github.com/matrix-org/synapse/blob/release-v1.32.0/docs/deprecation_policy.md),
208 we've dropped support for Python 3.5 and PostgreSQL 9.5, as they are no
209 longer supported upstream.
210
211 This release of Synapse requires Python 3.6+ and PostgreSQL 9.6+ or
212 SQLite 3.22+.
213
214 ## Removal of old List Accounts Admin API
215
216 The deprecated v1 "list accounts" admin API
217 (`GET /_synapse/admin/v1/users/<user_id>`) has been removed in this
218 version.
219
220 The [v2 list accounts
221 API](https://github.com/matrix-org/synapse/blob/master/docs/admin_api/user_admin_api.rst#list-accounts)
222 has been available since Synapse 1.7.0 (2019-12-13), and is accessible
223 under `GET /_synapse/admin/v2/users`.
224
225 The deprecation of the old endpoint was announced with Synapse 1.28.0
226 (released on 2021-02-25).
227
228 ## Application Services must use type `m.login.application_service` when registering users
229
230 In compliance with the [Application Service
231 spec](https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions),
232 Application Services are now required to use the
233 `m.login.application_service` type when registering users via the
234 `/_matrix/client/r0/register` endpoint. This behaviour was deprecated in
235 Synapse v1.30.0.
236
237 Please ensure your Application Services are up to date.
238
239 # Upgrading to v1.29.0
240
241 ## Requirement for X-Forwarded-Proto header
242
243 When using Synapse with a reverse proxy (in particular, when using the
244 `x_forwarded` option on an HTTP listener), Synapse now
245 expects to receive an `X-Forwarded-Proto` header on incoming
246 HTTP requests. If it is not set, Synapse will log a warning on each
247 received request.
248 
249 To avoid the warning, administrators using a reverse proxy should ensure
250 that the reverse proxy sets the `X-Forwarded-Proto` header to
251 `https` or `http` to indicate the protocol used
252 by the client.
253 
254 Synapse also requires the `Host` header to be preserved.
255 
256 See the [reverse proxy documentation](../reverse_proxy.md), where the
257 example configurations have been updated to show how to set these
258 headers.
259 
260 (Users of [Caddy](https://caddyserver.com/) are unaffected, since we
261 believe it sets `X-Forwarded-Proto` by default.)
262
263 # Upgrading to v1.27.0
264
265 ## Changes to callback URI for OAuth2 / OpenID Connect and SAML2
266
267 This version changes the URI used for callbacks from OAuth2 and SAML2
268 identity providers:
269
270 - If your server is configured for single sign-on via an OpenID
271 Connect or OAuth2 identity provider, you will need to add
272 `[synapse public baseurl]/_synapse/client/oidc/callback` to the list
273 of permitted "redirect URIs" at the identity provider.
274
275 See the [OpenID docs](../openid.md) for more information on setting
276 up OpenID Connect.
277
278 - If your server is configured for single sign-on via a SAML2 identity
279 provider, you will need to add
280 `[synapse public baseurl]/_synapse/client/saml2/authn_response` as a
281 permitted "ACS location" (also known as "allowed callback URLs")
282 at the identity provider.
283
284 The "Issuer" in the "AuthnRequest" to the SAML2 identity
285 provider is also updated to
286 `[synapse public baseurl]/_synapse/client/saml2/metadata.xml`. If
287 your SAML2 identity provider uses this property to validate or
288 otherwise identify Synapse, its configuration will need to be
289 updated to use the new URL. Alternatively you could create a new,
290 separate "EntityDescriptor" in your SAML2 identity provider with
291 the new URLs and leave the URLs in the existing "EntityDescriptor"
292 as they were.
293
294 ## Changes to HTML templates
295
296 The HTML templates for SSO and email notifications now have [Jinja2's
297 autoescape](https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping)
298 enabled for files ending in `.html`, `.htm`, and `.xml`. If you have
299 customised these templates and see issues when viewing them you might
300 need to update them. It is expected that most configurations will need
301 no changes.
302
303 If you have customised the *names* of these templates, it is
304 recommended to verify they end in `.html` to ensure autoescape is
305 enabled.
306
307 The above applies to the following templates:
308
309 - `add_threepid.html`
310 - `add_threepid_failure.html`
311 - `add_threepid_success.html`
312 - `notice_expiry.html`
314 - `notif_mail.html` (which, by default, includes `room.html` and
315 `notif.html`)
316 - `password_reset.html`
317 - `password_reset_confirmation.html`
318 - `password_reset_failure.html`
319 - `password_reset_success.html`
320 - `registration.html`
321 - `registration_failure.html`
322 - `registration_success.html`
323 - `sso_account_deactivated.html`
324 - `sso_auth_bad_user.html`
325 - `sso_auth_confirm.html`
326 - `sso_auth_success.html`
327 - `sso_error.html`
328 - `sso_login_idp_picker.html`
329 - `sso_redirect_confirm.html`
330
331 # Upgrading to v1.26.0
332
333 ## Rolling back to v1.25.0 after a failed upgrade
334
335 v1.26.0 includes a lot of large changes. If something problematic
336 occurs, you may want to roll-back to a previous version of Synapse.
337 Because v1.26.0 also includes a new database schema version, reverting
338 that version is also required alongside the generic rollback
339 instructions mentioned above. In short, to roll back to v1.25.0 you need
340 to:
341
342 1. Stop the server
343
344 2. Decrease the schema version in the database:
345
346 ```sql
347 UPDATE schema_version SET version = 58;
348 ```
349
350 3. Delete the ignored users & chain cover data:
351
352 ```sql
353 DROP TABLE IF EXISTS ignored_users;
354 UPDATE rooms SET has_auth_chain_index = false;
355 ```
356
357 For PostgreSQL run:
358
359 ```sql
360 TRUNCATE event_auth_chain_links;
361 TRUNCATE event_auth_chains;
362 ```
363
364 For SQLite run:
365
366 ```sql
367 DELETE FROM event_auth_chain_links;
368 DELETE FROM event_auth_chains;
369 ```
370
371 4. Mark the deltas as not run (so they will re-run on upgrade).
372
373 ```sql
374 DELETE FROM applied_schema_deltas WHERE version = 59 AND file = '59/01ignored_user.py';
375 DELETE FROM applied_schema_deltas WHERE version = 59 AND file = '59/06chain_cover_index.sql';
376 ```
377
378 5. Downgrade Synapse by following the instructions for your
379 installation method in the "Rolling back to older versions"
380 section above.
381
382 # Upgrading to v1.25.0
383
384 ## Last release supporting Python 3.5
385
386 This is the last release of Synapse which guarantees support with Python
387 3.5, which passed its upstream End of Life date several months ago.
388
389 We will attempt to maintain support through March 2021, but without
390 guarantees.
391
392 In the future, Synapse will follow upstream schedules for ending support
393 of older versions of Python and PostgreSQL. Please upgrade to at least
394 Python 3.6 and PostgreSQL 9.6 as soon as possible.
395
396 ## Blacklisting IP ranges
397
398 Synapse v1.25.0 includes new settings, `ip_range_blacklist` and
399 `ip_range_whitelist`, for controlling outgoing requests from Synapse for
400 federation, identity servers, push, and for checking key validity for
401 third-party invite events. The previous setting,
402 `federation_ip_range_blacklist`, is deprecated. The new
403 `ip_range_blacklist` defaults to private IP ranges if it is not defined.
404
405 If you have never customised `federation_ip_range_blacklist` it is
406 recommended that you remove that setting.
407
408 If you have customised `federation_ip_range_blacklist` you should update
409 the setting name to `ip_range_blacklist`.
410
411 If you have a custom push server that is reached via private IP space
412 you may need to customise `ip_range_blacklist` or `ip_range_whitelist`.
413
414 # Upgrading to v1.24.0
415
416 ## Custom OpenID Connect mapping provider breaking change
417
418 This release allows the OpenID Connect mapping provider to perform
419 normalisation of the localpart of the Matrix ID. This allows for the
420 mapping provider to specify different algorithms, instead of the
421 [default
422 way](<https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets>).
423
424 If your Synapse configuration uses a custom mapping provider
425 ([oidc_config.user_mapping_provider.module]{.title-ref} is specified and
426 not equal to
427 [synapse.handlers.oidc_handler.JinjaOidcMappingProvider]{.title-ref})
428 then you *must* ensure that [map_user_attributes]{.title-ref} of the
429 mapping provider performs some normalisation of the
430 [localpart]{.title-ref} returned. To match previous behaviour you can
431 use the [map_username_to_mxid_localpart]{.title-ref} function provided
432 by Synapse. An example is shown below:
433
434 ```python
435 from synapse.types import map_username_to_mxid_localpart
436
437 class MyMappingProvider:
438 def map_user_attributes(self, userinfo, token):
439 # ... your custom logic ...
440 sso_user_id = ...
441 localpart = map_username_to_mxid_localpart(sso_user_id)
442
443 return {"localpart": localpart}
444 ```
445
446 ## Removal of historical Synapse Admin API
447
448 Historically, the Synapse Admin API has been accessible under:
449
450 - `/_matrix/client/api/v1/admin`
451 - `/_matrix/client/unstable/admin`
452 - `/_matrix/client/r0/admin`
453 - `/_synapse/admin/v1`
454
455 The endpoints with `/_matrix/client/*` prefixes have been removed as of
456 v1.24.0. The Admin API is now only accessible under:
457
458 - `/_synapse/admin/v1`
459
460 The only exception is the [/admin/whois]{.title-ref} endpoint, which is
461 [also available via the client-server
462 API](https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid).
463
464 The deprecation of the old endpoints was announced with Synapse 1.20.0
465 (released on 2020-09-22) and makes it easier for homeserver admins to
466 lock down external access to the Admin API endpoints.
467
468 # Upgrading to v1.23.0
469
470 ## Structured logging configuration breaking changes
471
472 This release deprecates use of the `structured: true` logging
473 configuration for structured logging. If your logging configuration
474 contains `structured: true` then it should be modified based on the
475 [structured logging
476 documentation](../structured_logging.md).
477
478 The `structured` and `drains` logging options are now deprecated and
479 should be replaced by standard logging configuration of `handlers` and
480 `formatters`.
481
482 A future release of Synapse will make using `structured: true` an
483 error.
484
485 # Upgrading to v1.22.0
486
487 ## ThirdPartyEventRules breaking changes
488
489 This release introduces a backwards-incompatible change to modules
490 making use of `ThirdPartyEventRules` in Synapse. If you make use of a
491 module defined under the `third_party_event_rules` config option, please
492 make sure it is updated to handle the below change:
493
494 The `http_client` argument is no longer passed to modules as they are
495 initialised. Instead, modules are expected to make use of the
496 `http_client` property on the `ModuleApi` class. Modules are now passed
497 a `module_api` argument during initialisation, which is an instance of
498 `ModuleApi`. `ModuleApi` instances have a `http_client` property which
499 acts the same as the `http_client` argument previously passed to
500 `ThirdPartyEventRules` modules.
501
502 # Upgrading to v1.21.0
503
504 ## Forwarding `/_synapse/client` through your reverse proxy
505
506 The [reverse proxy
507 documentation](https://github.com/matrix-org/synapse/blob/develop/docs/reverse_proxy.md)
508 has been updated to include reverse proxy directives for
509 `/_synapse/client/*` endpoints. As the user password reset flow now uses
510 endpoints under this prefix, **you must update your reverse proxy
511 configurations for user password reset to work**.
512
513 Additionally, note that the [Synapse worker documentation](https://github.com/matrix-org/synapse/blob/develop/docs/workers.md)
514 has been updated to state that the
515 `/_synapse/client/password_reset/email/submit_token` endpoint can be
516 handled by all workers. If you make use of Synapse's worker feature,
517 please update your reverse proxy configuration to reflect this change.
520
521 ## New HTML templates
522
523 A new HTML template,
524 [password_reset_confirmation.html](https://github.com/matrix-org/synapse/blob/develop/synapse/res/templates/password_reset_confirmation.html),
525 has been added to the `synapse/res/templates` directory. If you are
526 using a custom template directory, you may want to copy the template
527 over and modify it.
528
529 Note that as of v1.20.0, templates do not need to be included in custom
530 template directories for Synapse to start. The default templates will be
531 used if a custom template cannot be found.
532
533 This page will appear to the user after clicking a password reset link
534 that has been emailed to them.
535
536 To complete password reset, the page must include a way to make a
537 [POST]{.title-ref} request to
538 `/_synapse/client/password_reset/{medium}/submit_token` with the query
539 parameters from the original link, presented as a URL-encoded form. See
540 the file itself for more details.
541
542 ## Updated Single Sign-on HTML Templates
543
544 The `saml_error.html` template was removed from Synapse and replaced
545 with the `sso_error.html` template. If your Synapse is configured to use
546 SAML and a custom `sso_redirect_confirm_template_dir` configuration then
547 any customisations of the `saml_error.html` template will need to be
548 merged into the `sso_error.html` template. These templates are similar,
549 but the parameters are slightly different:
550
551 - The `msg` parameter should be renamed to `error_description`.
552 - There is no longer a `code` parameter for the response code.
553 - A string `error` parameter is available that includes a short hint
554 of why a user is seeing the error page.
555
556 # Upgrading to v1.18.0
557
558 ## Docker [-py3]{.title-ref} suffix will be removed in future versions
559
560 From 10th August 2020, we will no longer publish Docker images with the
561 [-py3]{.title-ref} tag suffix. The images tagged with the
562 [-py3]{.title-ref} suffix have been identical to the non-suffixed tags
563 since release 0.99.0, and the suffix is obsolete.
564
565 On 10th August, we will remove the [latest-py3]{.title-ref} tag.
566 Existing per-release tags (such as [v1.18.0-py3]{.title-ref}) will not
567 be removed, but no new [-py3]{.title-ref} tags will be added.
568
569 Scripts relying on the [-py3]{.title-ref} suffix will need to be
570 updated.
571
572 ## Redis replication is now recommended in lieu of TCP replication
573
574 When setting up worker processes, we now recommend the use of a Redis
575 server for replication. **The old direct TCP connection method is
576 deprecated and will be removed in a future release.** See
577 [workers](../workers.md) for more details.
578
579 # Upgrading to v1.14.0
580
581 This version includes a database update which is run as part of the
582 upgrade, and which may take a couple of minutes in the case of a large
583 server. Synapse will not respond to HTTP requests while this update is
584 taking place.
585
586 # Upgrading to v1.13.0
587
588 ## Incorrect database migration in old synapse versions
589
590 A bug was introduced in Synapse 1.4.0 which could cause the room
591 directory to be incomplete or empty if Synapse was upgraded directly
592 from v1.2.1 or earlier, to versions between v1.4.0 and v1.12.x.
593
594 This will *not* be a problem for Synapse installations which were:
595 
596 - created at v1.4.0 or later,
597 - upgraded via v1.3.x, or
598 - upgraded straight from v1.2.1 or earlier to v1.13.0 or later.
599
600 If completeness of the room directory is a concern, installations which
601 are affected can be repaired as follows:
602
603 1. Run the following sql from a [psql]{.title-ref} or
604 [sqlite3]{.title-ref} console:
605
606 ```sql
607 INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
608 ('populate_stats_process_rooms', '{}', 'current_state_events_membership');
609
610 INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
611 ('populate_stats_process_users', '{}', 'populate_stats_process_rooms');
612 ```
613
614 2. Restart synapse.
615
616 ## New Single Sign-on HTML Templates
617
618 New templates (`sso_auth_confirm.html`, `sso_auth_success.html`, and
619 `sso_account_deactivated.html`) were added to Synapse. If your Synapse
620 is configured to use SSO and a custom
621 `sso_redirect_confirm_template_dir` configuration then these templates
622 will need to be copied from
623 [synapse/res/templates](synapse/res/templates) into that directory.
624
625 ## Synapse SSO Plugins Method Deprecation
626
627 Plugins using the `complete_sso_login` method of
628 `synapse.module_api.ModuleApi` should update to using the async/await
629 version `complete_sso_login_async` which includes additional checks. The
630 non-async version is considered deprecated.
631
632 ## Rolling back to v1.12.4 after a failed upgrade
633
634 v1.13.0 includes a lot of large changes. If something problematic
635 occurs, you may want to roll-back to a previous version of Synapse.
636 Because v1.13.0 also includes a new database schema version, reverting
637 that version is also required alongside the generic rollback
638 instructions mentioned above. In short, to roll back to v1.12.4 you need
639 to:
640
641 1. Stop the server
642
643 2. Decrease the schema version in the database:
644
645 ```sql
646 UPDATE schema_version SET version = 57;
647 ```
648
649 3. Downgrade Synapse by following the instructions for your
650 installation method in the "Rolling back to older versions"
651 section above.
652
653 # Upgrading to v1.12.0
654
655 This version includes a database update which is run as part of the
656 upgrade, and which may take some time (several hours in the case of a
657 large server). Synapse will not respond to HTTP requests while this
658 update is taking place.
659
660 This is only likely to be a problem in the case of a server which is
661 participating in many rooms.
662
663 0. As with all upgrades, it is recommended that you have a recent
664 backup of your database which can be used for recovery in the event
665 of any problems.
666
667 1. As an initial check to see if you will be affected, you can try
668 running the following query from the [psql]{.title-ref} or
669 [sqlite3]{.title-ref} console. It is safe to run it while Synapse is
670 still running.
671
672 ```sql
673 SELECT MAX(q.v) FROM (
674 SELECT (
675 SELECT ej.json AS v
676 FROM state_events se INNER JOIN event_json ej USING (event_id)
677 WHERE se.room_id=rooms.room_id AND se.type='m.room.create' AND se.state_key=''
678 LIMIT 1
679 ) FROM rooms WHERE rooms.room_version IS NULL
680 ) q;
681 ```
682
683 This query will take about the same amount of time as the upgrade
684 process: ie, if it takes 5 minutes, then it is likely that Synapse
685 will be unresponsive for 5 minutes during the upgrade.
686
687 If you consider an outage of this duration to be acceptable, no
688 further action is necessary and you can simply start Synapse 1.12.0.
689
690 If you would prefer to reduce the downtime, continue with the steps
691 below.
692
693 2. The easiest workaround for this issue is to manually create a new
694 index before upgrading. On PostgreSQL, this can be done as follows:
695
696 ```sql
697 CREATE INDEX CONCURRENTLY tmp_upgrade_1_12_0_index
698 ON state_events(room_id) WHERE type = 'm.room.create';
699 ```
700
701 The above query may take some time, but is also safe to run while
702 Synapse is running.
703
704 We assume that no SQLite users have databases large enough to be
705 affected. If you *are* affected, you can run a similar query,
706 omitting the `CONCURRENTLY` keyword. Note however that this
707 operation may in itself cause Synapse to stop running for some time.
708 Synapse admins are reminded that [SQLite is not recommended for use
709 outside a test
710 environment](https://github.com/matrix-org/synapse/blob/master/README.rst#using-postgresql).
711
712 3. Once the index has been created, the `SELECT` query in step 1 above
713 should complete quickly. It is therefore safe to upgrade to Synapse
714 1.12.0.
715
716 4. Once Synapse 1.12.0 has successfully started and is responding to
717 HTTP requests, the temporary index can be removed:
718
719 ```sql
720 DROP INDEX tmp_upgrade_1_12_0_index;
721 ```
722
723 # Upgrading to v1.10.0
724
725 Synapse will now log a warning on start up if used with a PostgreSQL
726 database that has a non-recommended locale set.
727
728 See [Postgres](../postgres.md) for details.
729
730 # Upgrading to v1.8.0
731
732 Specifying a `log_file` config option will now cause Synapse to refuse
733 to start, and should be replaced with the `log_config` option.
734 Support for the `log_file` option was removed in v1.3.0 and has since
735 had no effect.
736
737 # Upgrading to v1.7.0
738
739 In an attempt to configure Synapse in a privacy preserving way, the
740 default behaviours of `allow_public_rooms_without_auth` and
741 `allow_public_rooms_over_federation` have been inverted. This means that
742 by default, only authenticated users querying the Client/Server API will
743 be able to query the room directory, and relatedly that the server will
744 not share room directory information with other servers over federation.
745
746 If your installation does not explicitly set these settings one way or
747 the other and you want either setting to be `true` then it will be
748 necessary to update your homeserver configuration file accordingly.
749
750 For more details on the surrounding context see our
751 [explainer](https://matrix.org/blog/2019/11/09/avoiding-unwelcome-visitors-on-private-matrix-servers).
752
753 # Upgrading to v1.5.0
754
755 This release includes a database migration which may take several
756 minutes to complete if there are a large number (more than a million or
757 so) of entries in the `devices` table. This is only likely to be a
758 problem on very large installations.
759
760 # Upgrading to v1.4.0
761
762 ## New custom templates
763
764 If you have configured a custom template directory with the
765 `email.template_dir` option, be aware that there are new templates
766 regarding registration and threepid management (see below) that must be
767 included.
768
769 - `registration.html` and `registration.txt`
770 - `registration_success.html` and `registration_failure.html`
771 - `add_threepid.html` and `add_threepid.txt`
772 - `add_threepid_failure.html` and `add_threepid_success.html`
773
774 Synapse will expect these files to exist inside the configured template
775 directory, and **will fail to start** if they are absent. To view the
776 default templates, see
777 [synapse/res/templates](https://github.com/matrix-org/synapse/tree/master/synapse/res/templates).
778
779 ## 3pid verification changes
780
781 **Note: As of this release, users will be unable to add phone numbers or
782 email addresses to their accounts, without changes to the Synapse
783 configuration. This includes adding an email address during
784 registration.**
785
786 It is possible for a user to associate an email address or phone number
787 with their account, for a number of reasons:
788
789 - for use when logging in, as an alternative to the user id.
790 - in the case of email, as an alternative contact to help with account
791 recovery.
792 - in the case of email, to receive notifications of missed messages.
793
794 Before an email address or phone number can be added to a user's
795 account, or before such an address is used to carry out a
796 password-reset, Synapse must confirm the operation with the owner of the
797 email address or phone number. It does this by sending an email or text
798 giving the user a link or token to confirm receipt. This process is
799 known as '3pid verification'. ('3pid', or 'threepid', stands for
800 third-party identifier, and we use it to refer to external identifiers
801 such as email addresses and phone numbers.)
802
803 Previous versions of Synapse delegated the task of 3pid verification to
804 an identity server by default. In most cases this server is `vector.im`
805 or `matrix.org`.
806
807 In Synapse 1.4.0, for security and privacy reasons, the homeserver will
808 no longer delegate this task to an identity server by default. Instead,
809 the server administrator will need to explicitly decide how they would
810 like the verification messages to be sent.
811
812 In the medium term, the `vector.im` and `matrix.org` identity servers
813 will disable support for delegated 3pid verification entirely. However,
814 in order to ease the transition, they will retain the capability for a
815 limited period. Delegated email verification will be disabled on Monday
816 2nd December 2019 (giving roughly 2 months notice). Disabling delegated
817 SMS verification will follow some time after that once SMS verification
818 support lands in Synapse.
819
820 Once delegated 3pid verification support has been disabled in the
821 `vector.im` and `matrix.org` identity servers, all Synapse versions that
822 depend on those instances will be unable to verify email and phone
823 numbers through them. There are no imminent plans to remove delegated
824 3pid verification from Sydent generally. (Sydent is the identity server
825 project that backs the `vector.im` and `matrix.org` instances).
826
827 ### Email
828
829 Following upgrade, to continue verifying email (e.g. as part of the
830 registration process), admins can either:-
831
832 - Configure Synapse to use an email server.
833 - Run or choose an identity server which allows delegated email
834 verification and delegate to it.
835
836 #### Configure SMTP in Synapse
837
838 To configure an SMTP server for Synapse, modify the configuration
839 section headed `email`, and be sure to have at least the
840 `smtp_host, smtp_port` and `notif_from` fields filled out.
841
842 You may also need to set `smtp_user`, `smtp_pass`, and
843 `require_transport_security`.
844
845 See the [sample configuration file](docs/sample_config.yaml) for more
846 details on these settings.
847
848 #### Delegate email to an identity server
849
850 Some admins will wish to continue using email verification as part of
851 the registration process, but will not immediately have an appropriate
852 SMTP server at hand.
853
854 To this end, we will continue to support email verification delegation
855 via the `vector.im` and `matrix.org` identity servers for two months.
856 Support for delegated email verification will be disabled on Monday 2nd
857 December.
858
859 The `account_threepid_delegates` dictionary defines whether the
860 homeserver should delegate an external server (typically an [identity
861 server](https://matrix.org/docs/spec/identity_service/r0.2.1)) to handle
862 sending confirmation messages via email and SMS.
863
864 So to delegate email verification, in `homeserver.yaml`, set
865 `account_threepid_delegates.email` to the base URL of an identity
866 server. For example:
867
868 ```yaml
869 account_threepid_delegates:
870 email: https://example.com # Delegate email sending to example.com
871 ```
872
873 Note that `account_threepid_delegates.email` replaces the deprecated
874 `email.trust_identity_server_for_password_resets`: if
875 `email.trust_identity_server_for_password_resets` is set to `true`, and
876 `account_threepid_delegates.email` is not set, then the first entry in
877 `trusted_third_party_id_servers` will be used as the
878 `account_threepid_delegate` for email. This is to ensure compatibility
879 with existing Synapse installs that set up external server handling for
880 these tasks before v1.4.0. If
881 `email.trust_identity_server_for_password_resets` is `true` and no
882 trusted identity server domains are configured, Synapse will report an
883 error and refuse to start.
884
885 If `email.trust_identity_server_for_password_resets` is `false` or
886 absent and no `email` delegate is configured in
887 `account_threepid_delegates`, then Synapse will send email verification
888 messages itself, using the configured SMTP server (see above).
889
890
891 ### Phone numbers
892
893 Synapse does not support phone-number verification itself, so the only
894 way to maintain the ability for users to add phone numbers to their
895 accounts will be by continuing to delegate phone number verification to
896 the `matrix.org` and `vector.im` identity servers (or another identity
897 server that supports SMS sending).
898
899 The `account_threepid_delegates` dictionary defines whether the
900 homeserver should delegate an external server (typically an [identity
901 server](https://matrix.org/docs/spec/identity_service/r0.2.1)) to handle
902 sending confirmation messages via email and SMS.
903
904 So to delegate phone number verification, in `homeserver.yaml`, set
905 `account_threepid_delegates.msisdn` to the base URL of an identity
906 server. For example:
907
908 ```yaml
909 account_threepid_delegates:
910 msisdn: https://example.com # Delegate sms sending to example.com
911 ```
912
913 The `matrix.org` and `vector.im` identity servers will continue to
914 support delegated phone number verification via SMS until such time as
915 it is possible for admins to configure their servers to perform phone
916 number verification directly. More details will follow in a future
917 release.
918
919 ## Rolling back to v1.3.1
920
921 If you encounter problems with v1.4.0, it should be possible to roll
922 back to v1.3.1, subject to the following:
923
924 - The 'room statistics' engine was heavily reworked in this release
925 (see [#5971](https://github.com/matrix-org/synapse/pull/5971)),
926 including significant changes to the database schema, which are not
927 easily reverted. This will cause the room statistics engine to stop
928 updating when you downgrade.
929
930 The room statistics are essentially unused in v1.3.1 (in future
931 versions of Synapse, they will be used to populate the room
932 directory), so there should be no loss of functionality. However,
933 the statistics engine will write errors to the logs, which can be
934 avoided by setting the following in `homeserver.yaml`:
935
936 ```yaml
937 stats:
938 enabled: false
939 ```
940
941 Don't forget to re-enable it when you upgrade again, in preparation
942 for its use in the room directory!
943
944 # Upgrading to v1.2.0
945
946 Some counter metrics have been renamed, with the old names deprecated.
947 See [the metrics
948 documentation](../metrics-howto.md#renaming-of-metrics--deprecation-of-old-names-in-12)
949 for details.
950
951 # Upgrading to v1.1.0
952
953 Synapse v1.1.0 removes support for older Python and PostgreSQL versions,
954 as outlined in [our deprecation
955 notice](https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x).
956
957 ## Minimum Python Version
958
959 Synapse v1.1.0 has a minimum Python requirement of Python 3.5. Python
960 3.6 or Python 3.7 are recommended as they have improved internal string
961 handling, significantly reducing memory usage.
962
963 If you use current versions of the Matrix.org-distributed Debian
964 packages or Docker images, action is not required.
965
966 If you install Synapse in a Python virtual environment, please see
967 "Upgrading to v0.34.0" for notes on setting up a new virtualenv under
968 Python 3.
969
970 ## Minimum PostgreSQL Version
971
972 If using PostgreSQL under Synapse, you will need to use PostgreSQL 9.5
973 or above. Please see the [PostgreSQL
974 documentation](https://www.postgresql.org/docs/11/upgrading.html) for
975 more details on upgrading your database.
976
977 # Upgrading to v1.0
978
979 ## Validation of TLS certificates
980
981 Synapse v1.0 is the first release to enforce validation of TLS
982 certificates for the federation API. It is therefore essential that your
983 certificates are correctly configured. See the
984 [FAQ](../MSC1711_certificates_FAQ.md) for more information.
985
986 Note, v1.0 installations will also no longer be able to federate with
987 servers that have not correctly configured their certificates.
988
989 In rare cases, it may be desirable to disable certificate checking: for
990 example, it might be essential to be able to federate with a given
991 legacy server in a closed federation. This can be done in one of two
992 ways:-
993
994 - Configure the global switch `federation_verify_certificates` to
995 `false`.
996 - Configure a whitelist of server domains to trust via
997 `federation_certificate_verification_whitelist`.
998
999 See the [sample configuration file](docs/sample_config.yaml) for more
1000 details on these settings.
1001
1002 ## Email
1003
1004 When a user requests a password reset, Synapse will send an email to the
1005 user to confirm the request.
1006
1007 Previous versions of Synapse delegated the job of sending this email to
1008 an identity server. If the identity server was somehow malicious or
1009 became compromised, it would be theoretically possible to hijack an
1010 account through this means.
1011
1012 Therefore, by default, Synapse v1.0 will send the confirmation email
1013 itself. If Synapse is not configured with an SMTP server, password reset
1014 via email will be disabled.
1015
1016 To configure an SMTP server for Synapse, modify the configuration
1017 section headed `email`, and be sure to have at least the `smtp_host`,
1018 `smtp_port` and `notif_from` fields filled out. You may also need to set
1019 `smtp_user`, `smtp_pass`, and `require_transport_security`.
1020
1021 If you are absolutely certain that you wish to continue using an
1022 identity server for password resets, set
1023 `trust_identity_server_for_password_resets` to `true`.
1024
1025 See the [sample configuration file](docs/sample_config.yaml) for more
1026 details on these settings.
1027
1028 ## New email templates
1029
1030 Some new templates have been added to the default template directory for the purpose of
1031 the homeserver sending its own password reset emails. If you have configured a
1032 custom `template_dir` in your Synapse config, these files will need to be added.
1033
1034 `password_reset.html` and `password_reset.txt` are HTML and plain text
1035 templates respectively that contain the contents of what will be emailed
1036 to the user upon attempting to reset their password via email.
1037 `password_reset_success.html` and `password_reset_failure.html` are HTML
1038 files that the content of which (assuming no redirect URL is set) will
1039 be shown to the user after they attempt to click the link in the email
1040 sent to them.
1041
1042 # Upgrading to v0.99.0
1043
1044 Please be aware that, before Synapse v1.0 is released around March 2019,
1045 you will need to replace any self-signed certificates with those
1046 verified by a root CA. Information on how to do so can be found at [the
1047 ACME docs](../ACME.md).
1048
1049 For more information on configuring TLS certificates see the
1050 [FAQ](../MSC1711_certificates_FAQ.md).
1051
1052 # Upgrading to v0.34.0
1053
1054 1. This release is the first to fully support Python 3. Synapse will
1055 now run on Python versions 3.5, or 3.6 (as well as 2.7). We
1056 recommend switching to Python 3, as it has been shown to give
1057 performance improvements.
1058
1059 For users who have installed Synapse into a virtualenv, we recommend
1060 doing this by creating a new virtualenv. For example:
1061
1062 virtualenv -p python3 ~/synapse/env3
1063 source ~/synapse/env3/bin/activate
1064 pip install matrix-synapse
1065
1066 You can then start synapse as normal, having activated the new
1067 virtualenv:
1068
1069 cd ~/synapse
1070 source env3/bin/activate
1071 synctl start
1072
1073 Users who have installed from distribution packages should see the
1074 relevant package documentation. See below for notes on Debian
1075 packages.
1076
1077 - When upgrading to Python 3, you **must** make sure that your log
1078 files are configured as UTF-8, by adding `encoding: utf8` to the
1079 `RotatingFileHandler` configuration (if you have one) in your
1080 `<server>.log.config` file. For example, if your `log.config`
1081 file contains:
1082
1083 handlers:
1084 file:
1085 class: logging.handlers.RotatingFileHandler
1086 formatter: precise
1087 filename: homeserver.log
1088 maxBytes: 104857600
1089 backupCount: 10
1090 filters: [context]
1091 console:
1092 class: logging.StreamHandler
1093 formatter: precise
1094 filters: [context]
1095
1096 Then you should update this to be:
1097
1098 handlers:
1099 file:
1100 class: logging.handlers.RotatingFileHandler
1101 formatter: precise
1102 filename: homeserver.log
1103 maxBytes: 104857600
1104 backupCount: 10
1105 filters: [context]
1106 encoding: utf8
1107 console:
1108 class: logging.StreamHandler
1109 formatter: precise
1110 filters: [context]
1111
1112 There is no need to revert this change if downgrading to
1113 Python 2.
1114
1115 We are also making available Debian packages which will run Synapse
1116 on Python 3. You can switch to these packages with
1117 `apt-get install matrix-synapse-py3`, however, please read
1118 [debian/NEWS](https://github.com/matrix-org/synapse/blob/release-v0.34.0/debian/NEWS)
1119 before doing so. The existing `matrix-synapse` packages will
1120 continue to use Python 2 for the time being.
1121
1122 2. This release removes the `riot.im` from the default list of trusted
1123 identity servers.
1124
1125 If `riot.im` is in your homeserver's list of
1126 `trusted_third_party_id_servers`, you should remove it. It was added
1127 in case a hypothetical future identity server was put there. If you
1128 don't remove it, users may be unable to deactivate their accounts.
1129
1130 3. This release no longer installs the (unmaintained) Matrix Console
1131 web client as part of the default installation. It is possible to
1132 re-enable it by installing it separately and setting the
1133 `web_client_location` config option, but please consider switching
1134 to another client.
1135
1136 # Upgrading to v0.33.7
1137
1138 This release removes the example email notification templates from
1139 `res/templates` (they are now internal to the python package). This
1140 should only affect you if you (a) deploy your Synapse instance from a
1141 git checkout or a github snapshot URL, and (b) have email notifications
1142 enabled.
1143
1144 If you have email notifications enabled, you should ensure that
1145 `email.template_dir` is either configured to point at a directory where
1146 you have installed customised templates, or leave it unset to use the
1147 default templates.
1148
1149 # Upgrading to v0.27.3
1150
1151 This release expands the anonymous usage stats sent if the opt-in
1152 `report_stats` configuration is set to `true`. We now capture RSS memory
1153 and cpu use at a very coarse level. This requires administrators to
1154 install the optional `psutil` python module.
1155
1156 We would appreciate it if you could assist by ensuring this module is
1157 available and `report_stats` is enabled. This will let us see if
1158 performance changes to synapse are having an impact to the general
1159 community.
1160
1161 # Upgrading to v0.15.0
1162
1163 If you want to use the new URL previewing API
1164 (`/_matrix/media/r0/preview_url`) then you have to explicitly enable it
1165 in the config and update your dependencies. See README.rst
1166 for details.
1167
1168 # Upgrading to v0.11.0
1169
1170 This release includes the option to send anonymous usage stats to
1171 matrix.org, and requires that administrators explicitly opt in or out by
1172 setting the `report_stats` option to either `true` or `false`.
1173
1174 We would really appreciate it if you could help our project out by
1175 reporting anonymized usage statistics from your homeserver. Only very
1176 basic aggregate data (e.g. number of users) will be reported, but it
1177 helps us to track the growth of the Matrix community, and helps us to
1178 make Matrix a success, as well as to convince other networks that they
1179 should peer with us.
1180
1181 # Upgrading to v0.9.0
1182
1183 Application services have had a breaking API change in this version.
1184
1185 They can no longer register themselves with a home server using the AS
1186 HTTP API. This decision was made because a compromised application
1187 service with free rein to register any regex in effect grants full
1188 read/write access to the home server if a regex of `.*` is used. An
1189 attack where a compromised AS re-registers itself with `.*` was deemed
1190 too big of a security risk to ignore, and so the ability to register
1191 with the HS remotely has been removed.
1192
1193 It has been replaced by specifying a list of application service
1194 registrations in `homeserver.yaml`:
1195
1196 app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]
1197
1198 Where `registration-01.yaml` looks like:
1199
1200 url: <String> # e.g. "https://my.application.service.com"
1201 as_token: <String>
1202 hs_token: <String>
1203 sender_localpart: <String> # This is a new field which denotes the user_id localpart when using the AS token
1204 namespaces:
1205 users:
1206 - exclusive: <Boolean>
1207 regex: <String> # e.g. "@prefix_.*"
1208 aliases:
1209 - exclusive: <Boolean>
1210 regex: <String>
1211 rooms:
1212 - exclusive: <Boolean>
1213 regex: <String>
1214
1215 # Upgrading to v0.8.0
1216
1217 Servers which use captchas will need to add their public key to:
1218
1219 static/client/register/register_config.js
1220
1221 window.matrixRegistrationConfig = {
1222 recaptcha_public_key: "YOUR_PUBLIC_KEY"
1223 };
1224
1225 This is required in order to support registration fallback (typically
1226 used on mobile devices).
1227
1228 # Upgrading to v0.7.0
1229
1230 New dependencies are:
1231
1232 - pydenticon
1233 - simplejson
1234 - syutil
1235 - matrix-angular-sdk
1236
1237 To pull in these dependencies in a virtual env, run:
1238
1239 python synapse/python_dependencies.py | xargs -n 1 pip install
1240
1241 # Upgrading to v0.6.0
1242
1243 To pull in new dependencies, run:
1244
1245 python setup.py develop --user
1246
1247 This update includes a change to the database schema. To upgrade you
1248 first need to upgrade the database by running:
1249
1250 python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>
1251
1252 Where [<db>]{.title-ref} is the location of the database,
1253 [<server_name>]{.title-ref} is the server name as specified in the
1254 synapse configuration, and [<signing_key>]{.title-ref} is the location
1255 of the signing key as specified in the synapse configuration.
1256
1257 This may take some time to complete. Failures of signatures and content
1258 hashes can safely be ignored.
1259
1260 # Upgrading to v0.5.1
1261
1262 Depending on precisely when you installed v0.5.0 you may have ended up
1263 with a stale release of the reference matrix webclient installed as a
1264 python module. To uninstall it and ensure you are depending on the
1265 latest module, please run:
1266
1267 $ pip uninstall syweb
1268
1269 # Upgrading to v0.5.0
1270
1271 The webclient has been split out into a separate repository/package in
1272 this release. Before you restart your homeserver you will need to pull
1273 in the webclient package by running:
1274
1275 python setup.py develop --user
1276
1277 This release completely changes the database schema and so requires
1278 upgrading it before starting the new version of the homeserver.
1279
1280 The script "database-prepare-for-0.5.0.sh" should be used to upgrade
1281 the database. This will save all user information, such as logins and
1282 profiles, but will otherwise purge the database. This includes messages,
1283 which rooms the home server was a member of and room alias mappings.
1284
1285 If you would like to keep your history, please take a copy of your
1286 database file and ask for help in #matrix:matrix.org. The upgrade
1287 process is, unfortunately, non trivial and requires human intervention
1288 to resolve any resulting conflicts during the upgrade process.
1289
1290 Before running the command the homeserver should be first completely
1291 shutdown. To run it, simply specify the location of the database, e.g.:
1292
1293 > ./scripts/database-prepare-for-0.5.0.sh "homeserver.db"
1294
1295 Once this has successfully completed it will be safe to restart the
1296 homeserver. You may notice that the homeserver takes a few seconds
1297 longer to restart than usual as it reinitializes the database.
1298
1299 On startup of the new version, users can either rejoin remote rooms
1300 using room aliases or by being reinvited. Alternatively, if any other
1301 homeserver sends a message to a room that the homeserver was previously
1302 in, the local HS will automatically rejoin the room.
1303
1304 # Upgrading to v0.4.0
1305
1306 This release needs an updated syutil version. Run:
1307
1308 python setup.py develop
1309
1310 You will also need to upgrade your configuration as the signing key
1311 format has changed. Run:
1312
1313 python -m synapse.app.homeserver --config-path <CONFIG> --generate-config
1314
1315 # Upgrading to v0.3.0
1316
1317 This registration API now closely matches the login API. This introduces
1318 a bit more backwards and forwards between the HS and the client, but
1319 this improves the overall flexibility of the API. You can now GET on
1320 /register to retrieve a list of valid registration flows. Upon choosing
1321 one, they are submitted in the same way as login, e.g:
1322
1323 {
1324 type: m.login.password,
1325 user: foo,
1326 password: bar
1327 }
1328
1329 The default HS supports 2 flows, with and without Identity Server email
1330 authentication. Enabling captcha on the HS will add in an extra step to
1331 all flows: `m.login.recaptcha` which must be completed before you can
1332 transition to the next stage. There is a new login type:
1333 `m.login.email.identity` which contains the `threepidCreds` key which
1334 were previously sent in the original register request. For more
1335 information on this, see the specification.
1336
1337 ## Web Client
1338
1339 The VoIP specification has changed between v0.2.0 and v0.3.0. Users
1340 should refresh any browser tabs to get the latest web client code. Users
1341 on v0.2.0 of the web client will not be able to call those on v0.3.0 and
1342 vice versa.
1343
1344 # Upgrading to v0.2.0
1345
1346 The home server now requires setting up of SSL config before it can run.
1347 To automatically generate default config use:
1348
1349 $ python synapse/app/homeserver.py \
1350 --server-name machine.my.domain.name \
1351 --bind-port 8448 \
1352 --config-path homeserver.config \
1353 --generate-config
1354
1355 This config can be edited if desired, for example to specify a different
1356 SSL certificate to use. Once done you can run the home server using:
1357
1358 $ python synapse/app/homeserver.py --config-path homeserver.config
1359
1360 See the README.rst for more information.
1361
1362 Also note that some config options have been renamed, including:
1363
1364 - "host" to "server-name"
1365 - "database" to "database-path"
1366 - "port" to "bind-port" and "unsecure-port"
1367
1368 # Upgrading to v0.0.1
1369
1370 This release completely changes the database schema and so requires
1371 upgrading it before starting the new version of the homeserver.
1372
1373 The script "database-prepare-for-0.0.1.sh" should be used to upgrade
1374 the database. This will save all user information, such as logins and
1375 profiles, but will otherwise purge the database. This includes messages,
1376 which rooms the home server was a member of and room alias mappings.
1377
1378 Before running the command the homeserver should be first completely
1379 shutdown. To run it, simply specify the location of the database, e.g.:
1380
1381 > ./scripts/database-prepare-for-0.0.1.sh "homeserver.db"
1382
1383 Once this has successfully completed it will be safe to restart the
1384 homeserver. You may notice that the homeserver takes a few seconds
1385 longer to restart than usual as it reinitializes the database.
1386
1387 On startup of the new version, users can either rejoin remote rooms
1388 using room aliases or by being reinvited. Alternatively, if any other
1389 homeserver sends a message to a room that the homeserver was previously
1390 in, the local HS will automatically rejoin the room.
+0
-7
docs/upgrading/README.md less more
0 <!--
1 Include the contents of UPGRADE.rst from the project root without moving it, which may
2 break links around the internet. Additionally, note that SUMMARY.md is unable to
3 directly link to content outside of the docs/ directory. So we use this file as a
4 redirection.
5 -->
6 {{#include ../../UPGRADE.rst}}
7474 synapse/util/daemonize.py,
7575 synapse/util/hash.py,
7676 synapse/util/iterutils.py,
77 synapse/util/linked_list.py,
7778 synapse/util/metrics.py,
7879 synapse/util/macaroons.py,
7980 synapse/util/module_loader.py,
9292 "local_media_repository": ["safe_from_quarantine"],
9393 "users": ["shadow_banned"],
9494 "e2e_fallback_keys_json": ["used"],
95 "access_tokens": ["used"],
9596 }
9697
9798
306307 information_schema.table_constraints AS tc
307308 INNER JOIN information_schema.constraint_column_usage AS ccu
308309 USING (table_schema, constraint_name)
309 WHERE tc.constraint_type = 'FOREIGN KEY';
310 WHERE tc.constraint_type = 'FOREIGN KEY'
311 AND tc.table_name != ccu.table_name;
310312 """
311313 txn.execute(sql)
312314
0 #!/usr/bin/env python
1 # Copyright 2021 The Matrix.org Foundation C.I.C.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from synapse._scripts.review_recent_signups import main
16
17 if __name__ == "__main__":
18 main()
99 # can be passed on the commandline for debugging.
1010
1111 import argparse
12 import json
1213 import os
1314 import signal
1415 import subprocess
3334 can be passed on the commandline for debugging.
3435 """
3536
37 projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
38
3639
3740 class Builder(object):
3841 def __init__(self, redirect_stdout=False):
5659 raise
5760
5861 def _inner_build(self, dist, skip_tests=False):
59 projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
60 os.chdir(projdir)
61
6262 tag = dist.split(":", 1)[1]
6363
6464 # Make the dir where the debs will live.
9292 ],
9393 stdout=stdout,
9494 stderr=subprocess.STDOUT,
95 cwd=projdir,
9596 )
9697
9798 container_name = "synapse_build_" + tag
179180 help="skip running tests after building",
180181 )
181182 parser.add_argument(
183 "--show-dists-json",
184 action="store_true",
185 help="instead of building the packages, just list the dists to build for, as a json array",
186 )
187 parser.add_argument(
182188 "dist",
183189 nargs="*",
184190 default=DISTS,
185191 help="a list of distributions to build for. Default: %(default)s",
186192 )
187193 args = parser.parse_args()
188 run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
194 if args.show_dists_json:
195 print(json.dumps(DISTS))
196 else:
197 run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
6464 fi
6565
6666 # Run the tests!
67 go test -v -tags synapse_blacklist,msc2946,msc3083,msc2716 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
67 go test -v -tags synapse_blacklist,msc2946,msc3083,msc2716,msc2403 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
8282 if current_version.pre:
8383 # If the current version is an RC we don't need to bump any of the
8484 # version numbers (other than the RC number).
85 base_version = "{}.{}.{}".format(
86 current_version.major,
87 current_version.minor,
88 current_version.micro,
89 )
90
9185 if rc:
9286 new_version = "{}.{}.{}rc{}".format(
9387 current_version.major,
9690 current_version.pre[1] + 1,
9791 )
9892 else:
99 new_version = base_version
93 new_version = "{}.{}.{}".format(
94 current_version.major,
95 current_version.minor,
96 current_version.micro,
97 )
10098 else:
101 # If this is a new release cycle then we need to know if its a major
102 # version bump or a hotfix.
99 # If this is a new release cycle then we need to know if it's a minor
100 # or a patch version bump.
103101 release_type = click.prompt(
104102 "Release type",
105 type=click.Choice(("major", "hotfix")),
103 type=click.Choice(("minor", "patch")),
106104 show_choices=True,
107 default="major",
105 default="minor",
108106 )
109107
110 if release_type == "major":
111 base_version = new_version = "{}.{}.{}".format(
112 current_version.major,
113 current_version.minor + 1,
114 0,
115 )
108 if release_type == "minor":
116109 if rc:
117110 new_version = "{}.{}.{}rc1".format(
118111 current_version.major,
119112 current_version.minor + 1,
120113 0,
121114 )
122
115 else:
116 new_version = "{}.{}.{}".format(
117 current_version.major,
118 current_version.minor + 1,
119 0,
120 )
123121 else:
124 base_version = new_version = "{}.{}.{}".format(
125 current_version.major,
126 current_version.minor,
127 current_version.micro + 1,
128 )
129122 if rc:
130123 new_version = "{}.{}.{}rc1".format(
124 current_version.major,
125 current_version.minor,
126 current_version.micro + 1,
127 )
128 else:
129 new_version = "{}.{}.{}".format(
131130 current_version.major,
132131 current_version.minor,
133132 current_version.micro + 1,
138137 click.get_current_context().abort()
139138
140139 # Switch to the release branch.
141 release_branch_name = f"release-v{current_version.major}.{current_version.minor}"
140 parsed_new_version = version.parse(new_version)
141 release_branch_name = (
142 f"release-v{parsed_new_version.major}.{parsed_new_version.minor}"
143 )
142144 release_branch = find_ref(repo, release_branch_name)
143145 if release_branch:
144146 if release_branch.is_remote():
152154 # release type.
153155 if current_version.is_prerelease:
154156 default = release_branch_name
155 elif release_type == "major":
157 elif release_type == "minor":
156158 default = "develop"
157159 else:
158160 default = "master"
4646 except ImportError:
4747 pass
4848
49 __version__ = "1.37.1"
49 __version__ = "1.38.0"
5050
5151 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
5252 # We import here so that we don't have to install a bunch of deps when
0 #!/usr/bin/env python
1 # Copyright 2021 The Matrix.org Foundation C.I.C.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import argparse
16 import sys
17 import time
18 from datetime import datetime
19 from typing import List
20
21 import attr
22
23 from synapse.config._base import RootConfig, find_config_files, read_config_files
24 from synapse.config.database import DatabaseConfig
25 from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
26 from synapse.storage.engines import create_engine
27
28
class ReviewConfig(RootConfig):
    """A minimal RootConfig subclass that parses only the database section
    of a Synapse config, so this script can connect to the DB without
    loading (or validating) the rest of the homeserver configuration.
    """

    # Only the database config is needed; everything else is ignored.
    config_classes = [DatabaseConfig]
32
33
@attr.s(auto_attribs=True)
class UserInfo:
    """Collected details about one recently registered local user."""

    # The user's full matrix ID.
    user_id: str
    # Registration time, in seconds (compared against since_ms / 1000 in
    # get_recent_users, so this is a unix timestamp in seconds).
    creation_ts: int
    # Email addresses from user_threepids (medium == "email").
    emails: List[str] = attr.Factory(list)
    # Display labels of joined rooms whose join_rules is not "public".
    private_rooms: List[str] = attr.Factory(list)
    # Display labels of joined rooms whose join_rules is "public".
    public_rooms: List[str] = attr.Factory(list)
    # IP addresses recorded for the user in the user_ips table.
    ips: List[str] = attr.Factory(list)
42
43
def get_recent_users(txn: LoggingTransaction, since_ms: int) -> List[UserInfo]:
    """Fetches recently registered users and some info on them."""

    sql = """
        SELECT name, creation_ts FROM users
        WHERE
            ? <= creation_ts
            AND deactivated = 0
    """

    # users.creation_ts is in seconds, while since_ms is milliseconds.
    txn.execute(sql, (since_ms / 1000,))

    users = []
    for user_id, creation_ts in txn:
        users.append(UserInfo(user_id, creation_ts))

    # Fill in email addresses, joined rooms and IPs for each user in turn.
    for info in users:
        info.emails = DatabasePool.simple_select_onecol_txn(
            txn,
            table="user_threepids",
            keyvalues={"user_id": info.user_id, "medium": "email"},
            retcol="address",
        )

        sql = """
            SELECT room_id, canonical_alias, name, join_rules
            FROM local_current_membership
            INNER JOIN room_stats_state USING (room_id)
            WHERE user_id = ? AND membership = 'join'
        """

        txn.execute(sql, (info.user_id,))
        for room_id, canonical_alias, name, join_rules in txn:
            # Prefer a human-readable label, falling back to the room ID.
            label = canonical_alias or name or room_id
            if join_rules == "public":
                info.public_rooms.append(label)
            else:
                info.private_rooms.append(label)

        info.ips = DatabasePool.simple_select_onecol_txn(
            txn,
            table="user_ips",
            keyvalues={"user_id": info.user_id},
            retcol="ip",
        )

    return users
88
89
def main():
    """Entry point for the review-recent-signups script.

    Parses command-line args, loads the database section of the Synapse
    config, fetches users registered within the requested window, and
    prints them (with registration context unless --only-users is given).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--config-path",
        action="append",
        metavar="CONFIG_FILE",
        help="The config files for Synapse.",
        required=True,
    )
    parser.add_argument(
        "-s",
        "--since",
        metavar="duration",
        help="Specify how far back to review user registrations for, defaults to 7d (i.e. 7 days).",
        default="7d",
    )
    parser.add_argument(
        "-e",
        "--exclude-emails",
        action="store_true",
        help="Exclude users that have validated email addresses",
    )
    parser.add_argument(
        "-u",
        "--only-users",
        action="store_true",
        help="Only print user IDs that match.",
    )

    config = ReviewConfig()

    config_args = parser.parse_args(sys.argv[1:])
    config_files = find_config_files(search_paths=config_args.config_path)
    config_dict = read_config_files(config_files)
    config.parse_config_dict(
        config_dict,
    )

    # Cutoff in milliseconds; anything registered after this is "recent".
    since_ms = time.time() * 1000 - config.parse_duration(config_args.since)
    exclude_users_with_email = config_args.exclude_emails
    include_context = not config_args.only_users

    # Find the database config that serves the "main" store. Previously a
    # missing "main" store silently fell through with the last config in the
    # list (or raised a bare NameError for an empty list); fail loudly instead.
    for database_config in config.database.databases:
        if "main" in database_config.databases:
            break
    else:
        raise RuntimeError("No database config found that serves the 'main' store")

    engine = create_engine(database_config.config)

    with make_conn(database_config, engine, "review_recent_signups") as db_conn:
        user_infos = get_recent_users(db_conn.cursor(), since_ms)

    for user_info in user_infos:
        if exclude_users_with_email and user_info.emails:
            continue

        if include_context:
            # Show at most three public room names, with an ellipsis if
            # there are more.
            print_public_rooms = ""
            if user_info.public_rooms:
                print_public_rooms = "(" + ", ".join(user_info.public_rooms[:3])

                if len(user_info.public_rooms) > 3:
                    print_public_rooms += ", ..."

                print_public_rooms += ")"

            print("# Created:", datetime.fromtimestamp(user_info.creation_ts))
            print("# Email:", ", ".join(user_info.emails) or "None")
            print("# IPs:", ", ".join(user_info.ips))
            print(
                "# Number joined public rooms:",
                len(user_info.public_rooms),
                print_public_rooms,
            )
            print("# Number joined private rooms:", len(user_info.private_rooms))
            print("#")

        print(user_info.user_id)

        if include_context:
            print()


if __name__ == "__main__":
    main()
1111 # See the License for the specific language governing permissions and
1212 # limitations under the License.
1313 import logging
14 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
14 from typing import TYPE_CHECKING, Optional, Tuple
1515
1616 import pymacaroons
1717 from netaddr import IPAddress
2727 InvalidClientTokenError,
2828 MissingClientTokenError,
2929 )
30 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
3130 from synapse.appservice import ApplicationService
3231 from synapse.events import EventBase
3332 from synapse.http import get_request_user_agent
3736 from synapse.types import Requester, StateMap, UserID, create_requester
3837 from synapse.util.caches.lrucache import LruCache
3938 from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
40 from synapse.util.metrics import Measure
4139
4240 if TYPE_CHECKING:
4341 from synapse.server import HomeServer
4543 logger = logging.getLogger(__name__)
4644
4745
48 AuthEventTypes = (
49 EventTypes.Create,
50 EventTypes.Member,
51 EventTypes.PowerLevels,
52 EventTypes.JoinRules,
53 EventTypes.RoomHistoryVisibility,
54 EventTypes.ThirdPartyInvite,
55 )
56
5746 # guests always get this device id.
5847 GUEST_DEVICE_ID = "guest_device"
5948
6453
6554 class Auth:
6655 """
67 FIXME: This class contains a mix of functions for authenticating users
68 of our client-server API and authenticating events added to room graphs.
69 The latter should be moved to synapse.handlers.event_auth.EventAuthHandler.
56 This class contains functions for authenticating users of our client-server API.
7057 """
7158
7259 def __init__(self, hs: "HomeServer"):
8774 self._track_appservice_user_ips = hs.config.track_appservice_user_ips
8875 self._macaroon_secret_key = hs.config.macaroon_secret_key
8976 self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
90
91 async def check_from_context(
92 self, room_version: str, event, context, do_sig_check=True
93 ) -> None:
94 auth_event_ids = event.auth_event_ids()
95 auth_events_by_id = await self.store.get_events(auth_event_ids)
96 auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()}
97
98 room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
99 event_auth.check(
100 room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check
101 )
10277
10378 async def check_user_in_room(
10479 self,
149124 return member
150125
151126 raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
152
153 async def check_host_in_room(self, room_id: str, host: str) -> bool:
154 with Measure(self.clock, "check_host_in_room"):
155 return await self.store.is_host_joined(room_id, host)
156
157 def get_public_keys(self, invite_event: EventBase) -> List[Dict[str, Any]]:
158 return event_auth.get_public_keys(invite_event)
159127
160128 async def get_user_by_req(
161129 self,
243211 "Guest access not allowed",
244212 errcode=Codes.GUEST_ACCESS_FORBIDDEN,
245213 )
214
215 # Mark the token as used. This is used to invalidate old refresh
216 # tokens after some time.
217 if not user_info.token_used and token_id is not None:
218 await self.store.mark_access_token_as_used(token_id)
246219
247220 requester = create_requester(
248221 user_info.user_id,
481454 True if the user is an admin
482455 """
483456 return await self.store.is_server_admin(user)
484
485 def compute_auth_events(
486 self,
487 event,
488 current_state_ids: StateMap[str],
489 for_verification: bool = False,
490 ) -> List[str]:
491 """Given an event and current state return the list of event IDs used
492 to auth an event.
493
494 If `for_verification` is False then only return auth events that
495 should be added to the event's `auth_events`.
496
497 Returns:
498 List of event IDs.
499 """
500
501 if event.type == EventTypes.Create:
502 return []
503
504 # Currently we ignore the `for_verification` flag even though there are
505 # some situations where we can drop particular auth events when adding
506 # to the event's `auth_events` (e.g. joins pointing to previous joins
507 # when room is publicly joinable). Dropping event IDs has the
508 # advantage that the auth chain for the room grows slower, but we use
509 # the auth chain in state resolution v2 to order events, which means
510 # care must be taken if dropping events to ensure that it doesn't
511 # introduce undesirable "state reset" behaviour.
512 #
513 # All of which sounds a bit tricky so we don't bother for now.
514
515 auth_ids = []
516 for etype, state_key in event_auth.auth_types_for_event(event):
517 auth_ev_id = current_state_ids.get((etype, state_key))
518 if auth_ev_id:
519 auth_ids.append(auth_ev_id)
520
521 return auth_ids
522457
523458 async def check_can_change_room_list(self, room_id: str, user: UserID) -> bool:
524459 """Determine whether the user is allowed to edit the room's entry in the
200200 )
201201
202202
class RoomTypes:
    """Understood values of the room_type field of m.room.create events."""

    # room_type value identifying a room as a Space.
    SPACE = "m.space"
207
208
203209 class RoomEncryptionAlgorithms:
204210 MEGOLM_V1_AES_SHA2 = "m.megolm.v1.aes-sha2"
205211 DEFAULT = MEGOLM_V1_AES_SHA2
2020 import sys
2121 import traceback
2222 import warnings
23 from typing import Awaitable, Callable, Iterable
23 from typing import TYPE_CHECKING, Awaitable, Callable, Iterable
2424
2525 from cryptography.utils import CryptographyDeprecationWarning
2626 from typing_extensions import NoReturn
4040 from synapse.logging.context import PreserveLoggingContext
4141 from synapse.metrics.background_process_metrics import wrap_as_background_process
4242 from synapse.metrics.jemalloc import setup_jemalloc_stats
43 from synapse.util.caches.lrucache import setup_expire_lru_cache_entries
4344 from synapse.util.daemonize import daemonize_process
4445 from synapse.util.rlimit import change_resource_limit
4546 from synapse.util.versionstring import get_version_string
47
48 if TYPE_CHECKING:
49 from synapse.server import HomeServer
4650
4751 logger = logging.getLogger(__name__)
4852
311315 logger.info("Context factories updated.")
312316
313317
314 async def start(hs: "synapse.server.HomeServer"):
318 async def start(hs: "HomeServer"):
315319 """
316320 Start a Synapse server or worker.
317321
363367 module(config=config, api=module_api)
364368
365369 load_legacy_spam_checkers(hs)
370
371 # If we've configured an expiry time for caches, start the background job now.
372 setup_expire_lru_cache_entries(hs)
366373
367374 # It is now safe to start your Synapse.
368375 hs.start_listening()
44 api,
55 appservice,
66 auth,
7 cache,
78 captcha,
89 cas,
910 consent,
8788 tracer: tracer.TracerConfig
8889 redis: redis.RedisConfig
8990 modules: modules.ModulesConfig
91 caches: cache.CacheConfig
9092 federation: federation.FederationConfig
9193
9294 config_classes: List = ...
115115 #event_cache_size: 10K
116116
117117 caches:
118 # Controls the global cache factor, which is the default cache factor
119 # for all caches if a specific factor for that cache is not otherwise
120 # set.
121 #
122 # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment
123 # variable. Setting by environment variable takes priority over
124 # setting through the config file.
125 #
126 # Defaults to 0.5, which will half the size of all caches.
127 #
128 #global_factor: 1.0
129
130 # A dictionary of cache name to cache factor for that individual
131 # cache. Overrides the global cache factor for a given cache.
132 #
133 # These can also be set through environment variables comprised
134 # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital
135 # letters and underscores. Setting by environment variable
136 # takes priority over setting through the config file.
137 # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0
138 #
139 # Some caches have '*' and other characters that are not
140 # alphanumeric or underscores. These caches can be named with or
141 # without the special characters stripped. For example, to specify
142 # the cache factor for `*stateGroupCache*` via an environment
143 # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`.
144 #
145 per_cache_factors:
146 #get_users_who_share_room_with_user: 2.0
118 # Controls the global cache factor, which is the default cache factor
119 # for all caches if a specific factor for that cache is not otherwise
120 # set.
121 #
122 # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment
123 # variable. Setting by environment variable takes priority over
124 # setting through the config file.
125 #
126 # Defaults to 0.5, which will halve the size of all caches.
127 #
128 #global_factor: 1.0
129
130 # A dictionary of cache name to cache factor for that individual
131 # cache. Overrides the global cache factor for a given cache.
132 #
133 # These can also be set through environment variables comprised
134 # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital
135 # letters and underscores. Setting by environment variable
136 # takes priority over setting through the config file.
137 # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0
138 #
139 # Some caches have '*' and other characters that are not
140 # alphanumeric or underscores. These caches can be named with or
141 # without the special characters stripped. For example, to specify
142 # the cache factor for `*stateGroupCache*` via an environment
143 # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`.
144 #
145 per_cache_factors:
146 #get_users_who_share_room_with_user: 2.0
147
148 # Controls how long an entry can be in a cache without having been
149 # accessed before being evicted. Defaults to None, which means
150 # entries are never evicted based on time.
151 #
152 #expiry_time: 30m
147153 """
148154
149155 def read_config(self, config, **kwargs):
199205 e.message # noqa: B306, DependencyException.message is a property
200206 )
201207
208 expiry_time = cache_config.get("expiry_time")
209 if expiry_time:
210 self.expiry_time_msec = self.parse_duration(expiry_time)
211 else:
212 self.expiry_time_msec = None
213
202214 # Resize all caches (if necessary) with the new factors we've loaded
203215 self.resize_all_caches()
204216
2121 # User Consent configuration
2222 #
2323 # for detailed instructions, see
24 # https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md
24 # https://matrix-org.github.io/synapse/latest/consent_tracking.html
2525 #
2626 # Parts of this section are required if enabling the 'consent' resource under
2727 # 'listeners', in particular 'template_dir' and 'version'.
6161 # cp_min: 5
6262 # cp_max: 10
6363 #
64 # For more information on using Synapse with Postgres, see `docs/postgres.md`.
64 # For more information on using Synapse with Postgres,
65 # see https://matrix-org.github.io/synapse/latest/postgres.html.
6566 #
6667 database:
6768 name: sqlite3
6363 # Note that this is a non-standard login type and client support is
6464 # expected to be non-existent.
6565 #
66 # See https://github.com/matrix-org/synapse/blob/master/docs/jwt.md.
66 # See https://matrix-org.github.io/synapse/latest/jwt.html.
6767 #
6868 #jwt_config:
6969 # Uncomment the following to enable authorization using JSON web
4848 # be ingested by ELK stacks. See [2] for details.
4949 #
5050 # [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
51 # [2]: https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md
51 # [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html
5252
5353 version: 1
5454
3636
3737 # Server admins can expand Synapse's functionality with external modules.
3838 #
39 # See https://matrix-org.github.io/synapse/develop/modules.html for more
39 # See https://matrix-org.github.io/synapse/latest/modules.html for more
4040 # documentation on how to configure or create custom modules for Synapse.
4141 #
4242 modules:
165165 #
166166 # module: The class name of a custom mapping module. Default is
167167 # {mapping_provider!r}.
168 # See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
168 # See https://matrix-org.github.io/synapse/latest/sso_mapping_providers.html#openid-mapping-providers
169169 # for information on implementing a custom mapping provider.
170170 #
171171 # config: Configuration for the mapping provider module. This section will
216216 # - attribute: groups
217217 # value: "admin"
218218 #
219 # See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
219 # See https://matrix-org.github.io/synapse/latest/openid.html
220220 # for information on how to configure these options.
221221 #
222222 # For backwards compatibility, it is also possible to configure a single OIDC
5656 # ex. LDAP, external tokens, etc.
5757 #
5858 # For more information and known implementations, please see
59 # https://github.com/matrix-org/synapse/blob/master/docs/password_auth_providers.md
59 # https://matrix-org.github.io/synapse/latest/password_auth_providers.html
6060 #
6161 # Note: instances wishing to use SAML or CAS authentication should
6262 # instead use the `saml2_config` or `cas_config` options,
118118 session_lifetime = self.parse_duration(session_lifetime)
119119 self.session_lifetime = session_lifetime
120120
121 # The `access_token_lifetime` applies for tokens that can be renewed
122 # using a refresh token, as per MSC2918. If it is `None`, the refresh
123 # token mechanism is disabled.
124 #
125 # Since it is incompatible with the `session_lifetime` mechanism, it is set to
126 # `None` by default if a `session_lifetime` is set.
127 access_token_lifetime = config.get(
128 "access_token_lifetime", "5m" if session_lifetime is None else None
129 )
130 if access_token_lifetime is not None:
131 access_token_lifetime = self.parse_duration(access_token_lifetime)
132 self.access_token_lifetime = access_token_lifetime
133
134 if session_lifetime is not None and access_token_lifetime is not None:
135 raise ConfigError(
136 "The refresh token mechanism is incompatible with the "
137 "`session_lifetime` option. Consider disabling the "
138 "`session_lifetime` option or disabling the refresh token "
139 "mechanism by removing the `access_token_lifetime` option."
140 )
141
121142 # The success template used during fallback auth.
122143 self.fallback_success_template = self.read_template("auth_success.html")
123144
249249 #
250250 # If you are using a reverse proxy you may also need to set this value in
251251 # your reverse proxy's config. Notably Nginx has a small max body size by default.
252 # See https://matrix-org.github.io/synapse/develop/reverse_proxy.html.
252 # See https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
253253 #
254254 #max_upload_size: 50M
255255
152152 METRICS_PORT_WARNING = """\
153153 The metrics_port configuration option is deprecated in Synapse 0.31 in favour of
154154 a listener. Please see
155 https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
155 https://matrix-org.github.io/synapse/latest/metrics-howto.html
156156 on how to configure the new listener.
157157 --------------------------------------------------------------------------------"""
158158
810810 # In most cases you should avoid using a matrix specific subdomain such as
811811 # matrix.example.com or synapse.example.com as the server_name for the same
812812 # reasons you wouldn't use user@email.example.com as your email address.
813 # See https://github.com/matrix-org/synapse/blob/master/docs/delegate.md
813 # See https://matrix-org.github.io/synapse/latest/delegate.html
814814 # for information on how to host Synapse on a subdomain while preserving
815815 # a clean server_name.
816816 #
987987 # 'all local interfaces'.
988988 #
989989 # type: the type of listener. Normally 'http', but other valid options are:
990 # 'manhole' (see docs/manhole.md),
991 # 'metrics' (see docs/metrics-howto.md),
992 # 'replication' (see docs/workers.md).
990 # 'manhole' (see https://matrix-org.github.io/synapse/latest/manhole.html),
991 # 'metrics' (see https://matrix-org.github.io/synapse/latest/metrics-howto.html),
992 # 'replication' (see https://matrix-org.github.io/synapse/latest/workers.html).
993993 #
994994 # tls: set to true to enable TLS for this listener. Will use the TLS
995995 # key/cert specified in tls_private_key_path / tls_certificate_path.
10141014 # client: the client-server API (/_matrix/client), and the synapse admin
10151015 # API (/_synapse/admin). Also implies 'media' and 'static'.
10161016 #
1017 # consent: user consent forms (/_matrix/consent). See
1018 # docs/consent_tracking.md.
1017 # consent: user consent forms (/_matrix/consent).
1018 # See https://matrix-org.github.io/synapse/latest/consent_tracking.html.
10191019 #
10201020 # federation: the server-server API (/_matrix/federation). Also implies
10211021 # 'media', 'keys', 'openid'
10241024 #
10251025 # media: the media API (/_matrix/media).
10261026 #
1027 # metrics: the metrics interface. See docs/metrics-howto.md.
1027 # metrics: the metrics interface.
1028 # See https://matrix-org.github.io/synapse/latest/metrics-howto.html.
10281029 #
10291030 # openid: OpenID authentication.
10301031 #
1031 # replication: the HTTP replication API (/_synapse/replication). See
1032 # docs/workers.md.
1032 # replication: the HTTP replication API (/_synapse/replication).
1033 # See https://matrix-org.github.io/synapse/latest/workers.html.
10331034 #
10341035 # static: static resources under synapse/static (/_matrix/static). (Mostly
10351036 # useful for 'fallback authentication'.)
10491050 # that unwraps TLS.
10501051 #
10511052 # If you plan to use a reverse proxy, please see
1052 # https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
1053 # https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
10531054 #
10541055 %(unsecure_http_bindings)s
10551056
2525 This server is using a spam checker module that is implementing the deprecated spam
2626 checker interface. Please check with the module's maintainer to see if a new version
2727 supporting Synapse's generic modules system is available.
28 For more information, please see https://matrix-org.github.io/synapse/develop/modules.html
28 For more information, please see https://matrix-org.github.io/synapse/latest/modules.html
2929 ---------------------------------------------------------------------------------------"""
3030
3131
5050 def generate_config_section(self, config_dir_path, server_name, **kwargs):
5151 return """
5252 # Settings for local room and user statistics collection. See
53 # docs/room_and_user_statistics.md.
53 # https://matrix-org.github.io/synapse/latest/room_and_user_statistics.html.
5454 #
5555 stats:
5656 # Uncomment the following to disable room and user statistics. Note that doing
8080 #enabled: true
8181
8282 # The list of homeservers we wish to send and receive span contexts and span baggage.
83 # See docs/opentracing.rst.
83 # See https://matrix-org.github.io/synapse/latest/opentracing.html.
8484 #
8585 # This is a list of regexes which are matched against the server_name of the
8686 # homeserver.
4949 #
5050 # If you set it true, you'll have to rebuild the user_directory search
5151 # indexes, see:
52 # https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
52 # https://matrix-org.github.io/synapse/latest/user_directory.html
5353 #
5454 # Uncomment to return search results containing all known users, even if that
5555 # user does not share a room with the requester.
1313 # limitations under the License.
1414
1515 import logging
16 from typing import Any, Dict, List, Optional, Set, Tuple
16 from typing import Any, Dict, List, Optional, Set, Tuple, Union
1717
1818 from canonicaljson import encode_canonical_json
1919 from signedjson.key import decode_verify_key_bytes
2828 RoomVersion,
2929 )
3030 from synapse.events import EventBase
31 from synapse.events.builder import EventBuilder
3132 from synapse.types import StateMap, UserID, get_domain_from_id
3233
3334 logger = logging.getLogger(__name__)
723724 return public_keys
724725
725726
726 def auth_types_for_event(event: EventBase) -> Set[Tuple[str, str]]:
727 def auth_types_for_event(event: Union[EventBase, EventBuilder]) -> Set[Tuple[str, str]]:
727728 """Given an event, return a list of (EventType, StateKey) that may be
728729 needed to auth the event. The returned list may be a superset of what
729730 would actually be required depending on the full state of the room.
117117 proactively_send = DictProperty("proactively_send") # type: bool
118118 redacted = DictProperty("redacted") # type: bool
119119 txn_id = DictProperty("txn_id") # type: str
120 token_id = DictProperty("token_id") # type: str
120 token_id = DictProperty("token_id") # type: int
121121 historical = DictProperty("historical") # type: bool
122122
123123 # XXX: These are set by StreamWorkerStore._set_before_and_after.
1111 # See the License for the specific language governing permissions and
1212 # limitations under the License.
1313 import logging
14 from typing import Any, Dict, List, Optional, Tuple, Union
14 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
1515
1616 import attr
1717 from nacl.signing import SigningKey
1818
19 from synapse.api.auth import Auth
2019 from synapse.api.constants import MAX_DEPTH
2120 from synapse.api.errors import UnsupportedRoomVersionError
2221 from synapse.api.room_versions import (
3332 from synapse.util import Clock
3433 from synapse.util.stringutils import random_string
3534
35 if TYPE_CHECKING:
36 from synapse.handlers.event_auth import EventAuthHandler
37 from synapse.server import HomeServer
38
3639 logger = logging.getLogger(__name__)
3740
3841
39 @attr.s(slots=True, cmp=False, frozen=True)
42 @attr.s(slots=True, cmp=False, frozen=True, auto_attribs=True)
4043 class EventBuilder:
4144 """A format independent event builder used to build up the event content
4245 before signing the event.
6164 _signing_key: The signing key to use to sign the event as the server
6265 """
6366
64 _state = attr.ib(type=StateHandler)
65 _auth = attr.ib(type=Auth)
66 _store = attr.ib(type=DataStore)
67 _clock = attr.ib(type=Clock)
68 _hostname = attr.ib(type=str)
69 _signing_key = attr.ib(type=SigningKey)
70
71 room_version = attr.ib(type=RoomVersion)
72
73 room_id = attr.ib(type=str)
74 type = attr.ib(type=str)
75 sender = attr.ib(type=str)
76
77 content = attr.ib(default=attr.Factory(dict), type=JsonDict)
78 unsigned = attr.ib(default=attr.Factory(dict), type=JsonDict)
67 _state: StateHandler
68 _event_auth_handler: "EventAuthHandler"
69 _store: DataStore
70 _clock: Clock
71 _hostname: str
72 _signing_key: SigningKey
73
74 room_version: RoomVersion
75
76 room_id: str
77 type: str
78 sender: str
79
80 content: JsonDict = attr.Factory(dict)
81 unsigned: JsonDict = attr.Factory(dict)
7982
8083 # These only exist on a subset of events, so they raise AttributeError if
8184 # someone tries to get them when they don't exist.
82 _state_key = attr.ib(default=None, type=Optional[str])
83 _redacts = attr.ib(default=None, type=Optional[str])
84 _origin_server_ts = attr.ib(default=None, type=Optional[int])
85
86 internal_metadata = attr.ib(
87 default=attr.Factory(lambda: _EventInternalMetadata({})),
88 type=_EventInternalMetadata,
85 _state_key: Optional[str] = None
86 _redacts: Optional[str] = None
87 _origin_server_ts: Optional[int] = None
88
89 internal_metadata: _EventInternalMetadata = attr.Factory(
90 lambda: _EventInternalMetadata({})
8991 )
9092
9193 @property
122124 state_ids = await self._state.get_current_state_ids(
123125 self.room_id, prev_event_ids
124126 )
125 auth_event_ids = self._auth.compute_auth_events(self, state_ids)
127 auth_event_ids = self._event_auth_handler.compute_auth_events(
128 self, state_ids
129 )
126130
127131 format_version = self.room_version.event_format
128132 if format_version == EventFormatVersions.V1:
183187
184188
185189 class EventBuilderFactory:
186 def __init__(self, hs):
190 def __init__(self, hs: "HomeServer"):
187191 self.clock = hs.get_clock()
188192 self.hostname = hs.hostname
189193 self.signing_key = hs.signing_key
190194
191195 self.store = hs.get_datastore()
192196 self.state = hs.get_state_handler()
193 self.auth = hs.get_auth()
194
195 def new(self, room_version, key_values):
197 self._event_auth_handler = hs.get_event_auth_handler()
198
199 def new(self, room_version: str, key_values: dict) -> EventBuilder:
196200 """Generate an event builder appropriate for the given room version
197201
198202 Deprecated: use for_room_version with a RoomVersion object instead
199203
200204 Args:
201 room_version (str): Version of the room that we're creating an event builder
202 for
203 key_values (dict): Fields used as the basis of the new event
205 room_version: Version of the room that we're creating an event builder for
206 key_values: Fields used as the basis of the new event
204207
205208 Returns:
206209 EventBuilder
211214 raise UnsupportedRoomVersionError()
212215 return self.for_room_version(v, key_values)
213216
214 def for_room_version(self, room_version, key_values):
217 def for_room_version(
218 self, room_version: RoomVersion, key_values: dict
219 ) -> EventBuilder:
215220 """Generate an event builder appropriate for the given room version
216221
217222 Args:
218 room_version (synapse.api.room_versions.RoomVersion):
223 room_version:
219224 Version of the room that we're creating an event builder for
220 key_values (dict): Fields used as the basis of the new event
225 key_values: Fields used as the basis of the new event
221226
222227 Returns:
223228 EventBuilder
225230 return EventBuilder(
226231 store=self.store,
227232 state=self.state,
228 auth=self.auth,
233 event_auth_handler=self._event_auth_handler,
229234 clock=self.clock,
230235 hostname=self.hostname,
231236 signing_key=self.signing_key,
285290 _event_id_counter = 0
286291
287292
288 def _create_event_id(clock, hostname):
293 def _create_event_id(clock: Clock, hostname: str) -> str:
289294 """Create a new event ID
290295
291296 Args:
292 clock (Clock)
293 hostname (str): The server name for the event ID
297 clock
298 hostname: The server name for the event ID
294299
295300 Returns:
296 str
301 The new event ID
297302 """
298303
299304 global _event_id_counter
8888 result = await self.spam_checker.check_event_for_spam(pdu)
8989
9090 if result:
91 logger.warning(
92 "Event contains spam, redacting %s: %s",
93 pdu.event_id,
94 pdu.get_pdu_json(),
95 )
96 return prune_event(pdu)
91 logger.warning("Event contains spam, soft-failing %s", pdu.event_id)
92 # we redact (to save disk space) as well as soft-failing (to stop
93 # using the event in prev_events).
94 redacted_event = prune_event(pdu)
95 redacted_event.internal_metadata.soft_failed = True
96 return redacted_event
9797
9898 return pdu
9999
3333 from twisted.internet.abstract import isIPAddress
3434 from twisted.python import failure
3535
36 from synapse.api.constants import EduTypes, EventTypes
36 from synapse.api.constants import EduTypes, EventTypes, Membership
3737 from synapse.api.errors import (
3838 AuthError,
3939 Codes,
4545 )
4646 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
4747 from synapse.events import EventBase
48 from synapse.events.snapshot import EventContext
4849 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
4950 from synapse.federation.persistence import TransactionActions
5051 from synapse.federation.units import Edu, Transaction
106107 def __init__(self, hs: "HomeServer"):
107108 super().__init__(hs)
108109
109 self.auth = hs.get_auth()
110110 self.handler = hs.get_federation_handler()
111111 self.state = hs.get_state_handler()
112 self._event_auth_handler = hs.get_event_auth_handler()
112113
113114 self.device_handler = hs.get_device_handler()
114115
146147
147148 self._room_prejoin_state_types = hs.config.api.room_prejoin_state
148149
150 # Whether we have started handling old events in the staging area.
151 self._started_handling_of_staged_events = False
152
153 @wrap_as_background_process("_handle_old_staged_events")
154 async def _handle_old_staged_events(self) -> None:
155 """Handle old staged events by fetching all rooms that have staged
156 events and start the processing of each of those rooms.
157 """
158
159 # Get all the rooms IDs with staged events.
160 room_ids = await self.store.get_all_rooms_with_staged_incoming_events()
161
162 # We then shuffle them so that if there are multiple instances doing
163 # this work they're less likely to collide.
164 random.shuffle(room_ids)
165
166 for room_id in room_ids:
167 room_version = await self.store.get_room_version(room_id)
168
169 # Try and acquire the processing lock for the room, if we get it start a
170 # background process for handling the events in the room.
171 lock = await self.store.try_acquire_lock(
172 _INBOUND_EVENT_HANDLING_LOCK_NAME, room_id
173 )
174 if lock:
175 logger.info("Handling old staged inbound events in %s", room_id)
176 self._process_incoming_pdus_in_room_inner(
177 room_id,
178 room_version,
179 lock,
180 )
181
182 # We pause a bit so that we don't start handling all rooms at once.
183 await self._clock.sleep(random.uniform(0, 0.1))
184
149185 async def on_backfill_request(
150186 self, origin: str, room_id: str, versions: List[str], limit: int
151187 ) -> Tuple[int, Dict[str, Any]]:
164200 async def on_incoming_transaction(
165201 self, origin: str, transaction_data: JsonDict
166202 ) -> Tuple[int, Dict[str, Any]]:
203 # If we receive a transaction we should make sure that kick off handling
204 # any old events in the staging area.
205 if not self._started_handling_of_staged_events:
206 self._started_handling_of_staged_events = True
207 self._handle_old_staged_events()
208
167209 # keep this as early as possible to make the calculated origin ts as
168210 # accurate as possible.
169211 request_time = self._clock.time_msec()
367409
368410 async def process_pdu(pdu: EventBase) -> JsonDict:
369411 event_id = pdu.event_id
370 with pdu_process_time.time():
371 with nested_logging_context(event_id):
372 try:
373 await self._handle_received_pdu(origin, pdu)
374 return {}
375 except FederationError as e:
376 logger.warning("Error handling PDU %s: %s", event_id, e)
377 return {"error": str(e)}
378 except Exception as e:
379 f = failure.Failure()
380 logger.error(
381 "Failed to handle PDU %s",
382 event_id,
383 exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore
384 )
385 return {"error": str(e)}
412 with nested_logging_context(event_id):
413 try:
414 await self._handle_received_pdu(origin, pdu)
415 return {}
416 except FederationError as e:
417 logger.warning("Error handling PDU %s: %s", event_id, e)
418 return {"error": str(e)}
419 except Exception as e:
420 f = failure.Failure()
421 logger.error(
422 "Failed to handle PDU %s",
423 event_id,
424 exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore
425 )
426 return {"error": str(e)}
386427
387428 await concurrently_execute(
388429 process_pdus_for_room, pdus_by_room.keys(), TRANSACTION_CONCURRENCY_LIMIT
419460 origin_host, _ = parse_server_name(origin)
420461 await self.check_server_matches_acl(origin_host, room_id)
421462
422 in_room = await self.auth.check_host_in_room(room_id, origin)
463 in_room = await self._event_auth_handler.check_host_in_room(room_id, origin)
423464 if not in_room:
424465 raise AuthError(403, "Host not in room.")
425466
452493 origin_host, _ = parse_server_name(origin)
453494 await self.check_server_matches_acl(origin_host, room_id)
454495
455 in_room = await self.auth.check_host_in_room(room_id, origin)
496 in_room = await self._event_auth_handler.check_host_in_room(room_id, origin)
456497 if not in_room:
457498 raise AuthError(403, "Host not in room.")
458499
543584 return {"event": ret_pdu.get_pdu_json(time_now)}
544585
545586 async def on_send_join_request(
546 self, origin: str, content: JsonDict
587 self, origin: str, content: JsonDict, room_id: str
547588 ) -> Dict[str, Any]:
548 logger.debug("on_send_join_request: content: %s", content)
549
550 assert_params_in_dict(content, ["room_id"])
551 room_version = await self.store.get_room_version(content["room_id"])
552 pdu = event_from_pdu_json(content, room_version)
553
554 origin_host, _ = parse_server_name(origin)
555 await self.check_server_matches_acl(origin_host, pdu.room_id)
556
557 logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
558
559 pdu = await self._check_sigs_and_hash(room_version, pdu)
560
561 res_pdus = await self.handler.on_send_join_request(origin, pdu)
589 context = await self._on_send_membership_event(
590 origin, content, Membership.JOIN, room_id
591 )
592
593 prev_state_ids = await context.get_prev_state_ids()
594 state_ids = list(prev_state_ids.values())
595 auth_chain = await self.store.get_auth_chain(room_id, state_ids)
596 state = await self.store.get_events(state_ids)
597
562598 time_now = self._clock.time_msec()
563599 return {
564 "state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
565 "auth_chain": [p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]],
600 "state": [p.get_pdu_json(time_now) for p in state.values()],
601 "auth_chain": [p.get_pdu_json(time_now) for p in auth_chain],
566602 }
567603
568604 async def on_make_leave_request(
577613 time_now = self._clock.time_msec()
578614 return {"event": pdu.get_pdu_json(time_now), "room_version": room_version}
579615
580 async def on_send_leave_request(self, origin: str, content: JsonDict) -> dict:
616 async def on_send_leave_request(
617 self, origin: str, content: JsonDict, room_id: str
618 ) -> dict:
581619 logger.debug("on_send_leave_request: content: %s", content)
582
583 assert_params_in_dict(content, ["room_id"])
584 room_version = await self.store.get_room_version(content["room_id"])
585 pdu = event_from_pdu_json(content, room_version)
586
587 origin_host, _ = parse_server_name(origin)
588 await self.check_server_matches_acl(origin_host, pdu.room_id)
589
590 logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
591
592 pdu = await self._check_sigs_and_hash(room_version, pdu)
593
594 await self.handler.on_send_leave_request(origin, pdu)
620 await self._on_send_membership_event(origin, content, Membership.LEAVE, room_id)
595621 return {}
596622
597623 async def on_make_knock_request(
657683 Returns:
658684 The stripped room state.
659685 """
660 logger.debug("on_send_knock_request: content: %s", content)
661
662 room_version = await self.store.get_room_version(room_id)
663
664 # Check that this room supports knocking as defined by its room version
665 if not room_version.msc2403_knocking:
666 raise SynapseError(
667 403,
668 "This room version does not support knocking",
669 errcode=Codes.FORBIDDEN,
670 )
671
672 pdu = event_from_pdu_json(content, room_version)
673
674 origin_host, _ = parse_server_name(origin)
675 await self.check_server_matches_acl(origin_host, pdu.room_id)
676
677 logger.debug("on_send_knock_request: pdu sigs: %s", pdu.signatures)
678
679 pdu = await self._check_sigs_and_hash(room_version, pdu)
680
681 # Handle the event, and retrieve the EventContext
682 event_context = await self.handler.on_send_knock_request(origin, pdu)
686 event_context = await self._on_send_membership_event(
687 origin, content, Membership.KNOCK, room_id
688 )
683689
684690 # Retrieve stripped state events from the room and send them back to the remote
685691 # server. This will allow the remote server's clients to display information
690696 )
691697 )
692698 return {"knock_state_events": stripped_room_state}
699
700 async def _on_send_membership_event(
701 self, origin: str, content: JsonDict, membership_type: str, room_id: str
702 ) -> EventContext:
703 """Handle an on_send_{join,leave,knock} request
704
705 Does some preliminary validation before passing the request on to the
706 federation handler.
707
708 Args:
709 origin: The (authenticated) requesting server
710 content: The body of the send_* request - a complete membership event
711 membership_type: The expected membership type (join or leave, depending
712 on the endpoint)
713 room_id: The room_id from the request, to be validated against the room_id
714 in the event
715
716 Returns:
717 The context of the event after inserting it into the room graph.
718
719 Raises:
720 SynapseError if there is a problem with the request, including things like
721 the room_id not matching or the event not being authorized.
722 """
723 assert_params_in_dict(content, ["room_id"])
724 if content["room_id"] != room_id:
725 raise SynapseError(
726 400,
727 "Room ID in body does not match that in request path",
728 Codes.BAD_JSON,
729 )
730
731 room_version = await self.store.get_room_version(room_id)
732
733 if membership_type == Membership.KNOCK and not room_version.msc2403_knocking:
734 raise SynapseError(
735 403,
736 "This room version does not support knocking",
737 errcode=Codes.FORBIDDEN,
738 )
739
740 event = event_from_pdu_json(content, room_version)
741
742 if event.type != EventTypes.Member or not event.is_state():
743 raise SynapseError(400, "Not an m.room.member event", Codes.BAD_JSON)
744
745 if event.content.get("membership") != membership_type:
746 raise SynapseError(400, "Not a %s event" % membership_type, Codes.BAD_JSON)
747
748 origin_host, _ = parse_server_name(origin)
749 await self.check_server_matches_acl(origin_host, event.room_id)
750
751 logger.debug("_on_send_membership_event: pdu sigs: %s", event.signatures)
752
753 event = await self._check_sigs_and_hash(room_version, event)
754
755 return await self.handler.on_send_membership_event(origin, event)
693756
694757 async def on_event_auth(
695758 self, origin: str, room_id: str, event_id: str
859922 room_id: str,
860923 room_version: RoomVersion,
861924 lock: Lock,
862 latest_origin: str,
863 latest_event: EventBase,
925 latest_origin: Optional[str] = None,
926 latest_event: Optional[EventBase] = None,
864927 ) -> None:
865928 """Process events in the staging area for the given room.
866929
867930 The latest_origin and latest_event args are the latest origin and event
868 received.
931 received (or None to simply pull the next event from the database).
869932 """
870933
871934 # The common path is for the event we just received be the only event in
872935 # the room, so instead of pulling the event out of the DB and parsing
873936 # the event we just pull out the next event ID and check if that matches.
874 next_origin, next_event_id = await self.store.get_next_staged_event_id_for_room(
875 room_id
876 )
877 if next_origin == latest_origin and next_event_id == latest_event.event_id:
937 if latest_event is not None and latest_origin is not None:
938 (
939 next_origin,
940 next_event_id,
941 ) = await self.store.get_next_staged_event_id_for_room(room_id)
942 if next_origin != latest_origin or next_event_id != latest_event.event_id:
943 latest_origin = None
944 latest_event = None
945
946 if latest_origin is None or latest_event is None:
947 next = await self.store.get_next_staged_event_for_room(
948 room_id, room_version
949 )
950 if not next:
951 await lock.release()
952 return
953
954 origin, event = next
955 else:
878956 origin = latest_origin
879957 event = latest_event
880 else:
881 next = await self.store.get_next_staged_event_for_room(
882 room_id, room_version
883 )
884 if not next:
885 return
886
887 origin, event = next
888958
889959 # We loop round until there are no more events in the room in the
890960 # staging area, or we fail to get the lock (which means another process
908978 exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore
909979 )
910980
911 await self.store.remove_received_event_from_staging(
981 received_ts = await self.store.remove_received_event_from_staging(
912982 origin, event.event_id
913983 )
984 if received_ts is not None:
985 pdu_process_time.observe(
986 (self._clock.time_msec() - received_ts) / 1000
987 )
914988
915989 # We need to do this check outside the lock to avoid a race between
916990 # a new event being inserted by another instance and it attempting
1414 import functools
1515 import logging
1616 import re
17 from typing import Container, Mapping, Optional, Sequence, Tuple, Type
17 from typing import (
18 Container,
19 Dict,
20 List,
21 Mapping,
22 Optional,
23 Sequence,
24 Tuple,
25 Type,
26 Union,
27 )
28
29 from typing_extensions import Literal
1830
1931 import synapse
2032 from synapse.api.constants import MAX_GROUP_CATEGORYID_LENGTH, MAX_GROUP_ROLEID_LENGTH
5567 class TransportLayerServer(JsonResource):
5668 """Handles incoming federation HTTP requests"""
5769
58 def __init__(self, hs, servlet_groups=None):
70 def __init__(self, hs: HomeServer, servlet_groups: Optional[List[str]] = None):
5971 """Initialize the TransportLayerServer
6072
6173 Will by default register all servlets. For custom behaviour, pass in
6274 a list of servlet_groups to register.
6375
6476 Args:
65 hs (synapse.server.HomeServer): homeserver
66 servlet_groups (list[str], optional): List of servlet groups to register.
77 hs: homeserver
78 servlet_groups: List of servlet groups to register.
6779 Defaults to ``DEFAULT_SERVLET_GROUPS``.
6880 """
6981 self.hs = hs
7789
7890 self.register_servlets()
7991
80 def register_servlets(self):
92 def register_servlets(self) -> None:
8193 register_servlets(
8294 self.hs,
8395 resource=self,
90102 class AuthenticationError(SynapseError):
91103 """There was a problem authenticating the request"""
92104
93 pass
94
95105
96106 class NoAuthenticationError(AuthenticationError):
97107 """The request had no authentication information"""
98
99 pass
100108
101109
102110 class Authenticator:
409417 RATELIMIT = False
410418
411419 # This is when someone is trying to send us a bunch of data.
412 async def on_PUT(self, origin, content, query, transaction_id):
420 async def on_PUT(
421 self,
422 origin: str,
423 content: JsonDict,
424 query: Dict[bytes, List[bytes]],
425 transaction_id: str,
426 ) -> Tuple[int, JsonDict]:
413427 """Called on PUT /send/<transaction_id>/
414428
415429 Args:
416 request (twisted.web.http.Request): The HTTP request.
417 transaction_id (str): The transaction_id associated with this
418 request. This is *not* None.
430 transaction_id: The transaction_id associated with this request. This
431 is *not* None.
419432
420433 Returns:
421434 Tuple of `(code, response)`, where
460473 PATH = "/event/(?P<event_id>[^/]*)/?"
461474
462475 # This is when someone asks for a data item for a given server data_id pair.
463 async def on_GET(self, origin, content, query, event_id):
476 async def on_GET(
477 self,
478 origin: str,
479 content: Literal[None],
480 query: Dict[bytes, List[bytes]],
481 event_id: str,
482 ) -> Tuple[int, Union[JsonDict, str]]:
464483 return await self.handler.on_pdu_request(origin, event_id)
465484
466485
468487 PATH = "/state/(?P<room_id>[^/]*)/?"
469488
470489 # This is when someone asks for all data for a given room.
471 async def on_GET(self, origin, content, query, room_id):
490 async def on_GET(
491 self,
492 origin: str,
493 content: Literal[None],
494 query: Dict[bytes, List[bytes]],
495 room_id: str,
496 ) -> Tuple[int, JsonDict]:
472497 return await self.handler.on_room_state_request(
473498 origin,
474499 room_id,
479504 class FederationStateIdsServlet(BaseFederationServerServlet):
480505 PATH = "/state_ids/(?P<room_id>[^/]*)/?"
481506
482 async def on_GET(self, origin, content, query, room_id):
507 async def on_GET(
508 self,
509 origin: str,
510 content: Literal[None],
511 query: Dict[bytes, List[bytes]],
512 room_id: str,
513 ) -> Tuple[int, JsonDict]:
483514 return await self.handler.on_state_ids_request(
484515 origin,
485516 room_id,
490521 class FederationBackfillServlet(BaseFederationServerServlet):
491522 PATH = "/backfill/(?P<room_id>[^/]*)/?"
492523
493 async def on_GET(self, origin, content, query, room_id):
524 async def on_GET(
525 self,
526 origin: str,
527 content: Literal[None],
528 query: Dict[bytes, List[bytes]],
529 room_id: str,
530 ) -> Tuple[int, JsonDict]:
494531 versions = [x.decode("ascii") for x in query[b"v"]]
495532 limit = parse_integer_from_args(query, "limit", None)
496533
504541 PATH = "/query/(?P<query_type>[^/]*)"
505542
506543 # This is when we receive a server-server Query
507 async def on_GET(self, origin, content, query, query_type):
544 async def on_GET(
545 self,
546 origin: str,
547 content: Literal[None],
548 query: Dict[bytes, List[bytes]],
549 query_type: str,
550 ) -> Tuple[int, JsonDict]:
508551 args = {k.decode("utf8"): v[0].decode("utf-8") for k, v in query.items()}
509552 args["origin"] = origin
510553 return await self.handler.on_query_request(query_type, args)
513556 class FederationMakeJoinServlet(BaseFederationServerServlet):
514557 PATH = "/make_join/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
515558
516 async def on_GET(self, origin, _content, query, room_id, user_id):
559 async def on_GET(
560 self,
561 origin: str,
562 content: Literal[None],
563 query: Dict[bytes, List[bytes]],
564 room_id: str,
565 user_id: str,
566 ) -> Tuple[int, JsonDict]:
517567 """
518568 Args:
519 origin (unicode): The authenticated server_name of the calling server
520
521 _content (None): (GETs don't have bodies)
522
523 query (dict[bytes, list[bytes]]): Query params from the request.
524
525 **kwargs (dict[unicode, unicode]): the dict mapping keys to path
526 components as specified in the path match regexp.
569 origin: The authenticated server_name of the calling server
570
571 content: (GETs don't have bodies)
572
573 query: Query params from the request.
574
575 **kwargs: the dict mapping keys to path components as specified in
576 the path match regexp.
527577
528578 Returns:
529 Tuple[int, object]: (response code, response object)
579 Tuple of (response code, response object)
530580 """
531 versions = query.get(b"ver")
532 if versions is not None:
533 supported_versions = [v.decode("utf-8") for v in versions]
534 else:
581 supported_versions = parse_strings_from_args(query, "ver", encoding="utf-8")
582 if supported_versions is None:
535583 supported_versions = ["1"]
536584
537 content = await self.handler.on_make_join_request(
585 result = await self.handler.on_make_join_request(
538586 origin, room_id, user_id, supported_versions=supported_versions
539587 )
540 return 200, content
588 return 200, result
541589
542590
543591 class FederationMakeLeaveServlet(BaseFederationServerServlet):
544592 PATH = "/make_leave/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
545593
546 async def on_GET(self, origin, content, query, room_id, user_id):
547 content = await self.handler.on_make_leave_request(origin, room_id, user_id)
548 return 200, content
594 async def on_GET(
595 self,
596 origin: str,
597 content: Literal[None],
598 query: Dict[bytes, List[bytes]],
599 room_id: str,
600 user_id: str,
601 ) -> Tuple[int, JsonDict]:
602 result = await self.handler.on_make_leave_request(origin, room_id, user_id)
603 return 200, result
549604
550605
551606 class FederationV1SendLeaveServlet(BaseFederationServerServlet):
552607 PATH = "/send_leave/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
553608
554 async def on_PUT(self, origin, content, query, room_id, event_id):
555 content = await self.handler.on_send_leave_request(origin, content)
556 return 200, (200, content)
609 async def on_PUT(
610 self,
611 origin: str,
612 content: JsonDict,
613 query: Dict[bytes, List[bytes]],
614 room_id: str,
615 event_id: str,
616 ) -> Tuple[int, Tuple[int, JsonDict]]:
617 result = await self.handler.on_send_leave_request(origin, content, room_id)
618 return 200, (200, result)
557619
558620
559621 class FederationV2SendLeaveServlet(BaseFederationServerServlet):
561623
562624 PREFIX = FEDERATION_V2_PREFIX
563625
564 async def on_PUT(self, origin, content, query, room_id, event_id):
565 content = await self.handler.on_send_leave_request(origin, content)
566 return 200, content
626 async def on_PUT(
627 self,
628 origin: str,
629 content: JsonDict,
630 query: Dict[bytes, List[bytes]],
631 room_id: str,
632 event_id: str,
633 ) -> Tuple[int, JsonDict]:
634 result = await self.handler.on_send_leave_request(origin, content, room_id)
635 return 200, result
567636
568637
569638 class FederationMakeKnockServlet(BaseFederationServerServlet):
570639 PATH = "/make_knock/(?P<room_id>[^/]*)/(?P<user_id>[^/]*)"
571640
572 async def on_GET(self, origin, content, query, room_id, user_id):
573 try:
574 # Retrieve the room versions the remote homeserver claims to support
575 supported_versions = parse_strings_from_args(query, "ver", encoding="utf-8")
576 except KeyError:
577 raise SynapseError(400, "Missing required query parameter 'ver'")
578
579 content = await self.handler.on_make_knock_request(
641 async def on_GET(
642 self,
643 origin: str,
644 content: Literal[None],
645 query: Dict[bytes, List[bytes]],
646 room_id: str,
647 user_id: str,
648 ) -> Tuple[int, JsonDict]:
649 # Retrieve the room versions the remote homeserver claims to support
650 supported_versions = parse_strings_from_args(
651 query, "ver", required=True, encoding="utf-8"
652 )
653
654 result = await self.handler.on_make_knock_request(
580655 origin, room_id, user_id, supported_versions=supported_versions
581656 )
582 return 200, content
657 return 200, result
583658
584659
585660 class FederationV1SendKnockServlet(BaseFederationServerServlet):
586661 PATH = "/send_knock/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
587662
588 async def on_PUT(self, origin, content, query, room_id, event_id):
589 content = await self.handler.on_send_knock_request(origin, content, room_id)
590 return 200, content
663 async def on_PUT(
664 self,
665 origin: str,
666 content: JsonDict,
667 query: Dict[bytes, List[bytes]],
668 room_id: str,
669 event_id: str,
670 ) -> Tuple[int, JsonDict]:
671 result = await self.handler.on_send_knock_request(origin, content, room_id)
672 return 200, result
591673
592674
593675 class FederationEventAuthServlet(BaseFederationServerServlet):
594676 PATH = "/event_auth/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
595677
596 async def on_GET(self, origin, content, query, room_id, event_id):
678 async def on_GET(
679 self,
680 origin: str,
681 content: Literal[None],
682 query: Dict[bytes, List[bytes]],
683 room_id: str,
684 event_id: str,
685 ) -> Tuple[int, JsonDict]:
597686 return await self.handler.on_event_auth(origin, room_id, event_id)
598687
599688
600689 class FederationV1SendJoinServlet(BaseFederationServerServlet):
601690 PATH = "/send_join/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
602691
603 async def on_PUT(self, origin, content, query, room_id, event_id):
604 # TODO(paul): assert that room_id/event_id parsed from path actually
692 async def on_PUT(
693 self,
694 origin: str,
695 content: JsonDict,
696 query: Dict[bytes, List[bytes]],
697 room_id: str,
698 event_id: str,
699 ) -> Tuple[int, Tuple[int, JsonDict]]:
700 # TODO(paul): assert that event_id parsed from path actually
605701 # match those given in content
606 content = await self.handler.on_send_join_request(origin, content)
607 return 200, (200, content)
702 result = await self.handler.on_send_join_request(origin, content, room_id)
703 return 200, (200, result)
608704
609705
610706 class FederationV2SendJoinServlet(BaseFederationServerServlet):
612708
613709 PREFIX = FEDERATION_V2_PREFIX
614710
615 async def on_PUT(self, origin, content, query, room_id, event_id):
616 # TODO(paul): assert that room_id/event_id parsed from path actually
711 async def on_PUT(
712 self,
713 origin: str,
714 content: JsonDict,
715 query: Dict[bytes, List[bytes]],
716 room_id: str,
717 event_id: str,
718 ) -> Tuple[int, JsonDict]:
719 # TODO(paul): assert that event_id parsed from path actually
617720 # match those given in content
618 content = await self.handler.on_send_join_request(origin, content)
619 return 200, content
721 result = await self.handler.on_send_join_request(origin, content, room_id)
722 return 200, result
620723
621724
622725 class FederationV1InviteServlet(BaseFederationServerServlet):
623726 PATH = "/invite/(?P<room_id>[^/]*)/(?P<event_id>[^/]*)"
624727
625 async def on_PUT(self, origin, content, query, room_id, event_id):
728 async def on_PUT(
729 self,
730 origin: str,
731 content: JsonDict,
732 query: Dict[bytes, List[bytes]],
733 room_id: str,
734 event_id: str,
735 ) -> Tuple[int, Tuple[int, JsonDict]]:
626736 # We don't get a room version, so we have to assume its EITHER v1 or
627737 # v2. This is "fine" as the only difference between V1 and V2 is the
628738 # state resolution algorithm, and we don't use that for processing
629739 # invites
630 content = await self.handler.on_invite_request(
740 result = await self.handler.on_invite_request(
631741 origin, content, room_version_id=RoomVersions.V1.identifier
632742 )
633743
634744 # V1 federation API is defined to return a content of `[200, {...}]`
635745 # due to a historical bug.
636 return 200, (200, content)
746 return 200, (200, result)
637747
638748
639749 class FederationV2InviteServlet(BaseFederationServerServlet):
641751
642752 PREFIX = FEDERATION_V2_PREFIX
643753
644 async def on_PUT(self, origin, content, query, room_id, event_id):
754 async def on_PUT(
755 self,
756 origin: str,
757 content: JsonDict,
758 query: Dict[bytes, List[bytes]],
759 room_id: str,
760 event_id: str,
761 ) -> Tuple[int, JsonDict]:
645762 # TODO(paul): assert that room_id/event_id parsed from path actually
646763 # match those given in content
647764
654771
655772 event.setdefault("unsigned", {})["invite_room_state"] = invite_room_state
656773
657 content = await self.handler.on_invite_request(
774 result = await self.handler.on_invite_request(
658775 origin, event, room_version_id=room_version
659776 )
660 return 200, content
777 return 200, result
661778
662779
663780 class FederationThirdPartyInviteExchangeServlet(BaseFederationServerServlet):
664781 PATH = "/exchange_third_party_invite/(?P<room_id>[^/]*)"
665782
666 async def on_PUT(self, origin, content, query, room_id):
783 async def on_PUT(
784 self,
785 origin: str,
786 content: JsonDict,
787 query: Dict[bytes, List[bytes]],
788 room_id: str,
789 ) -> Tuple[int, JsonDict]:
667790 await self.handler.on_exchange_third_party_invite_request(content)
668791 return 200, {}
669792
671794 class FederationClientKeysQueryServlet(BaseFederationServerServlet):
672795 PATH = "/user/keys/query"
673796
674 async def on_POST(self, origin, content, query):
797 async def on_POST(
798 self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]
799 ) -> Tuple[int, JsonDict]:
675800 return await self.handler.on_query_client_keys(origin, content)
676801
677802
678803 class FederationUserDevicesQueryServlet(BaseFederationServerServlet):
679804 PATH = "/user/devices/(?P<user_id>[^/]*)"
680805
681 async def on_GET(self, origin, content, query, user_id):
806 async def on_GET(
807 self,
808 origin: str,
809 content: Literal[None],
810 query: Dict[bytes, List[bytes]],
811 user_id: str,
812 ) -> Tuple[int, JsonDict]:
682813 return await self.handler.on_query_user_devices(origin, user_id)
683814
684815
685816 class FederationClientKeysClaimServlet(BaseFederationServerServlet):
686817 PATH = "/user/keys/claim"
687818
688 async def on_POST(self, origin, content, query):
819 async def on_POST(
820 self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]
821 ) -> Tuple[int, JsonDict]:
689822 response = await self.handler.on_claim_client_keys(origin, content)
690823 return 200, response
691824
694827 # TODO(paul): Why does this path alone end with "/?" optional?
695828 PATH = "/get_missing_events/(?P<room_id>[^/]*)/?"
696829
697 async def on_POST(self, origin, content, query, room_id):
830 async def on_POST(
831 self,
832 origin: str,
833 content: JsonDict,
834 query: Dict[bytes, List[bytes]],
835 room_id: str,
836 ) -> Tuple[int, JsonDict]:
698837 limit = int(content.get("limit", 10))
699838 earliest_events = content.get("earliest_events", [])
700839 latest_events = content.get("latest_events", [])
701840
702 content = await self.handler.on_get_missing_events(
841 result = await self.handler.on_get_missing_events(
703842 origin,
704843 room_id=room_id,
705844 earliest_events=earliest_events,
707846 limit=limit,
708847 )
709848
710 return 200, content
849 return 200, result
711850
712851
713852 class On3pidBindServlet(BaseFederationServerServlet):
715854
716855 REQUIRE_AUTH = False
717856
718 async def on_POST(self, origin, content, query):
857 async def on_POST(
858 self, origin: Optional[str], content: JsonDict, query: Dict[bytes, List[bytes]]
859 ) -> Tuple[int, JsonDict]:
719860 if "invites" in content:
720861 last_exception = None
721862 for invite in content["invites"]:
761902
762903 REQUIRE_AUTH = False
763904
764 async def on_GET(self, origin, content, query):
765 token = query.get(b"access_token", [None])[0]
905 async def on_GET(
906 self,
907 origin: Optional[str],
908 content: Literal[None],
909 query: Dict[bytes, List[bytes]],
910 ) -> Tuple[int, JsonDict]:
911 token = parse_string_from_args(query, "access_token")
766912 if token is None:
767913 return (
768914 401,
769915 {"errcode": "M_MISSING_TOKEN", "error": "Access Token required"},
770916 )
771917
772 user_id = await self.handler.on_openid_userinfo(token.decode("ascii"))
918 user_id = await self.handler.on_openid_userinfo(token)
773919
774920 if user_id is None:
775921 return (
828974 self.handler = hs.get_room_list_handler()
829975 self.allow_access = allow_access
830976
831 async def on_GET(self, origin, content, query):
977 async def on_GET(
978 self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]]
979 ) -> Tuple[int, JsonDict]:
832980 if not self.allow_access:
833981 raise FederationDeniedError(origin)
834982
8571005 )
8581006 return 200, data
8591007
860 async def on_POST(self, origin, content, query):
1008 async def on_POST(
1009 self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]
1010 ) -> Tuple[int, JsonDict]:
8611011 # This implements MSC2197 (Search Filtering over Federation)
8621012 if not self.allow_access:
8631013 raise FederationDeniedError(origin)
9031053
9041054 REQUIRE_AUTH = False
9051055
906 async def on_GET(self, origin, content, query):
1056 async def on_GET(
1057 self,
1058 origin: Optional[str],
1059 content: Literal[None],
1060 query: Dict[bytes, List[bytes]],
1061 ) -> Tuple[int, JsonDict]:
9071062 return (
9081063 200,
9091064 {"server": {"name": "Synapse", "version": get_version_string(synapse)}},
9321087
9331088 PATH = "/groups/(?P<group_id>[^/]*)/profile"
9341089
935 async def on_GET(self, origin, content, query, group_id):
1090 async def on_GET(
1091 self,
1092 origin: str,
1093 content: Literal[None],
1094 query: Dict[bytes, List[bytes]],
1095 group_id: str,
1096 ) -> Tuple[int, JsonDict]:
9361097 requester_user_id = parse_string_from_args(query, "requester_user_id")
9371098 if get_domain_from_id(requester_user_id) != origin:
9381099 raise SynapseError(403, "requester_user_id doesn't match origin")
9411102
9421103 return 200, new_content
9431104
944 async def on_POST(self, origin, content, query, group_id):
1105 async def on_POST(
1106 self,
1107 origin: str,
1108 content: JsonDict,
1109 query: Dict[bytes, List[bytes]],
1110 group_id: str,
1111 ) -> Tuple[int, JsonDict]:
9451112 requester_user_id = parse_string_from_args(query, "requester_user_id")
9461113 if get_domain_from_id(requester_user_id) != origin:
9471114 raise SynapseError(403, "requester_user_id doesn't match origin")
9561123 class FederationGroupsSummaryServlet(BaseGroupsServerServlet):
9571124 PATH = "/groups/(?P<group_id>[^/]*)/summary"
9581125
959 async def on_GET(self, origin, content, query, group_id):
1126 async def on_GET(
1127 self,
1128 origin: str,
1129 content: Literal[None],
1130 query: Dict[bytes, List[bytes]],
1131 group_id: str,
1132 ) -> Tuple[int, JsonDict]:
9601133 requester_user_id = parse_string_from_args(query, "requester_user_id")
9611134 if get_domain_from_id(requester_user_id) != origin:
9621135 raise SynapseError(403, "requester_user_id doesn't match origin")
9711144
9721145 PATH = "/groups/(?P<group_id>[^/]*)/rooms"
9731146
974 async def on_GET(self, origin, content, query, group_id):
1147 async def on_GET(
1148 self,
1149 origin: str,
1150 content: Literal[None],
1151 query: Dict[bytes, List[bytes]],
1152 group_id: str,
1153 ) -> Tuple[int, JsonDict]:
9751154 requester_user_id = parse_string_from_args(query, "requester_user_id")
9761155 if get_domain_from_id(requester_user_id) != origin:
9771156 raise SynapseError(403, "requester_user_id doesn't match origin")
9861165
9871166 PATH = "/groups/(?P<group_id>[^/]*)/room/(?P<room_id>[^/]*)"
9881167
989 async def on_POST(self, origin, content, query, group_id, room_id):
1168 async def on_POST(
1169 self,
1170 origin: str,
1171 content: JsonDict,
1172 query: Dict[bytes, List[bytes]],
1173 group_id: str,
1174 room_id: str,
1175 ) -> Tuple[int, JsonDict]:
9901176 requester_user_id = parse_string_from_args(query, "requester_user_id")
9911177 if get_domain_from_id(requester_user_id) != origin:
9921178 raise SynapseError(403, "requester_user_id doesn't match origin")
9971183
9981184 return 200, new_content
9991185
1000 async def on_DELETE(self, origin, content, query, group_id, room_id):
1186 async def on_DELETE(
1187 self,
1188 origin: str,
1189 content: Literal[None],
1190 query: Dict[bytes, List[bytes]],
1191 group_id: str,
1192 room_id: str,
1193 ) -> Tuple[int, JsonDict]:
10011194 requester_user_id = parse_string_from_args(query, "requester_user_id")
10021195 if get_domain_from_id(requester_user_id) != origin:
10031196 raise SynapseError(403, "requester_user_id doesn't match origin")
10171210 "/config/(?P<config_key>[^/]*)"
10181211 )
10191212
1020 async def on_POST(self, origin, content, query, group_id, room_id, config_key):
1213 async def on_POST(
1214 self,
1215 origin: str,
1216 content: JsonDict,
1217 query: Dict[bytes, List[bytes]],
1218 group_id: str,
1219 room_id: str,
1220 config_key: str,
1221 ) -> Tuple[int, JsonDict]:
10211222 requester_user_id = parse_string_from_args(query, "requester_user_id")
10221223 if get_domain_from_id(requester_user_id) != origin:
10231224 raise SynapseError(403, "requester_user_id doesn't match origin")
10341235
10351236 PATH = "/groups/(?P<group_id>[^/]*)/users"
10361237
1037 async def on_GET(self, origin, content, query, group_id):
1238 async def on_GET(
1239 self,
1240 origin: str,
1241 content: Literal[None],
1242 query: Dict[bytes, List[bytes]],
1243 group_id: str,
1244 ) -> Tuple[int, JsonDict]:
10381245 requester_user_id = parse_string_from_args(query, "requester_user_id")
10391246 if get_domain_from_id(requester_user_id) != origin:
10401247 raise SynapseError(403, "requester_user_id doesn't match origin")
10491256
10501257 PATH = "/groups/(?P<group_id>[^/]*)/invited_users"
10511258
1052 async def on_GET(self, origin, content, query, group_id):
1259 async def on_GET(
1260 self,
1261 origin: str,
1262 content: Literal[None],
1263 query: Dict[bytes, List[bytes]],
1264 group_id: str,
1265 ) -> Tuple[int, JsonDict]:
10531266 requester_user_id = parse_string_from_args(query, "requester_user_id")
10541267 if get_domain_from_id(requester_user_id) != origin:
10551268 raise SynapseError(403, "requester_user_id doesn't match origin")
10661279
10671280 PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/invite"
10681281
1069 async def on_POST(self, origin, content, query, group_id, user_id):
1282 async def on_POST(
1283 self,
1284 origin: str,
1285 content: JsonDict,
1286 query: Dict[bytes, List[bytes]],
1287 group_id: str,
1288 user_id: str,
1289 ) -> Tuple[int, JsonDict]:
10701290 requester_user_id = parse_string_from_args(query, "requester_user_id")
10711291 if get_domain_from_id(requester_user_id) != origin:
10721292 raise SynapseError(403, "requester_user_id doesn't match origin")
10831303
10841304 PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/accept_invite"
10851305
1086 async def on_POST(self, origin, content, query, group_id, user_id):
1306 async def on_POST(
1307 self,
1308 origin: str,
1309 content: JsonDict,
1310 query: Dict[bytes, List[bytes]],
1311 group_id: str,
1312 user_id: str,
1313 ) -> Tuple[int, JsonDict]:
10871314 if get_domain_from_id(user_id) != origin:
10881315 raise SynapseError(403, "user_id doesn't match origin")
10891316
10971324
10981325 PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/join"
10991326
1100 async def on_POST(self, origin, content, query, group_id, user_id):
1327 async def on_POST(
1328 self,
1329 origin: str,
1330 content: JsonDict,
1331 query: Dict[bytes, List[bytes]],
1332 group_id: str,
1333 user_id: str,
1334 ) -> Tuple[int, JsonDict]:
11011335 if get_domain_from_id(user_id) != origin:
11021336 raise SynapseError(403, "user_id doesn't match origin")
11031337
11111345
11121346 PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/remove"
11131347
1114 async def on_POST(self, origin, content, query, group_id, user_id):
1348 async def on_POST(
1349 self,
1350 origin: str,
1351 content: JsonDict,
1352 query: Dict[bytes, List[bytes]],
1353 group_id: str,
1354 user_id: str,
1355 ) -> Tuple[int, JsonDict]:
11151356 requester_user_id = parse_string_from_args(query, "requester_user_id")
11161357 if get_domain_from_id(requester_user_id) != origin:
11171358 raise SynapseError(403, "requester_user_id doesn't match origin")
11451386
11461387 PATH = "/groups/local/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/invite"
11471388
1148 async def on_POST(self, origin, content, query, group_id, user_id):
1389 async def on_POST(
1390 self,
1391 origin: str,
1392 content: JsonDict,
1393 query: Dict[bytes, List[bytes]],
1394 group_id: str,
1395 user_id: str,
1396 ) -> Tuple[int, JsonDict]:
11491397 if get_domain_from_id(group_id) != origin:
11501398 raise SynapseError(403, "group_id doesn't match origin")
11511399
11631411
11641412 PATH = "/groups/local/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/remove"
11651413
1166 async def on_POST(self, origin, content, query, group_id, user_id):
1414 async def on_POST(
1415 self,
1416 origin: str,
1417 content: JsonDict,
1418 query: Dict[bytes, List[bytes]],
1419 group_id: str,
1420 user_id: str,
1421 ) -> Tuple[int, None]:
11671422 if get_domain_from_id(group_id) != origin:
11681423 raise SynapseError(403, "user_id doesn't match origin")
11691424
11711426 self.handler, GroupsLocalHandler
11721427 ), "Workers cannot handle group removals."
11731428
1174 new_content = await self.handler.user_removed_from_group(
1175 group_id, user_id, content
1176 )
1177
1178 return 200, new_content
1429 await self.handler.user_removed_from_group(group_id, user_id, content)
1430
1431 return 200, None
11791432
11801433
11811434 class FederationGroupsRenewAttestaionServlet(BaseFederationServlet):
11931446 super().__init__(hs, authenticator, ratelimiter, server_name)
11941447 self.handler = hs.get_groups_attestation_renewer()
11951448
1196 async def on_POST(self, origin, content, query, group_id, user_id):
1449 async def on_POST(
1450 self,
1451 origin: str,
1452 content: JsonDict,
1453 query: Dict[bytes, List[bytes]],
1454 group_id: str,
1455 user_id: str,
1456 ) -> Tuple[int, JsonDict]:
11971457 # We don't need to check auth here as we check the attestation signatures
11981458
11991459 new_content = await self.handler.on_renew_attestation(
12171477 "/rooms/(?P<room_id>[^/]*)"
12181478 )
12191479
1220 async def on_POST(self, origin, content, query, group_id, category_id, room_id):
1480 async def on_POST(
1481 self,
1482 origin: str,
1483 content: JsonDict,
1484 query: Dict[bytes, List[bytes]],
1485 group_id: str,
1486 category_id: str,
1487 room_id: str,
1488 ) -> Tuple[int, JsonDict]:
12211489 requester_user_id = parse_string_from_args(query, "requester_user_id")
12221490 if get_domain_from_id(requester_user_id) != origin:
12231491 raise SynapseError(403, "requester_user_id doesn't match origin")
12451513
12461514 return 200, resp
12471515
1248 async def on_DELETE(self, origin, content, query, group_id, category_id, room_id):
1516 async def on_DELETE(
1517 self,
1518 origin: str,
1519 content: Literal[None],
1520 query: Dict[bytes, List[bytes]],
1521 group_id: str,
1522 category_id: str,
1523 room_id: str,
1524 ) -> Tuple[int, JsonDict]:
12491525 requester_user_id = parse_string_from_args(query, "requester_user_id")
12501526 if get_domain_from_id(requester_user_id) != origin:
12511527 raise SynapseError(403, "requester_user_id doesn't match origin")
12651541
12661542 PATH = "/groups/(?P<group_id>[^/]*)/categories/?"
12671543
1268 async def on_GET(self, origin, content, query, group_id):
1544 async def on_GET(
1545 self,
1546 origin: str,
1547 content: Literal[None],
1548 query: Dict[bytes, List[bytes]],
1549 group_id: str,
1550 ) -> Tuple[int, JsonDict]:
12691551 requester_user_id = parse_string_from_args(query, "requester_user_id")
12701552 if get_domain_from_id(requester_user_id) != origin:
12711553 raise SynapseError(403, "requester_user_id doesn't match origin")
12801562
12811563 PATH = "/groups/(?P<group_id>[^/]*)/categories/(?P<category_id>[^/]+)"
12821564
1283 async def on_GET(self, origin, content, query, group_id, category_id):
1565 async def on_GET(
1566 self,
1567 origin: str,
1568 content: Literal[None],
1569 query: Dict[bytes, List[bytes]],
1570 group_id: str,
1571 category_id: str,
1572 ) -> Tuple[int, JsonDict]:
12841573 requester_user_id = parse_string_from_args(query, "requester_user_id")
12851574 if get_domain_from_id(requester_user_id) != origin:
12861575 raise SynapseError(403, "requester_user_id doesn't match origin")
12911580
12921581 return 200, resp
12931582
1294 async def on_POST(self, origin, content, query, group_id, category_id):
1583 async def on_POST(
1584 self,
1585 origin: str,
1586 content: JsonDict,
1587 query: Dict[bytes, List[bytes]],
1588 group_id: str,
1589 category_id: str,
1590 ) -> Tuple[int, JsonDict]:
12951591 requester_user_id = parse_string_from_args(query, "requester_user_id")
12961592 if get_domain_from_id(requester_user_id) != origin:
12971593 raise SynapseError(403, "requester_user_id doesn't match origin")
13131609
13141610 return 200, resp
13151611
1316 async def on_DELETE(self, origin, content, query, group_id, category_id):
1612 async def on_DELETE(
1613 self,
1614 origin: str,
1615 content: Literal[None],
1616 query: Dict[bytes, List[bytes]],
1617 group_id: str,
1618 category_id: str,
1619 ) -> Tuple[int, JsonDict]:
13171620 requester_user_id = parse_string_from_args(query, "requester_user_id")
13181621 if get_domain_from_id(requester_user_id) != origin:
13191622 raise SynapseError(403, "requester_user_id doesn't match origin")
13331636
13341637 PATH = "/groups/(?P<group_id>[^/]*)/roles/?"
13351638
1336 async def on_GET(self, origin, content, query, group_id):
1639 async def on_GET(
1640 self,
1641 origin: str,
1642 content: Literal[None],
1643 query: Dict[bytes, List[bytes]],
1644 group_id: str,
1645 ) -> Tuple[int, JsonDict]:
13371646 requester_user_id = parse_string_from_args(query, "requester_user_id")
13381647 if get_domain_from_id(requester_user_id) != origin:
13391648 raise SynapseError(403, "requester_user_id doesn't match origin")
13481657
13491658 PATH = "/groups/(?P<group_id>[^/]*)/roles/(?P<role_id>[^/]+)"
13501659
1351 async def on_GET(self, origin, content, query, group_id, role_id):
1660 async def on_GET(
1661 self,
1662 origin: str,
1663 content: Literal[None],
1664 query: Dict[bytes, List[bytes]],
1665 group_id: str,
1666 role_id: str,
1667 ) -> Tuple[int, JsonDict]:
13521668 requester_user_id = parse_string_from_args(query, "requester_user_id")
13531669 if get_domain_from_id(requester_user_id) != origin:
13541670 raise SynapseError(403, "requester_user_id doesn't match origin")
13571673
13581674 return 200, resp
13591675
1360 async def on_POST(self, origin, content, query, group_id, role_id):
1676 async def on_POST(
1677 self,
1678 origin: str,
1679 content: JsonDict,
1680 query: Dict[bytes, List[bytes]],
1681 group_id: str,
1682 role_id: str,
1683 ) -> Tuple[int, JsonDict]:
13611684 requester_user_id = parse_string_from_args(query, "requester_user_id")
13621685 if get_domain_from_id(requester_user_id) != origin:
13631686 raise SynapseError(403, "requester_user_id doesn't match origin")
13811704
13821705 return 200, resp
13831706
1384 async def on_DELETE(self, origin, content, query, group_id, role_id):
1707 async def on_DELETE(
1708 self,
1709 origin: str,
1710 content: Literal[None],
1711 query: Dict[bytes, List[bytes]],
1712 group_id: str,
1713 role_id: str,
1714 ) -> Tuple[int, JsonDict]:
13851715 requester_user_id = parse_string_from_args(query, "requester_user_id")
13861716 if get_domain_from_id(requester_user_id) != origin:
13871717 raise SynapseError(403, "requester_user_id doesn't match origin")
14101740 "/users/(?P<user_id>[^/]*)"
14111741 )
14121742
1413 async def on_POST(self, origin, content, query, group_id, role_id, user_id):
1743 async def on_POST(
1744 self,
1745 origin: str,
1746 content: JsonDict,
1747 query: Dict[bytes, List[bytes]],
1748 group_id: str,
1749 role_id: str,
1750 user_id: str,
1751 ) -> Tuple[int, JsonDict]:
14141752 requester_user_id = parse_string_from_args(query, "requester_user_id")
14151753 if get_domain_from_id(requester_user_id) != origin:
14161754 raise SynapseError(403, "requester_user_id doesn't match origin")
14361774
14371775 return 200, resp
14381776
1439 async def on_DELETE(self, origin, content, query, group_id, role_id, user_id):
1777 async def on_DELETE(
1778 self,
1779 origin: str,
1780 content: Literal[None],
1781 query: Dict[bytes, List[bytes]],
1782 group_id: str,
1783 role_id: str,
1784 user_id: str,
1785 ) -> Tuple[int, JsonDict]:
14401786 requester_user_id = parse_string_from_args(query, "requester_user_id")
14411787 if get_domain_from_id(requester_user_id) != origin:
14421788 raise SynapseError(403, "requester_user_id doesn't match origin")
14561802
14571803 PATH = "/get_groups_publicised"
14581804
1459 async def on_POST(self, origin, content, query):
1805 async def on_POST(
1806 self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]]
1807 ) -> Tuple[int, JsonDict]:
14601808 resp = await self.handler.bulk_get_publicised_groups(
14611809 content["user_ids"], proxy=False
14621810 )
14691817
14701818 PATH = "/groups/(?P<group_id>[^/]*)/settings/m.join_policy"
14711819
1472 async def on_PUT(self, origin, content, query, group_id):
1820 async def on_PUT(
1821 self,
1822 origin: str,
1823 content: JsonDict,
1824 query: Dict[bytes, List[bytes]],
1825 group_id: str,
1826 ) -> Tuple[int, JsonDict]:
14731827 requester_user_id = parse_string_from_args(query, "requester_user_id")
14741828 if get_domain_from_id(requester_user_id) != origin:
14751829 raise SynapseError(403, "requester_user_id doesn't match origin")
14981852 async def on_GET(
14991853 self,
15001854 origin: str,
1501 content: JsonDict,
1855 content: Literal[None],
15021856 query: Mapping[bytes, Sequence[bytes]],
15031857 room_id: str,
15041858 ) -> Tuple[int, JsonDict]:
15701924 super().__init__(hs, authenticator, ratelimiter, server_name)
15711925 self._store = self.hs.get_datastore()
15721926
1573 async def on_GET(self, origin, content, query, room_id):
1927 async def on_GET(
1928 self,
1929 origin: str,
1930 content: Literal[None],
1931 query: Dict[bytes, List[bytes]],
1932 room_id: str,
1933 ) -> Tuple[int, JsonDict]:
15741934 is_public = await self._store.is_room_world_readable_or_publicly_joinable(
15751935 room_id
15761936 )
6161 if ret:
6262 profile = await self.store.get_profileinfo(user.localpart)
6363 threepids = await self.store.user_get_threepids(user.to_string())
64 external_ids = [
65 ({"auth_provider": auth_provider, "external_id": external_id})
66 for auth_provider, external_id in await self.store.get_external_ids_by_user(
67 user.to_string()
68 )
69 ]
6470 ret["displayname"] = profile.display_name
6571 ret["avatar_url"] = profile.avatar_url
6672 ret["threepids"] = threepids
73 ret["external_ids"] = external_ids
6774 return ret
6875
6976 async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> Any:
2929 Optional,
3030 Tuple,
3131 Union,
32 cast,
3233 )
3334
3435 import attr
7172 from synapse.util.threepids import canonicalise_email
7273
7374 if TYPE_CHECKING:
75 from synapse.rest.client.v1.login import LoginResponse
7476 from synapse.server import HomeServer
7577
7678 logger = logging.getLogger(__name__)
776778 "params": params,
777779 }
778780
781 async def refresh_token(
782 self,
783 refresh_token: str,
784 valid_until_ms: Optional[int],
785 ) -> Tuple[str, str]:
786 """
787 Consumes a refresh token and generate both a new access token and a new refresh token from it.
788
789 The consumed refresh token is considered invalid after the first use of the new access token or the new refresh token.
790
791 Args:
792 refresh_token: The token to consume.
793 valid_until_ms: The expiration timestamp of the new access token.
794
795 Returns:
796 A tuple containing the new access token and refresh token
797 """
798
799 # Verify the token signature first before looking up the token
800 if not self._verify_refresh_token(refresh_token):
801 raise SynapseError(401, "invalid refresh token", Codes.UNKNOWN_TOKEN)
802
803 existing_token = await self.store.lookup_refresh_token(refresh_token)
804 if existing_token is None:
805 raise SynapseError(401, "refresh token does not exist", Codes.UNKNOWN_TOKEN)
806
807 if (
808 existing_token.has_next_access_token_been_used
809 or existing_token.has_next_refresh_token_been_refreshed
810 ):
811 raise SynapseError(
812 403, "refresh token isn't valid anymore", Codes.FORBIDDEN
813 )
814
815 (
816 new_refresh_token,
817 new_refresh_token_id,
818 ) = await self.get_refresh_token_for_user_id(
819 user_id=existing_token.user_id, device_id=existing_token.device_id
820 )
821 access_token = await self.get_access_token_for_user_id(
822 user_id=existing_token.user_id,
823 device_id=existing_token.device_id,
824 valid_until_ms=valid_until_ms,
825 refresh_token_id=new_refresh_token_id,
826 )
827 await self.store.replace_refresh_token(
828 existing_token.token_id, new_refresh_token_id
829 )
830 return access_token, new_refresh_token
831
832 def _verify_refresh_token(self, token: str) -> bool:
833 """
834 Verifies the shape of a refresh token.
835
836 Args:
837 token: The refresh token to verify
838
839 Returns:
840 Whether the token has the right shape
841 """
842 parts = token.split("_", maxsplit=4)
843 if len(parts) != 4:
844 return False
845
846 type, localpart, rand, crc = parts
847
848 # Refresh tokens are prefixed by "syr_", let's check that
849 if type != "syr":
850 return False
851
852 # Check the CRC
853 base = f"{type}_{localpart}_{rand}"
854 expected_crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
855 if crc != expected_crc:
856 return False
857
858 return True
859
860 async def get_refresh_token_for_user_id(
861 self,
862 user_id: str,
863 device_id: str,
864 ) -> Tuple[str, int]:
865 """
866 Creates a new refresh token for the user with the given user ID.
867
868 Args:
869 user_id: canonical user ID
870 device_id: the device ID to associate with the token.
871
872 Returns:
873 The newly created refresh token and its ID in the database
874 """
875 refresh_token = self.generate_refresh_token(UserID.from_string(user_id))
876 refresh_token_id = await self.store.add_refresh_token_to_user(
877 user_id=user_id,
878 token=refresh_token,
879 device_id=device_id,
880 )
881 return refresh_token, refresh_token_id
882
779883 async def get_access_token_for_user_id(
780884 self,
781885 user_id: str,
783887 valid_until_ms: Optional[int],
784888 puppets_user_id: Optional[str] = None,
785889 is_appservice_ghost: bool = False,
890 refresh_token_id: Optional[int] = None,
786891 ) -> str:
787892 """
788893 Creates a new access token for the user with the given user ID.
800905 valid_until_ms: when the token is valid until. None for
801906 no expiry.
802907 is_appservice_ghost: Whether the user is an application ghost user
908 refresh_token_id: the refresh token ID that will be associated with
909 this access token.
803910 Returns:
804911 The access token for the user's session.
805912 Raises:
835942 device_id=device_id,
836943 valid_until_ms=valid_until_ms,
837944 puppets_user_id=puppets_user_id,
945 refresh_token_id=refresh_token_id,
838946 )
839947
840948 # the device *should* have been registered before we got here; however,
9271035 self,
9281036 login_submission: Dict[str, Any],
9291037 ratelimit: bool = False,
930 ) -> Tuple[str, Optional[Callable[[Dict[str, str]], Awaitable[None]]]]:
1038 ) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]:
9311039 """Authenticates the user for the /login API
9321040
9331041 Also used by the user-interactive auth flow to validate auth types which don't
10721180 self,
10731181 username: str,
10741182 login_submission: Dict[str, Any],
1075 ) -> Tuple[str, Optional[Callable[[Dict[str, str]], Awaitable[None]]]]:
1183 ) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]:
10761184 """Helper for validate_login
10771185
10781186 Handles login, once we've mapped 3pids onto userids
11501258
11511259 async def check_password_provider_3pid(
11521260 self, medium: str, address: str, password: str
1153 ) -> Tuple[Optional[str], Optional[Callable[[Dict[str, str]], Awaitable[None]]]]:
1261 ) -> Tuple[Optional[str], Optional[Callable[["LoginResponse"], Awaitable[None]]]]:
11541262 """Check if a password provider is able to validate a thirdparty login
11551263
11561264 Args:
12101318 b64local = unpaddedbase64.encode_base64(for_user.localpart.encode("utf-8"))
12111319 random_string = stringutils.random_string(20)
12121320 base = f"syt_{b64local}_{random_string}"
1321
1322 crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
1323 return f"{base}_{crc}"
1324
1325 def generate_refresh_token(self, for_user: UserID) -> str:
1326 """Generates an opaque string, for use as a refresh token"""
1327
1328 # we use the following format for refresh tokens:
1329 # syr_<base64 local part>_<random string>_<base62 crc check>
1330
1331 b64local = unpaddedbase64.encode_base64(for_user.localpart.encode("utf-8"))
1332 random_string = stringutils.random_string(20)
1333 base = f"syr_{b64local}_{random_string}"
12131334
12141335 crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
12151336 return f"{base}_{crc}"
15621683 )
15631684 respond_with_html(request, 200, html)
15641685
1565 async def _sso_login_callback(self, login_result: JsonDict) -> None:
1686 async def _sso_login_callback(self, login_result: "LoginResponse") -> None:
15661687 """
15671688 A login callback which might add additional attributes to the login response.
15681689
15761697
15771698 extra_attributes = self._extra_attributes.get(login_result["user_id"])
15781699 if extra_attributes:
1579 login_result.update(extra_attributes.extra_attributes)
1700 login_result_dict = cast(Dict[str, Any], login_result)
1701 login_result_dict.update(extra_attributes.extra_attributes)
15801702
15811703 def _expire_sso_extra_attributes(self) -> None:
15821704 """
1010 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1111 # See the License for the specific language governing permissions and
1212 # limitations under the License.
13 from typing import TYPE_CHECKING, Collection, Optional
14
13 from typing import TYPE_CHECKING, Collection, List, Optional, Union
14
15 from synapse import event_auth
1516 from synapse.api.constants import (
1617 EventTypes,
1718 JoinRules,
1920 RestrictedJoinRuleTypes,
2021 )
2122 from synapse.api.errors import AuthError
22 from synapse.api.room_versions import RoomVersion
23 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
2324 from synapse.events import EventBase
25 from synapse.events.builder import EventBuilder
2426 from synapse.types import StateMap
27 from synapse.util.metrics import Measure
2528
2629 if TYPE_CHECKING:
2730 from synapse.server import HomeServer
3336 """
3437
3538 def __init__(self, hs: "HomeServer"):
39 self._clock = hs.get_clock()
3640 self._store = hs.get_datastore()
41
42 async def check_from_context(
43 self, room_version: str, event, context, do_sig_check=True
44 ) -> None:
45 auth_event_ids = event.auth_event_ids()
46 auth_events_by_id = await self._store.get_events(auth_event_ids)
47 auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()}
48
49 room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
50 event_auth.check(
51 room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check
52 )
53
54 def compute_auth_events(
55 self,
56 event: Union[EventBase, EventBuilder],
57 current_state_ids: StateMap[str],
58 for_verification: bool = False,
59 ) -> List[str]:
60 """Given an event and current state return the list of event IDs used
61 to auth an event.
62
63 If `for_verification` is False then only return auth events that
64 should be added to the event's `auth_events`.
65
66 Returns:
67 List of event IDs.
68 """
69
70 if event.type == EventTypes.Create:
71 return []
72
73 # Currently we ignore the `for_verification` flag even though there are
74 # some situations where we can drop particular auth events when adding
75 # to the event's `auth_events` (e.g. joins pointing to previous joins
76 # when room is publicly joinable). Dropping event IDs has the
77 # advantage that the auth chain for the room grows slower, but we use
78 # the auth chain in state resolution v2 to order events, which means
79 # care must be taken if dropping events to ensure that it doesn't
80 # introduce undesirable "state reset" behaviour.
81 #
82 # All of which sounds a bit tricky so we don't bother for now.
83
84 auth_ids = []
85 for etype, state_key in event_auth.auth_types_for_event(event):
86 auth_ev_id = current_state_ids.get((etype, state_key))
87 if auth_ev_id:
88 auth_ids.append(auth_ev_id)
89
90 return auth_ids
91
92 async def check_host_in_room(self, room_id: str, host: str) -> bool:
93 with Measure(self._clock, "check_host_in_room"):
94 return await self._store.is_host_joined(room_id, host)
3795
3896 async def check_restricted_join_rules(
3997 self,
249249 #
250250 # Note that if we were never in the room then we would have already
251251 # dropped the event, since we wouldn't know the room version.
252 is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)
252 is_in_room = await self._event_auth_handler.check_host_in_room(
253 room_id, self.server_name
254 )
253255 if not is_in_room:
254256 logger.info(
255257 "Ignoring PDU from %s as we're not in the room",
16731675 room_version = await self.store.get_room_version_id(room_id)
16741676
16751677 # now check that we are *still* in the room
1676 is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)
1678 is_in_room = await self._event_auth_handler.check_host_in_room(
1679 room_id, self.server_name
1680 )
16771681 if not is_in_room:
16781682 logger.info(
16791683 "Got /make_join request for room %s we are no longer in",
17041708
17051709 # The remote hasn't signed it yet, obviously. We'll do the full checks
17061710 # when we get the event back in `on_send_join_request`
1707 await self.auth.check_from_context(
1711 await self._event_auth_handler.check_from_context(
17081712 room_version, event, context, do_sig_check=False
17091713 )
17101714
17111715 return event
1712
1713 async def on_send_join_request(self, origin: str, pdu: EventBase) -> JsonDict:
1714 """We have received a join event for a room. Fully process it and
1715 respond with the current state and auth chains.
1716 """
1717 event = pdu
1718
1719 logger.debug(
1720 "on_send_join_request from %s: Got event: %s, signatures: %s",
1721 origin,
1722 event.event_id,
1723 event.signatures,
1724 )
1725
1726 if get_domain_from_id(event.sender) != origin:
1727 logger.info(
1728 "Got /send_join request for user %r from different origin %s",
1729 event.sender,
1730 origin,
1731 )
1732 raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
1733
1734 event.internal_metadata.outlier = False
1735 # Send this event on behalf of the origin server.
1736 #
1737 # The reasons we have the destination server rather than the origin
1738 # server send it are slightly mysterious: the origin server should have
1739 # all the necessary state once it gets the response to the send_join,
1740 # so it could send the event itself if it wanted to. It may be that
1741 # doing it this way reduces failure modes, or avoids certain attacks
1742 # where a new server selectively tells a subset of the federation that
1743 # it has joined.
1744 #
1745 # The fact is that, as of the current writing, Synapse doesn't send out
1746 # the join event over federation after joining, and changing it now
1747 # would introduce the danger of backwards-compatibility problems.
1748 event.internal_metadata.send_on_behalf_of = origin
1749
1750 # Calculate the event context.
1751 context = await self.state_handler.compute_event_context(event)
1752
1753 # Get the state before the new event.
1754 prev_state_ids = await context.get_prev_state_ids()
1755
1756 # Check if the user is already in the room or invited to the room.
1757 user_id = event.state_key
1758 prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None)
1759 prev_member_event = None
1760 if prev_member_event_id:
1761 prev_member_event = await self.store.get_event(prev_member_event_id)
1762
1763 # Check if the member should be allowed access via membership in a space.
1764 await self._event_auth_handler.check_restricted_join_rules(
1765 prev_state_ids,
1766 event.room_version,
1767 user_id,
1768 prev_member_event,
1769 )
1770
1771 # Persist the event.
1772 await self._auth_and_persist_event(origin, event, context)
1773
1774 logger.debug(
1775 "on_send_join_request: After _auth_and_persist_event: %s, sigs: %s",
1776 event.event_id,
1777 event.signatures,
1778 )
1779
1780 state_ids = list(prev_state_ids.values())
1781 auth_chain = await self.store.get_auth_chain(event.room_id, state_ids)
1782
1783 state = await self.store.get_events(list(prev_state_ids.values()))
1784
1785 return {"state": list(state.values()), "auth_chain": auth_chain}
17861716
17871717 async def on_invite_request(
17881718 self, origin: str, event: EventBase, room_version: RoomVersion
19501880 try:
19511881 # The remote hasn't signed it yet, obviously. We'll do the full checks
19521882 # when we get the event back in `on_send_leave_request`
1953 await self.auth.check_from_context(
1883 await self._event_auth_handler.check_from_context(
19541884 room_version, event, context, do_sig_check=False
19551885 )
19561886 except AuthError as e:
19581888 raise e
19591889
19601890 return event
1961
1962 async def on_send_leave_request(self, origin: str, pdu: EventBase) -> None:
1963 """We have received a leave event for a room. Fully process it."""
1964 event = pdu
1965
1966 logger.debug(
1967 "on_send_leave_request: Got event: %s, signatures: %s",
1968 event.event_id,
1969 event.signatures,
1970 )
1971
1972 if get_domain_from_id(event.sender) != origin:
1973 logger.info(
1974 "Got /send_leave request for user %r from different origin %s",
1975 event.sender,
1976 origin,
1977 )
1978 raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
1979
1980 event.internal_metadata.outlier = False
1981
1982 context = await self.state_handler.compute_event_context(event)
1983 await self._auth_and_persist_event(origin, event, context)
1984
1985 logger.debug(
1986 "on_send_leave_request: After _auth_and_persist_event: %s, sigs: %s",
1987 event.event_id,
1988 event.signatures,
1989 )
1990
1991 return None
19921891
19931892 @log_function
19941893 async def on_make_knock_request(
20431942 try:
20441943 # The remote hasn't signed it yet, obviously. We'll do the full checks
20451944 # when we get the event back in `on_send_knock_request`
2046 await self.auth.check_from_context(
1945 await self._event_auth_handler.check_from_context(
20471946 room_version, event, context, do_sig_check=False
20481947 )
20491948 except AuthError as e:
20531952 return event
20541953
20551954 @log_function
2056 async def on_send_knock_request(
1955 async def on_send_membership_event(
20571956 self, origin: str, event: EventBase
20581957 ) -> EventContext:
20591958 """
2060 We have received a knock event for a room. Verify that event and send it into the room
2061 on the knocking homeserver's behalf.
1959 We have received a join/leave/knock event for a room via send_join/leave/knock.
1960
1961 Verify that event and send it into the room on the remote homeserver's behalf.
1962
1963 This is quite similar to on_receive_pdu, with the following principal
1964 differences:
1965 * only membership events are permitted (and only events with
1966 sender==state_key -- ie, no kicks or bans)
1967 * *We* send out the event on behalf of the remote server.
1968 * We enforce the membership restrictions of restricted rooms.
1969 * Rejected events result in an exception rather than being stored.
1970
1971 There are also other differences, however it is not clear if these are by
1972 design or omission. In particular, we do not attempt to backfill any missing
1973 prev_events.
20621974
20631975 Args:
2064 origin: The remote homeserver of the knocking user.
2065 event: The knocking member event that has been signed by the remote homeserver.
1976 origin: The homeserver of the remote (joining/invited/knocking) user.
1977 event: The member event that has been signed by the remote homeserver.
20661978
20671979 Returns:
20681980 The context of the event after inserting it into the room graph.
1981
1982 Raises:
1983 SynapseError if the event is not accepted into the room
20691984 """
20701985 logger.debug(
2071 "on_send_knock_request: Got event: %s, signatures: %s",
1986 "on_send_membership_event: Got event: %s, signatures: %s",
20721987 event.event_id,
20731988 event.signatures,
20741989 )
20751990
20761991 if get_domain_from_id(event.sender) != origin:
20771992 logger.info(
2078 "Got /send_knock request for user %r from different origin %s",
1993 "Got send_membership request for user %r from different origin %s",
20791994 event.sender,
20801995 origin,
20811996 )
20821997 raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
20831998
2084 event.internal_metadata.outlier = False
1999 if event.sender != event.state_key:
2000 raise SynapseError(400, "state_key and sender must match", Codes.BAD_JSON)
2001
2002 assert not event.internal_metadata.outlier
2003
2004 # Send this event on behalf of the other server.
2005 #
2006 # The remote server isn't a full participant in the room at this point, so
2007 # may not have an up-to-date list of the other homeservers participating in
2008 # the room, so we send it on their behalf.
2009 event.internal_metadata.send_on_behalf_of = origin
20852010
20862011 context = await self.state_handler.compute_event_context(event)
2087
2088 event_allowed = await self.third_party_event_rules.check_event_allowed(
2089 event, context
2090 )
2091 if not event_allowed:
2092 logger.info("Sending of knock %s forbidden by third-party rules", event)
2012 context = await self._check_event_auth(origin, event, context)
2013 if context.rejected:
20932014 raise SynapseError(
2094 403, "This event is not allowed in this context", Codes.FORBIDDEN
2095 )
2096
2097 await self._auth_and_persist_event(origin, event, context)
2098
2015 403, f"{event.membership} event was rejected", Codes.FORBIDDEN
2016 )
2017
2018 # for joins, we need to check the restrictions of restricted rooms
2019 if event.membership == Membership.JOIN:
2020 await self._check_join_restrictions(context, event)
2021
2022 # for knock events, we run the third-party event rules. It's not entirely clear
2023 # why we don't do this for other sorts of membership events.
2024 if event.membership == Membership.KNOCK:
2025 event_allowed = await self.third_party_event_rules.check_event_allowed(
2026 event, context
2027 )
2028 if not event_allowed:
2029 logger.info("Sending of knock %s forbidden by third-party rules", event)
2030 raise SynapseError(
2031 403, "This event is not allowed in this context", Codes.FORBIDDEN
2032 )
2033
2034 # all looks good, we can persist the event.
2035 await self._run_push_actions_and_persist_event(event, context)
20992036 return context
2037
2038 async def _check_join_restrictions(
2039 self, context: EventContext, event: EventBase
2040 ) -> None:
2041 """Check that restrictions in restricted join rules are matched
2042
2043 Called when we receive a join event via send_join.
2044
2045 Raises an auth error if the restrictions are not matched.
2046 """
2047 prev_state_ids = await context.get_prev_state_ids()
2048
2049 # Check if the user is already in the room or invited to the room.
2050 user_id = event.state_key
2051 prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None)
2052 prev_member_event = None
2053 if prev_member_event_id:
2054 prev_member_event = await self.store.get_event(prev_member_event_id)
2055
2056 # Check if the member should be allowed access via membership in a space.
2057 await self._event_auth_handler.check_restricted_join_rules(
2058 prev_state_ids,
2059 event.room_version,
2060 user_id,
2061 prev_member_event,
2062 )
21002063
21012064 async def get_state_for_pdu(self, room_id: str, event_id: str) -> List[EventBase]:
21022065 """Returns the state at the event. i.e. not including said event."""
21512114 async def on_backfill_request(
21522115 self, origin: str, room_id: str, pdu_list: List[str], limit: int
21532116 ) -> List[EventBase]:
2154 in_room = await self.auth.check_host_in_room(room_id, origin)
2117 in_room = await self._event_auth_handler.check_host_in_room(room_id, origin)
21552118 if not in_room:
21562119 raise AuthError(403, "Host not in room.")
21572120
21862149 )
21872150
21882151 if event:
2189 in_room = await self.auth.check_host_in_room(event.room_id, origin)
2152 in_room = await self._event_auth_handler.check_host_in_room(
2153 event.room_id, origin
2154 )
21902155 if not in_room:
21912156 raise AuthError(403, "Host not in room.")
21922157
22392204 backfilled=backfilled,
22402205 )
22412206
2207 await self._run_push_actions_and_persist_event(event, context, backfilled)
2208
2209 async def _run_push_actions_and_persist_event(
2210 self, event: EventBase, context: EventContext, backfilled: bool = False
2211 ):
2212 """Run the push actions for a received event, and persist it.
2213
2214 Args:
2215 event: The event itself.
2216 context: The event context.
2217 backfilled: True if the event was backfilled.
2218 """
22422219 try:
22432220 if (
22442221 not event.internal_metadata.is_outlier()
25272504 latest_events: List[str],
25282505 limit: int,
25292506 ) -> List[EventBase]:
2530 in_room = await self.auth.check_host_in_room(room_id, origin)
2507 in_room = await self._event_auth_handler.check_host_in_room(room_id, origin)
25312508 if not in_room:
25322509 raise AuthError(403, "Host not in room.")
25332510
25522529 origin: str,
25532530 event: EventBase,
25542531 context: EventContext,
2555 state: Optional[Iterable[EventBase]],
2556 auth_events: Optional[MutableStateMap[EventBase]],
2557 backfilled: bool,
2532 state: Optional[Iterable[EventBase]] = None,
2533 auth_events: Optional[MutableStateMap[EventBase]] = None,
2534 backfilled: bool = False,
25582535 ) -> EventContext:
25592536 """
25602537 Checks whether an event should be rejected (for failing auth checks).
25902567
25912568 if not auth_events:
25922569 prev_state_ids = await context.get_prev_state_ids()
2593 auth_events_ids = self.auth.compute_auth_events(
2570 auth_events_ids = self._event_auth_handler.compute_auth_events(
25942571 event, prev_state_ids, for_verification=True
25952572 )
25962573 auth_events_x = await self.store.get_events(auth_events_ids)
30192996 "state_key": target_user_id,
30202997 }
30212998
3022 if await self.auth.check_host_in_room(room_id, self.hs.hostname):
2999 if await self._event_auth_handler.check_host_in_room(room_id, self.hs.hostname):
30233000 room_version = await self.store.get_room_version_id(room_id)
30243001 builder = self.event_builder_factory.new(room_version, event_dict)
30253002
30393016 event.internal_metadata.send_on_behalf_of = self.hs.hostname
30403017
30413018 try:
3042 await self.auth.check_from_context(room_version, event, context)
3019 await self._event_auth_handler.check_from_context(
3020 room_version, event, context
3021 )
30433022 except AuthError as e:
30443023 logger.warning("Denying new third party invite %r because %s", event, e)
30453024 raise e
30823061 )
30833062
30843063 try:
3085 await self.auth.check_from_context(room_version, event, context)
3064 await self._event_auth_handler.check_from_context(
3065 room_version, event, context
3066 )
30863067 except AuthError as e:
30873068 logger.warning("Denying third party invite %r because %s", event, e)
30883069 raise e
31703151 last_exception = None # type: Optional[Exception]
31713152
31723153 # for each public key in the 3pid invite event
3173 for public_key_object in self.hs.get_auth().get_public_keys(invite_event):
3154 for public_key_object in event_auth.get_public_keys(invite_event):
31743155 try:
31753156 # for each sig on the third_party_invite block of the actual invite
31763157 for server, signature_block in signed["signatures"].items():
384384 def __init__(self, hs: "HomeServer"):
385385 self.hs = hs
386386 self.auth = hs.get_auth()
387 self._event_auth_handler = hs.get_event_auth_handler()
387388 self.store = hs.get_datastore()
388389 self.storage = hs.get_storage()
389390 self.state = hs.get_state_handler()
508509 Should normally be left as None, which will cause them to be calculated
509510 based on the room state at the prev_events.
510511
512 If non-None, prev_event_ids must also be provided.
513
511514 require_consent: Whether to check if the requester has
512515 consented to the privacy policy.
513516
580583 # Strip down the auth_event_ids to only what we need to auth the event.
581584 # For example, we don't need extra m.room.member that don't match event.sender
582585 if auth_event_ids is not None:
586 # If auth events are provided, prev events must be also.
587 assert prev_event_ids is not None
588
583589 temp_event = await builder.build(
584590 prev_event_ids=prev_event_ids,
585591 auth_event_ids=auth_event_ids,
591597 (e.type, e.state_key): e.event_id for e in auth_events
592598 }
593599 # Actually strip down and use the necessary auth events
594 auth_event_ids = self.auth.compute_auth_events(
600 auth_event_ids = self._event_auth_handler.compute_auth_events(
595601 event=temp_event,
596602 current_state_ids=auth_event_state_map,
597603 for_verification=False,
783789 The event ids to use as the auth_events for the new event.
784790 Should normally be left as None, which will cause them to be calculated
785791 based on the room state at the prev_events.
792
793 If non-None, prev_event_ids must also be provided.
786794 ratelimit: Whether to rate limit this send.
787795 txn_id: The transaction ID.
788796 ignore_shadow_ban: True if shadow-banned users should be allowed to
10481056 assert event.content["membership"] == Membership.LEAVE
10491057 else:
10501058 try:
1051 await self.auth.check_from_context(room_version, event, context)
1059 await self._event_auth_handler.check_from_context(
1060 room_version, event, context
1061 )
10521062 except AuthError as err:
10531063 logger.warning("Denying new event %r because %s", event, err)
10541064 raise err
13731383 raise AuthError(403, "Redacting server ACL events is not permitted")
13741384
13751385 prev_state_ids = await context.get_prev_state_ids()
1376 auth_events_ids = self.auth.compute_auth_events(
1386 auth_events_ids = self._event_auth_handler.compute_auth_events(
13771387 event, prev_state_ids, for_verification=True
13781388 )
13791389 auth_events_map = await self.store.get_events(auth_events_ids)
1414 """Contains functions for registering clients."""
1515
1616 import logging
17 from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple
17 from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
1818
1919 from prometheus_client import Counter
20 from typing_extensions import TypedDict
2021
2122 from synapse import types
2223 from synapse.api.constants import MAX_USERID_LENGTH, EventTypes, JoinRules, LoginType
5354 ["guest", "auth_provider"],
5455 )
5556
57 LoginDict = TypedDict(
58 "LoginDict",
59 {
60 "device_id": str,
61 "access_token": str,
62 "valid_until_ms": Optional[int],
63 "refresh_token": Optional[str],
64 },
65 )
66
5667
5768 class RegistrationHandler(BaseHandler):
5869 def __init__(self, hs: "HomeServer"):
8495 self.pusher_pool = hs.get_pusherpool()
8596
8697 self.session_lifetime = hs.config.session_lifetime
98 self.access_token_lifetime = hs.config.access_token_lifetime
8799
88100 async def check_username(
89101 self,
385397 room_alias = RoomAlias.from_string(r)
386398
387399 if self.hs.hostname != room_alias.domain:
388 logger.warning(
389 "Cannot create room alias %s, "
390 "it does not match server domain",
400 # If the alias is remote, try to join the room. This might fail
401 # because the room might be invite only, but we don't have any local
402 # user in the room to invite this one with, so at this point that's
403 # the best we can do.
404 logger.info(
405 "Cannot automatically create room with alias %s as it isn't"
406 " local, trying to join the room instead",
391407 r,
408 )
409
410 (
411 room,
412 remote_room_hosts,
413 ) = await room_member_handler.lookup_room_alias(room_alias)
414 room_id = room.to_string()
415
416 await room_member_handler.update_membership(
417 requester=create_requester(
418 user_id, authenticated_entity=self._server_name
419 ),
420 target=UserID.from_string(user_id),
421 room_id=room_id,
422 remote_room_hosts=remote_room_hosts,
423 action="join",
424 ratelimit=False,
392425 )
393426 else:
394427 # A shallow copy is OK here since the only key that is
447480 )
448481
449482 # Calculate whether the room requires an invite or can be
450 # joined directly. Note that unless a join rule of public exists,
451 # it is treated as requiring an invite.
452 requires_invite = True
453
454 state = await self.store.get_filtered_current_state_ids(
455 room_id, StateFilter.from_types([(EventTypes.JoinRules, "")])
456 )
457
458 event_id = state.get((EventTypes.JoinRules, ""))
459 if event_id:
460 join_rules_event = await self.store.get_event(
461 event_id, allow_none=True
483 # joined directly. By default, we consider the room as requiring an
484 # invite if the homeserver is in the room (unless told otherwise by the
485 # join rules). Otherwise we consider it as being joinable, at the risk of
486 # failing to join, but in this case there's little more we can do since
487 # we don't have a local user in the room to craft up an invite with.
488 requires_invite = await self.store.is_host_joined(
489 room_id,
490 self.server_name,
491 )
492
493 if requires_invite:
494 # If the server is in the room, check if the room is public.
495 state = await self.store.get_filtered_current_state_ids(
496 room_id, StateFilter.from_types([(EventTypes.JoinRules, "")])
462497 )
463 if join_rules_event:
464 join_rule = join_rules_event.content.get("join_rule", None)
465 requires_invite = join_rule and join_rule != JoinRules.PUBLIC
498
499 event_id = state.get((EventTypes.JoinRules, ""))
500 if event_id:
501 join_rules_event = await self.store.get_event(
502 event_id, allow_none=True
503 )
504 if join_rules_event:
505 join_rule = join_rules_event.content.get("join_rule", None)
506 requires_invite = (
507 join_rule and join_rule != JoinRules.PUBLIC
508 )
466509
467510 # Send the invite, if necessary.
468511 if requires_invite:
664707 is_guest: bool = False,
665708 is_appservice_ghost: bool = False,
666709 auth_provider_id: Optional[str] = None,
667 ) -> Tuple[str, str]:
710 should_issue_refresh_token: bool = False,
711 ) -> Tuple[str, str, Optional[int], Optional[str]]:
668712 """Register a device for a user and generate an access token.
669713
670714 The access token will be limited by the homeserver's session_lifetime config.
676720 is_guest: Whether this is a guest account
677721 auth_provider_id: The SSO IdP the user used, if any (just used for the
678722 prometheus metrics).
723 should_issue_refresh_token: Whether it should also issue a refresh token
679724 Returns:
680 Tuple of device ID and access token
725 Tuple of device ID, access token, access token expiration time and refresh token
681726 """
682727 res = await self._register_device_client(
683728 user_id=user_id,
685730 initial_display_name=initial_display_name,
686731 is_guest=is_guest,
687732 is_appservice_ghost=is_appservice_ghost,
733 should_issue_refresh_token=should_issue_refresh_token,
688734 )
689735
690736 login_counter.labels(
692738 auth_provider=(auth_provider_id or ""),
693739 ).inc()
694740
695 return res["device_id"], res["access_token"]
741 return (
742 res["device_id"],
743 res["access_token"],
744 res["valid_until_ms"],
745 res["refresh_token"],
746 )
696747
697748 async def register_device_inner(
698749 self,
701752 initial_display_name: Optional[str],
702753 is_guest: bool = False,
703754 is_appservice_ghost: bool = False,
704 ) -> Dict[str, str]:
755 should_issue_refresh_token: bool = False,
756 ) -> LoginDict:
705757 """Helper for register_device
706758
707759 Does the bits that need doing on the main process. Not for use outside this
715767 "session_lifetime is not currently implemented for guest access"
716768 )
717769 valid_until_ms = self.clock.time_msec() + self.session_lifetime
770
771 refresh_token = None
772 refresh_token_id = None
718773
719774 registered_device_id = await self.device_handler.check_device_registered(
720775 user_id, device_id, initial_display_name
723778 assert valid_until_ms is None
724779 access_token = self.macaroon_gen.generate_guest_access_token(user_id)
725780 else:
781 if should_issue_refresh_token:
782 (
783 refresh_token,
784 refresh_token_id,
785 ) = await self._auth_handler.get_refresh_token_for_user_id(
786 user_id,
787 device_id=registered_device_id,
788 )
789 valid_until_ms = self.clock.time_msec() + self.access_token_lifetime
790
726791 access_token = await self._auth_handler.get_access_token_for_user_id(
727792 user_id,
728793 device_id=registered_device_id,
729794 valid_until_ms=valid_until_ms,
730795 is_appservice_ghost=is_appservice_ghost,
731 )
732
733 return {"device_id": registered_device_id, "access_token": access_token}
796 refresh_token_id=refresh_token_id,
797 )
798
799 return {
800 "device_id": registered_device_id,
801 "access_token": access_token,
802 "valid_until_ms": valid_until_ms,
803 "refresh_token": refresh_token,
804 }
734805
735806 async def post_registration_actions(
736807 self, user_id: str, auth_result: dict, access_token: Optional[str]
8282 self.spam_checker = hs.get_spam_checker()
8383 self.event_creation_handler = hs.get_event_creation_handler()
8484 self.room_member_handler = hs.get_room_member_handler()
85 self._event_auth_handler = hs.get_event_auth_handler()
8586 self.config = hs.config
8687
8788 # Room state based off defined presets
225226 },
226227 )
227228 old_room_version = await self.store.get_room_version_id(old_room_id)
228 await self.auth.check_from_context(
229 await self._event_auth_handler.check_from_context(
229230 old_room_version, tombstone_event, tombstone_context
230231 )
231232
2424 EventTypes,
2525 HistoryVisibility,
2626 Membership,
27 RoomTypes,
2728 )
2829 from synapse.events import EventBase
2930 from synapse.events.utils import format_event_for_client_v2
317318
318319 Returns:
319320 A tuple of:
320 An iterable of a single value of the room.
321 The room information, if the room should be returned to the
322 user. None, otherwise.
321323
322324 An iterable of the sorted children events. This may be limited
323325 to a maximum size or may include all children.
327329
328330 room_entry = await self._build_room_entry(room_id)
329331
330 # look for child rooms/spaces.
332 # If the room is not a space, return just the room information.
333 if room_entry.get("room_type") != RoomTypes.SPACE:
334 return room_entry, ()
335
336 # Otherwise, look for child rooms/spaces.
331337 child_events = await self._get_child_events(room_id)
332338
333339 if suggested_only:
347353 event_format=format_event_for_client_v2,
348354 )
349355 )
356
350357 return room_entry, events_result
351358
352359 async def _summarize_remote_room(
464471 # If this is a request over federation, check if the host is in the room or
465472 # is in one of the spaces specified via the join rules.
466473 elif origin:
467 if await self._auth.check_host_in_room(room_id, origin):
474 if await self._event_auth_handler.check_host_in_room(room_id, origin):
468475 return True
469476
470477 # Alternately, if the host has a user in any of the spaces specified
477484 await self._event_auth_handler.get_rooms_that_allow_join(state_ids)
478485 )
479486 for space_id in allowed_rooms:
480 if await self._auth.check_host_in_room(space_id, origin):
487 if await self._event_auth_handler.check_host_in_room(
488 space_id, origin
489 ):
481490 return True
482491
483492 # otherwise, check if the room is peekable
727727 )
728728 request.setHeader(
729729 b"Access-Control-Allow-Headers",
730 b"Origin, X-Requested-With, Content-Type, Accept, Authorization, Date",
730 b"X-Requested-With, Content-Type, Authorization, Date",
731731 )
732732
733733
112112 def parse_bytes_from_args(
113113 args: Dict[bytes, List[bytes]],
114114 name: str,
115 default: Optional[bytes] = None,
116 ) -> Optional[bytes]:
117 ...
118
119
120 @overload
121 def parse_bytes_from_args(
122 args: Dict[bytes, List[bytes]],
123 name: str,
115124 default: Literal[None] = None,
116 required: Literal[True] = True,
125 *,
126 required: Literal[True],
117127 ) -> bytes:
118128 ...
119129
196206 """
197207 args = request.args # type: Dict[bytes, List[bytes]] # type: ignore
198208 return parse_string_from_args(
199 args, name, default, required, allowed_values, encoding
209 args,
210 name,
211 default,
212 required=required,
213 allowed_values=allowed_values,
214 encoding=encoding,
200215 )
201216
202217
226241 args: Dict[bytes, List[bytes]],
227242 name: str,
228243 default: Optional[List[str]] = None,
229 required: Literal[True] = True,
244 *,
245 allowed_values: Optional[Iterable[str]] = None,
246 encoding: str = "ascii",
247 ) -> Optional[List[str]]:
248 ...
249
250
251 @overload
252 def parse_strings_from_args(
253 args: Dict[bytes, List[bytes]],
254 name: str,
255 default: Optional[List[str]] = None,
256 *,
257 required: Literal[True],
230258 allowed_values: Optional[Iterable[str]] = None,
231259 encoding: str = "ascii",
232260 ) -> List[str]:
238266 args: Dict[bytes, List[bytes]],
239267 name: str,
240268 default: Optional[List[str]] = None,
269 *,
241270 required: bool = False,
242271 allowed_values: Optional[Iterable[str]] = None,
243272 encoding: str = "ascii",
298327 args: Dict[bytes, List[bytes]],
299328 name: str,
300329 default: Optional[str] = None,
301 required: Literal[True] = True,
330 *,
331 allowed_values: Optional[Iterable[str]] = None,
332 encoding: str = "ascii",
333 ) -> Optional[str]:
334 ...
335
336
337 @overload
338 def parse_string_from_args(
339 args: Dict[bytes, List[bytes]],
340 name: str,
341 default: Optional[str] = None,
342 *,
343 required: Literal[True],
302344 allowed_values: Optional[Iterable[str]] = None,
303345 encoding: str = "ascii",
304346 ) -> str:
167167 "Using deprecated ModuleApi.register which creates a dummy user device."
168168 )
169169 user_id = yield self.register_user(localpart, displayname, emails or [])
170 _, access_token = yield self.register_device(user_id)
170 _, access_token, _, _ = yield self.register_device(user_id)
171171 return user_id, access_token
172172
173173 def register_user(
103103 def __init__(self, hs: "HomeServer"):
104104 self.hs = hs
105105 self.store = hs.get_datastore()
106 self.auth = hs.get_auth()
106 self._event_auth_handler = hs.get_event_auth_handler()
107107
108108 # Used by `RulesForRoom` to ensure only one thing mutates the cache at a
109109 # time. Keyed off room_id.
171171 # not having a power level event is an extreme edge case
172172 auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
173173 else:
174 auth_events_ids = self.auth.compute_auth_events(
174 auth_events_ids = self._event_auth_handler.compute_auth_events(
175175 event, prev_state_ids, for_verification=False
176176 )
177177 auth_events_dict = await self.store.get_events(auth_events_ids)
3535
3636 @staticmethod
3737 async def _serialize_payload(
38 user_id, device_id, initial_display_name, is_guest, is_appservice_ghost
38 user_id,
39 device_id,
40 initial_display_name,
41 is_guest,
42 is_appservice_ghost,
43 should_issue_refresh_token,
3944 ):
4045 """
4146 Args:
47 user_id (int)
4248 device_id (str|None): Device ID to use, if None a new one is
4349 generated.
4450 initial_display_name (str|None)
4551 is_guest (bool)
52 is_appservice_ghost (bool)
53 should_issue_refresh_token (bool)
4654 """
4755 return {
4856 "device_id": device_id,
4957 "initial_display_name": initial_display_name,
5058 "is_guest": is_guest,
5159 "is_appservice_ghost": is_appservice_ghost,
60 "should_issue_refresh_token": should_issue_refresh_token,
5261 }
5362
5463 async def _handle_request(self, request, user_id):
5867 initial_display_name = content["initial_display_name"]
5968 is_guest = content["is_guest"]
6069 is_appservice_ghost = content["is_appservice_ghost"]
70 should_issue_refresh_token = content["should_issue_refresh_token"]
6171
6272 res = await self.registration_handler.register_device_inner(
6373 user_id,
6575 initial_display_name,
6676 is_guest,
6777 is_appservice_ghost=is_appservice_ghost,
78 should_issue_refresh_token=should_issue_refresh_token,
6879 )
6980
7081 return 200, res
1313
1414 import logging
1515 import re
16 from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional
16 from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Optional
17
18 from typing_extensions import TypedDict
1719
1820 from synapse.api.errors import Codes, LoginError, SynapseError
1921 from synapse.api.ratelimiting import Ratelimiter
2426 from synapse.http.server import HttpServer, finish_request
2527 from synapse.http.servlet import (
2628 RestServlet,
29 assert_params_in_dict,
30 parse_boolean,
2731 parse_bytes_from_args,
2832 parse_json_object_from_request,
2933 parse_string,
3943 logger = logging.getLogger(__name__)
4044
4145
46 LoginResponse = TypedDict(
47 "LoginResponse",
48 {
49 "user_id": str,
50 "access_token": str,
51 "home_server": str,
52 "expires_in_ms": Optional[int],
53 "refresh_token": Optional[str],
54 "device_id": str,
55 "well_known": Optional[Dict[str, Any]],
56 },
57 total=False,
58 )
59
60
4261 class LoginRestServlet(RestServlet):
4362 PATTERNS = client_patterns("/login$", v1=True)
4463 CAS_TYPE = "m.login.cas"
4766 JWT_TYPE = "org.matrix.login.jwt"
4867 JWT_TYPE_DEPRECATED = "m.login.jwt"
4968 APPSERVICE_TYPE = "uk.half-shot.msc2778.login.application_service"
69 REFRESH_TOKEN_PARAM = "org.matrix.msc2918.refresh_token"
5070
5171 def __init__(self, hs: "HomeServer"):
5272 super().__init__()
6484 self.cas_enabled = hs.config.cas_enabled
6585 self.oidc_enabled = hs.config.oidc_enabled
6686 self._msc2858_enabled = hs.config.experimental.msc2858_enabled
87 self._msc2918_enabled = hs.config.access_token_lifetime is not None
6788
6889 self.auth = hs.get_auth()
90
91 self.clock = hs.get_clock()
6992
7093 self.auth_handler = self.hs.get_auth_handler()
7194 self.registration_handler = hs.get_registration_handler()
137160 async def on_POST(self, request: SynapseRequest):
138161 login_submission = parse_json_object_from_request(request)
139162
163 if self._msc2918_enabled:
164 # Check if this login should also issue a refresh token, as per
165 # MSC2918
166 should_issue_refresh_token = parse_boolean(
167 request, name=LoginRestServlet.REFRESH_TOKEN_PARAM, default=False
168 )
169 else:
170 should_issue_refresh_token = False
171
140172 try:
141173 if login_submission["type"] == LoginRestServlet.APPSERVICE_TYPE:
142174 appservice = self.auth.get_appservice_by_req(request)
146178 None, request.getClientIP()
147179 )
148180
149 result = await self._do_appservice_login(login_submission, appservice)
181 result = await self._do_appservice_login(
182 login_submission,
183 appservice,
184 should_issue_refresh_token=should_issue_refresh_token,
185 )
150186 elif self.jwt_enabled and (
151187 login_submission["type"] == LoginRestServlet.JWT_TYPE
152188 or login_submission["type"] == LoginRestServlet.JWT_TYPE_DEPRECATED
153189 ):
154190 await self._address_ratelimiter.ratelimit(None, request.getClientIP())
155 result = await self._do_jwt_login(login_submission)
191 result = await self._do_jwt_login(
192 login_submission,
193 should_issue_refresh_token=should_issue_refresh_token,
194 )
156195 elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
157196 await self._address_ratelimiter.ratelimit(None, request.getClientIP())
158 result = await self._do_token_login(login_submission)
197 result = await self._do_token_login(
198 login_submission,
199 should_issue_refresh_token=should_issue_refresh_token,
200 )
159201 else:
160202 await self._address_ratelimiter.ratelimit(None, request.getClientIP())
161 result = await self._do_other_login(login_submission)
203 result = await self._do_other_login(
204 login_submission,
205 should_issue_refresh_token=should_issue_refresh_token,
206 )
162207 except KeyError:
163208 raise SynapseError(400, "Missing JSON keys.")
164209
168213 return 200, result
169214
170215 async def _do_appservice_login(
171 self, login_submission: JsonDict, appservice: ApplicationService
216 self,
217 login_submission: JsonDict,
218 appservice: ApplicationService,
219 should_issue_refresh_token: bool = False,
172220 ):
173221 identifier = login_submission.get("identifier")
174222 logger.info("Got appservice login request with identifier: %r", identifier)
197245 raise LoginError(403, "Invalid access_token", errcode=Codes.FORBIDDEN)
198246
199247 return await self._complete_login(
200 qualified_user_id, login_submission, ratelimit=appservice.is_rate_limited()
201 )
202
203 async def _do_other_login(self, login_submission: JsonDict) -> Dict[str, str]:
248 qualified_user_id,
249 login_submission,
250 ratelimit=appservice.is_rate_limited(),
251 should_issue_refresh_token=should_issue_refresh_token,
252 )
253
254 async def _do_other_login(
255 self, login_submission: JsonDict, should_issue_refresh_token: bool = False
256 ) -> LoginResponse:
204257 """Handle non-token/saml/jwt logins
205258
206259 Args:
207260 login_submission:
261 should_issue_refresh_token: True if this login should issue
262 a refresh token alongside the access token.
208263
209264 Returns:
210265 HTTP response
223278 login_submission, ratelimit=True
224279 )
225280 result = await self._complete_login(
226 canonical_user_id, login_submission, callback
281 canonical_user_id,
282 login_submission,
283 callback,
284 should_issue_refresh_token=should_issue_refresh_token,
227285 )
228286 return result
229287
231289 self,
232290 user_id: str,
233291 login_submission: JsonDict,
234 callback: Optional[Callable[[Dict[str, str]], Awaitable[None]]] = None,
292 callback: Optional[Callable[[LoginResponse], Awaitable[None]]] = None,
235293 create_non_existent_users: bool = False,
236294 ratelimit: bool = True,
237295 auth_provider_id: Optional[str] = None,
238 ) -> Dict[str, str]:
296 should_issue_refresh_token: bool = False,
297 ) -> LoginResponse:
239298 """Called when we've successfully authed the user and now need to
240299 actually login them in (e.g. create devices). This gets called on
241300 all successful logins.
252311 ratelimit: Whether to ratelimit the login request.
253312 auth_provider_id: The SSO IdP the user used, if any (just used for the
254313 prometheus metrics).
314 should_issue_refresh_token: True if this login should issue
315 a refresh token alongside the access token.
255316
256317 Returns:
257318 result: Dictionary of account information after successful login.
273334
274335 device_id = login_submission.get("device_id")
275336 initial_display_name = login_submission.get("initial_device_display_name")
276 device_id, access_token = await self.registration_handler.register_device(
277 user_id, device_id, initial_display_name, auth_provider_id=auth_provider_id
278 )
279
280 result = {
281 "user_id": user_id,
282 "access_token": access_token,
283 "home_server": self.hs.hostname,
284 "device_id": device_id,
285 }
337 (
338 device_id,
339 access_token,
340 valid_until_ms,
341 refresh_token,
342 ) = await self.registration_handler.register_device(
343 user_id,
344 device_id,
345 initial_display_name,
346 auth_provider_id=auth_provider_id,
347 should_issue_refresh_token=should_issue_refresh_token,
348 )
349
350 result = LoginResponse(
351 user_id=user_id,
352 access_token=access_token,
353 home_server=self.hs.hostname,
354 device_id=device_id,
355 )
356
357 if valid_until_ms is not None:
358 expires_in_ms = valid_until_ms - self.clock.time_msec()
359 result["expires_in_ms"] = expires_in_ms
360
361 if refresh_token is not None:
362 result["refresh_token"] = refresh_token
286363
287364 if callback is not None:
288365 await callback(result)
289366
290367 return result
291368
292 async def _do_token_login(self, login_submission: JsonDict) -> Dict[str, str]:
369 async def _do_token_login(
370 self, login_submission: JsonDict, should_issue_refresh_token: bool = False
371 ) -> LoginResponse:
293372 """
294373 Handle the final stage of SSO login.
295374
296375 Args:
297 login_submission: The JSON request body.
376 login_submission: The JSON request body.
377 should_issue_refresh_token: True if this login should issue
378 a refresh token alongside the access token.
298379
299380 Returns:
300381 The body of the JSON response.
308389 login_submission,
309390 self.auth_handler._sso_login_callback,
310391 auth_provider_id=res.auth_provider_id,
311 )
312
313 async def _do_jwt_login(self, login_submission: JsonDict) -> Dict[str, str]:
392 should_issue_refresh_token=should_issue_refresh_token,
393 )
394
395 async def _do_jwt_login(
396 self, login_submission: JsonDict, should_issue_refresh_token: bool = False
397 ) -> LoginResponse:
314398 token = login_submission.get("token", None)
315399 if token is None:
316400 raise LoginError(
341425
342426 user_id = UserID(user, self.hs.hostname).to_string()
343427 result = await self._complete_login(
344 user_id, login_submission, create_non_existent_users=True
428 user_id,
429 login_submission,
430 create_non_existent_users=True,
431 should_issue_refresh_token=should_issue_refresh_token,
345432 )
346433 return result
347434
368455 if use_unstable_brands and idp.unstable_idp_brand:
369456 e["brand"] = idp.unstable_idp_brand
370457 return e
458
459
460 class RefreshTokenServlet(RestServlet):
461 PATTERNS = client_patterns(
462 "/org.matrix.msc2918.refresh_token/refresh$", releases=(), unstable=True
463 )
464
465 def __init__(self, hs: "HomeServer"):
466 self._auth_handler = hs.get_auth_handler()
467 self._clock = hs.get_clock()
468 self.access_token_lifetime = hs.config.access_token_lifetime
469
470 async def on_POST(
471 self,
472 request: SynapseRequest,
473 ):
474 refresh_submission = parse_json_object_from_request(request)
475
476 assert_params_in_dict(refresh_submission, ["refresh_token"])
477 token = refresh_submission["refresh_token"]
478 if not isinstance(token, str):
479 raise SynapseError(400, "Invalid param: refresh_token", Codes.INVALID_PARAM)
480
481 valid_until_ms = self._clock.time_msec() + self.access_token_lifetime
482 access_token, refresh_token = await self._auth_handler.refresh_token(
483 token, valid_until_ms
484 )
485 expires_in_ms = valid_until_ms - self._clock.time_msec()
486 return (
487 200,
488 {
489 "access_token": access_token,
490 "refresh_token": refresh_token,
491 "expires_in_ms": expires_in_ms,
492 },
493 )
371494
372495
373496 class SsoRedirectServlet(RestServlet):
476599
477600 def register_servlets(hs, http_server):
478601 LoginRestServlet(hs).register(http_server)
602 if hs.config.access_token_lifetime is not None:
603 RefreshTokenServlet(hs).register(http_server)
479604 SsoRedirectServlet(hs).register(http_server)
480605 if hs.config.cas_enabled:
481606 CasTicketServlet(hs).register(http_server)
4040 from synapse.http.servlet import (
4141 RestServlet,
4242 assert_params_in_dict,
43 parse_boolean,
4344 parse_json_object_from_request,
4445 parse_string,
4546 )
4647 from synapse.metrics import threepid_send_requests
4748 from synapse.push.mailer import Mailer
49 from synapse.types import JsonDict
4850 from synapse.util.msisdn import phone_number_to_msisdn
4951 from synapse.util.ratelimitutils import FederationRateLimiter
5052 from synapse.util.stringutils import assert_valid_client_secret, random_string
398400 self.password_policy_handler = hs.get_password_policy_handler()
399401 self.clock = hs.get_clock()
400402 self._registration_enabled = self.hs.config.enable_registration
403 self._msc2918_enabled = hs.config.access_token_lifetime is not None
401404
402405 self._registration_flows = _calculate_registration_flows(
403406 hs.config, self.auth_handler
422425 raise UnrecognizedRequestError(
423426 "Do not understand membership kind: %s" % (kind.decode("utf8"),)
424427 )
428
429 if self._msc2918_enabled:
430 # Check if this registration should also issue a refresh token, as
431 # per MSC2918
432 should_issue_refresh_token = parse_boolean(
433 request, name="org.matrix.msc2918.refresh_token", default=False
434 )
435 else:
436 should_issue_refresh_token = False
425437
426438 # Pull out the provided username and do basic sanity checks early since
427439 # the auth layer will store these in sessions.
461473 raise SynapseError(400, "Desired Username is missing or not a string")
462474
463475 result = await self._do_appservice_registration(
464 desired_username, access_token, body
476 desired_username,
477 access_token,
478 body,
479 should_issue_refresh_token=should_issue_refresh_token,
465480 )
466481
467482 return 200, result
664679 registered = True
665680
666681 return_dict = await self._create_registration_details(
667 registered_user_id, params
682 registered_user_id,
683 params,
684 should_issue_refresh_token=should_issue_refresh_token,
668685 )
669686
670687 if registered:
676693
677694 return 200, return_dict
678695
679 async def _do_appservice_registration(self, username, as_token, body):
696 async def _do_appservice_registration(
697 self, username, as_token, body, should_issue_refresh_token: bool = False
698 ):
680699 user_id = await self.registration_handler.appservice_register(
681700 username, as_token
682701 )
684703 user_id,
685704 body,
686705 is_appservice_ghost=True,
706 should_issue_refresh_token=should_issue_refresh_token,
687707 )
688708
689709 async def _create_registration_details(
690 self, user_id, params, is_appservice_ghost=False
710 self,
711 user_id: str,
712 params: JsonDict,
713 is_appservice_ghost: bool = False,
714 should_issue_refresh_token: bool = False,
691715 ):
692716 """Complete registration of newly-registered user
693717
694718 Allocates device_id if one was not given; also creates access_token.
695719
696720 Args:
697 (str) user_id: full canonical @user:id
698 (object) params: registration parameters, from which we pull
699 device_id, initial_device_name and inhibit_login
721 user_id: full canonical @user:id
722 params: registration parameters, from which we pull device_id,
723 initial_device_name and inhibit_login
724 is_appservice_ghost
725 should_issue_refresh_token: True if this registration should issue
726 a refresh token alongside the access token.
700727 Returns:
701728 dictionary for response from /register
702729 """
704731 if not params.get("inhibit_login", False):
705732 device_id = params.get("device_id")
706733 initial_display_name = params.get("initial_device_display_name")
707 device_id, access_token = await self.registration_handler.register_device(
734 (
735 device_id,
736 access_token,
737 valid_until_ms,
738 refresh_token,
739 ) = await self.registration_handler.register_device(
708740 user_id,
709741 device_id,
710742 initial_display_name,
711743 is_guest=False,
712744 is_appservice_ghost=is_appservice_ghost,
745 should_issue_refresh_token=should_issue_refresh_token,
713746 )
714747
715748 result.update({"access_token": access_token, "device_id": device_id})
749
750 if valid_until_ms is not None:
751 expires_in_ms = valid_until_ms - self.clock.time_msec()
752 result["expires_in_ms"] = expires_in_ms
753
754 if refresh_token is not None:
755 result["refresh_token"] = refresh_token
756
716757 return result
717758
718759 async def _do_guest_registration(self, params, address=None):
726767 # we have nowhere to store it.
727768 device_id = synapse.api.auth.GUEST_DEVICE_ID
728769 initial_display_name = params.get("initial_device_display_name")
729 device_id, access_token = await self.registration_handler.register_device(
770 (
771 device_id,
772 access_token,
773 valid_until_ms,
774 refresh_token,
775 ) = await self.registration_handler.register_device(
730776 user_id, device_id, initial_display_name, is_guest=True
731777 )
732778
733 return (
734 200,
735 {
736 "user_id": user_id,
737 "device_id": device_id,
738 "access_token": access_token,
739 "home_server": self.hs.hostname,
740 },
741 )
779 result = {
780 "user_id": user_id,
781 "device_id": device_id,
782 "access_token": access_token,
783 "home_server": self.hs.hostname,
784 }
785
786 if valid_until_ms is not None:
787 expires_in_ms = valid_until_ms - self.clock.time_msec()
788 result["expires_in_ms"] = expires_in_ms
789
790 if refresh_token is not None:
791 result["refresh_token"] = refresh_token
792
793 return 200, result
742794
743795
744796 def _calculate_registration_flows(
1212 # limitations under the License.
1313 import itertools
1414 import logging
15 from collections import defaultdict
1516 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple
1617
1718 from synapse.api.constants import Membership, PresenceState
231232 )
232233
233234 logger.debug("building sync response dict")
234 return {
235 "account_data": {"events": sync_result.account_data},
236 "to_device": {"events": sync_result.to_device},
237 "device_lists": {
238 "changed": list(sync_result.device_lists.changed),
239 "left": list(sync_result.device_lists.left),
240 },
241 "presence": SyncRestServlet.encode_presence(sync_result.presence, time_now),
242 "rooms": {
243 Membership.JOIN: joined,
244 Membership.INVITE: invited,
245 Membership.KNOCK: knocked,
246 Membership.LEAVE: archived,
247 },
248 "groups": {
249 Membership.JOIN: sync_result.groups.join,
250 Membership.INVITE: sync_result.groups.invite,
251 Membership.LEAVE: sync_result.groups.leave,
252 },
253 "device_one_time_keys_count": sync_result.device_one_time_keys_count,
254 "org.matrix.msc2732.device_unused_fallback_key_types": sync_result.device_unused_fallback_key_types,
255 "next_batch": await sync_result.next_batch.to_string(self.store),
256 }
235
236 response: dict = defaultdict(dict)
237 response["next_batch"] = await sync_result.next_batch.to_string(self.store)
238
239 if sync_result.account_data:
240 response["account_data"] = {"events": sync_result.account_data}
241 if sync_result.presence:
242 response["presence"] = SyncRestServlet.encode_presence(
243 sync_result.presence, time_now
244 )
245
246 if sync_result.to_device:
247 response["to_device"] = {"events": sync_result.to_device}
248
249 if sync_result.device_lists.changed:
250 response["device_lists"]["changed"] = list(sync_result.device_lists.changed)
251 if sync_result.device_lists.left:
252 response["device_lists"]["left"] = list(sync_result.device_lists.left)
253
254 if sync_result.device_one_time_keys_count:
255 response[
256 "device_one_time_keys_count"
257 ] = sync_result.device_one_time_keys_count
258 if sync_result.device_unused_fallback_key_types:
259 response[
260 "org.matrix.msc2732.device_unused_fallback_key_types"
261 ] = sync_result.device_unused_fallback_key_types
262
263 if joined:
264 response["rooms"][Membership.JOIN] = joined
265 if invited:
266 response["rooms"][Membership.INVITE] = invited
267 if knocked:
268 response["rooms"][Membership.KNOCK] = knocked
269 if archived:
270 response["rooms"][Membership.LEAVE] = archived
271
272 if sync_result.groups.join:
273 response["groups"][Membership.JOIN] = sync_result.groups.join
274 if sync_result.groups.invite:
275 response["groups"][Membership.INVITE] = sync_result.groups.invite
276 if sync_result.groups.leave:
277 response["groups"][Membership.LEAVE] = sync_result.groups.leave
278
279 return response
257280
258281 @staticmethod
259282 def encode_presence(events, time_now):
110110 db_config: DatabaseConnectionConfig,
111111 engine: BaseDatabaseEngine,
112112 default_txn_name: str,
113 ) -> Connection:
113 ) -> "LoggingDatabaseConnection":
114114 """Make a new connection to the database and return it.
115115
116116 Returns:
1515 from queue import Empty, PriorityQueue
1616 from typing import Collection, Dict, Iterable, List, Optional, Set, Tuple
1717
18 from prometheus_client import Gauge
19
1820 from synapse.api.constants import MAX_DEPTH
1921 from synapse.api.errors import StoreError
2022 from synapse.api.room_versions import RoomVersion
3133 from synapse.util.caches.lrucache import LruCache
3234 from synapse.util.iterutils import batch_iter
3335
36 oldest_pdu_in_federation_staging = Gauge(
37 "synapse_federation_server_oldest_inbound_pdu_in_staging",
38 "The age in seconds since we received the oldest pdu in the federation staging area",
39 )
40
41 number_pdus_in_federation_queue = Gauge(
42 "synapse_federation_server_number_inbound_pdu_in_staging",
43 "The total number of events in the inbound federation staging",
44 )
45
3446 logger = logging.getLogger(__name__)
3547
3648
5264 self._event_auth_cache = LruCache(
5365 500000, "_event_auth_cache", size_callback=len
5466 ) # type: LruCache[str, List[Tuple[str, int]]]
67
68 self._clock.looping_call(self._get_stats_for_federation_staging, 30 * 1000)
5569
5670 async def get_auth_chain(
5771 self, room_id: str, event_ids: Collection[str], include_given: bool = False
10741088 self,
10751089 origin: str,
10761090 event_id: str,
1077 ) -> None:
1078 """Remove the given event from the staging area"""
1079 await self.db_pool.simple_delete(
1080 table="federation_inbound_events_staging",
1081 keyvalues={
1082 "origin": origin,
1083 "event_id": event_id,
1084 },
1085 desc="remove_received_event_from_staging",
1086 )
1091 ) -> Optional[int]:
1092 """Remove the given event from the staging area.
1093
1094 Returns:
1095 The received_ts of the row that was deleted, if any.
1096 """
1097 if self.db_pool.engine.supports_returning:
1098
1099 def _remove_received_event_from_staging_txn(txn):
1100 sql = """
1101 DELETE FROM federation_inbound_events_staging
1102 WHERE origin = ? AND event_id = ?
1103 RETURNING received_ts
1104 """
1105
1106 txn.execute(sql, (origin, event_id))
1107 return txn.fetchone()
1108
1109 row = await self.db_pool.runInteraction(
1110 "remove_received_event_from_staging",
1111 _remove_received_event_from_staging_txn,
1112 db_autocommit=True,
1113 )
1114 if row is None:
1115 return None
1116
1117 return row[0]
1118
1119 else:
1120
1121 def _remove_received_event_from_staging_txn(txn):
1122 received_ts = self.db_pool.simple_select_one_onecol_txn(
1123 txn,
1124 table="federation_inbound_events_staging",
1125 keyvalues={
1126 "origin": origin,
1127 "event_id": event_id,
1128 },
1129 retcol="received_ts",
1130 allow_none=True,
1131 )
1132 self.db_pool.simple_delete_txn(
1133 txn,
1134 table="federation_inbound_events_staging",
1135 keyvalues={
1136 "origin": origin,
1137 "event_id": event_id,
1138 },
1139 )
1140
1141 return received_ts
1142
1143 return await self.db_pool.runInteraction(
1144 "remove_received_event_from_staging",
1145 _remove_received_event_from_staging_txn,
1146 )
10871147
10881148 async def get_next_staged_event_id_for_room(
10891149 self,
11461206
11471207 return origin, event
11481208
1209 async def get_all_rooms_with_staged_incoming_events(self) -> List[str]:
1210 """Get the room IDs of all events currently staged."""
1211 return await self.db_pool.simple_select_onecol(
1212 table="federation_inbound_events_staging",
1213 keyvalues={},
1214 retcol="DISTINCT room_id",
1215 desc="get_all_rooms_with_staged_incoming_events",
1216 )
1217
1218 @wrap_as_background_process("_get_stats_for_federation_staging")
1219 async def _get_stats_for_federation_staging(self):
1220 """Update the prometheus metrics for the inbound federation staging area."""
1221
1222 def _get_stats_for_federation_staging_txn(txn):
1223 txn.execute(
1224 "SELECT coalesce(count(*), 0) FROM federation_inbound_events_staging"
1225 )
1226 (count,) = txn.fetchone()
1227
1228 txn.execute(
1229 "SELECT coalesce(min(received_ts), 0) FROM federation_inbound_events_staging"
1230 )
1231
1232 (age,) = txn.fetchone()
1233
1234 return count, age
1235
1236 count, age = await self.db_pool.runInteraction(
1237 "_get_stats_for_federation_staging", _get_stats_for_federation_staging_txn
1238 )
1239
1240 number_pdus_in_federation_queue.set(count)
1241 oldest_pdu_in_federation_staging.set(age)
1242
11491243
11501244 class EventFederationStore(EventFederationWorkerStore):
11511245 """Responsible for storing and serving up the various graphs associated
2828 logger = logging.getLogger(__name__)
2929
3030
31 _REPLACE_STREAM_ORDERING_SQL_COMMANDS = (
32 # there should be no leftover rows without a stream_ordering2, but just in case...
33 "UPDATE events SET stream_ordering2 = stream_ordering WHERE stream_ordering2 IS NULL",
34 # now we can drop the rule and switch the columns
35 "DROP RULE populate_stream_ordering2 ON events",
36 "ALTER TABLE events DROP COLUMN stream_ordering",
37 "ALTER TABLE events RENAME COLUMN stream_ordering2 TO stream_ordering",
38 # ... and finally, rename the indexes into place for consistency with sqlite
39 "ALTER INDEX event_contains_url_index2 RENAME TO event_contains_url_index",
40 "ALTER INDEX events_order_room2 RENAME TO events_order_room",
41 "ALTER INDEX events_room_stream2 RENAME TO events_room_stream",
42 "ALTER INDEX events_ts2 RENAME TO events_ts",
43 )
44
45
46 class _BackgroundUpdates:
47 EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
48 EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url"
49 DELETE_SOFT_FAILED_EXTREMITIES = "delete_soft_failed_extremities"
50 POPULATE_STREAM_ORDERING2 = "populate_stream_ordering2"
51 INDEX_STREAM_ORDERING2 = "index_stream_ordering2"
52 INDEX_STREAM_ORDERING2_CONTAINS_URL = "index_stream_ordering2_contains_url"
53 INDEX_STREAM_ORDERING2_ROOM_ORDER = "index_stream_ordering2_room_order"
54 INDEX_STREAM_ORDERING2_ROOM_STREAM = "index_stream_ordering2_room_stream"
55 INDEX_STREAM_ORDERING2_TS = "index_stream_ordering2_ts"
56 REPLACE_STREAM_ORDERING_COLUMN = "replace_stream_ordering_column"
57
58
3159 @attr.s(slots=True, frozen=True)
3260 class _CalculateChainCover:
3361 """Return value for _calculate_chain_cover_txn."""
4775
4876
4977 class EventsBackgroundUpdatesStore(SQLBaseStore):
50
51 EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
52 EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url"
53 DELETE_SOFT_FAILED_EXTREMITIES = "delete_soft_failed_extremities"
54
5578 def __init__(self, database: DatabasePool, db_conn, hs):
5679 super().__init__(database, db_conn, hs)
5780
5881 self.db_pool.updates.register_background_update_handler(
59 self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
82 _BackgroundUpdates.EVENT_ORIGIN_SERVER_TS_NAME,
83 self._background_reindex_origin_server_ts,
6084 )
6185 self.db_pool.updates.register_background_update_handler(
62 self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME,
86 _BackgroundUpdates.EVENT_FIELDS_SENDER_URL_UPDATE_NAME,
6387 self._background_reindex_fields_sender,
6488 )
6589
84108 )
85109
86110 self.db_pool.updates.register_background_update_handler(
87 self.DELETE_SOFT_FAILED_EXTREMITIES, self._cleanup_extremities_bg_update
111 _BackgroundUpdates.DELETE_SOFT_FAILED_EXTREMITIES,
112 self._cleanup_extremities_bg_update,
88113 )
89114
90115 self.db_pool.updates.register_background_update_handler(
138163 self._purged_chain_cover_index,
139164 )
140165
166 ################################################################################
167
168 # bg updates for replacing stream_ordering with a BIGINT
169 # (these only run on postgres.)
170
171 self.db_pool.updates.register_background_update_handler(
172 _BackgroundUpdates.POPULATE_STREAM_ORDERING2,
173 self._background_populate_stream_ordering2,
174 )
175 # CREATE UNIQUE INDEX events_stream_ordering ON events(stream_ordering2);
176 self.db_pool.updates.register_background_index_update(
177 _BackgroundUpdates.INDEX_STREAM_ORDERING2,
178 index_name="events_stream_ordering",
179 table="events",
180 columns=["stream_ordering2"],
181 unique=True,
182 )
183 # CREATE INDEX event_contains_url_index ON events(room_id, topological_ordering, stream_ordering) WHERE contains_url = true AND outlier = false;
184 self.db_pool.updates.register_background_index_update(
185 _BackgroundUpdates.INDEX_STREAM_ORDERING2_CONTAINS_URL,
186 index_name="event_contains_url_index2",
187 table="events",
188 columns=["room_id", "topological_ordering", "stream_ordering2"],
189 where_clause="contains_url = true AND outlier = false",
190 )
191 # CREATE INDEX events_order_room ON events(room_id, topological_ordering, stream_ordering);
192 self.db_pool.updates.register_background_index_update(
193 _BackgroundUpdates.INDEX_STREAM_ORDERING2_ROOM_ORDER,
194 index_name="events_order_room2",
195 table="events",
196 columns=["room_id", "topological_ordering", "stream_ordering2"],
197 )
198 # CREATE INDEX events_room_stream ON events(room_id, stream_ordering);
199 self.db_pool.updates.register_background_index_update(
200 _BackgroundUpdates.INDEX_STREAM_ORDERING2_ROOM_STREAM,
201 index_name="events_room_stream2",
202 table="events",
203 columns=["room_id", "stream_ordering2"],
204 )
205 # CREATE INDEX events_ts ON events(origin_server_ts, stream_ordering);
206 self.db_pool.updates.register_background_index_update(
207 _BackgroundUpdates.INDEX_STREAM_ORDERING2_TS,
208 index_name="events_ts2",
209 table="events",
210 columns=["origin_server_ts", "stream_ordering2"],
211 )
212 self.db_pool.updates.register_background_update_handler(
213 _BackgroundUpdates.REPLACE_STREAM_ORDERING_COLUMN,
214 self._background_replace_stream_ordering_column,
215 )
216
217 ################################################################################
218
141219 async def _background_reindex_fields_sender(self, progress, batch_size):
142220 target_min_stream_id = progress["target_min_stream_id_inclusive"]
143221 max_stream_id = progress["max_stream_id_exclusive"]
189267 }
190268
191269 self.db_pool.updates._background_update_progress_txn(
192 txn, self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, progress
270 txn, _BackgroundUpdates.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, progress
193271 )
194272
195273 return len(rows)
196274
197275 result = await self.db_pool.runInteraction(
198 self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, reindex_txn
276 _BackgroundUpdates.EVENT_FIELDS_SENDER_URL_UPDATE_NAME, reindex_txn
199277 )
200278
201279 if not result:
202280 await self.db_pool.updates._end_background_update(
203 self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME
281 _BackgroundUpdates.EVENT_FIELDS_SENDER_URL_UPDATE_NAME
204282 )
205283
206284 return result
263341 }
264342
265343 self.db_pool.updates._background_update_progress_txn(
266 txn, self.EVENT_ORIGIN_SERVER_TS_NAME, progress
344 txn, _BackgroundUpdates.EVENT_ORIGIN_SERVER_TS_NAME, progress
267345 )
268346
269347 return len(rows_to_update)
270348
271349 result = await self.db_pool.runInteraction(
272 self.EVENT_ORIGIN_SERVER_TS_NAME, reindex_search_txn
350 _BackgroundUpdates.EVENT_ORIGIN_SERVER_TS_NAME, reindex_search_txn
273351 )
274352
275353 if not result:
276354 await self.db_pool.updates._end_background_update(
277 self.EVENT_ORIGIN_SERVER_TS_NAME
355 _BackgroundUpdates.EVENT_ORIGIN_SERVER_TS_NAME
278356 )
279357
280358 return result
453531
454532 if not num_handled:
455533 await self.db_pool.updates._end_background_update(
456 self.DELETE_SOFT_FAILED_EXTREMITIES
534 _BackgroundUpdates.DELETE_SOFT_FAILED_EXTREMITIES
457535 )
458536
459537 def _drop_table_txn(txn):
10081086 await self.db_pool.updates._end_background_update("purged_chain_cover")
10091087
10101088 return result
1089
1090 async def _background_populate_stream_ordering2(
1091 self, progress: JsonDict, batch_size: int
1092 ) -> int:
1093 """Populate events.stream_ordering2, then replace stream_ordering
1094
1095 This is to deal with the fact that stream_ordering was initially created as a
1096 32-bit integer field.
1097 """
1098 batch_size = max(batch_size, 1)
1099
1100 def process(txn: Cursor) -> int:
1101 last_stream = progress.get("last_stream", -(1 << 31))
1102 txn.execute(
1103 """
1104 UPDATE events SET stream_ordering2=stream_ordering
1105 WHERE stream_ordering IN (
1106 SELECT stream_ordering FROM events WHERE stream_ordering > ?
1107 ORDER BY stream_ordering LIMIT ?
1108 )
1109 RETURNING stream_ordering;
1110 """,
1111 (last_stream, batch_size),
1112 )
1113 row_count = txn.rowcount
1114 if row_count == 0:
1115 return 0
1116 last_stream = max(row[0] for row in txn)
1117 logger.info("populated stream_ordering2 up to %i", last_stream)
1118
1119 self.db_pool.updates._background_update_progress_txn(
1120 txn,
1121 _BackgroundUpdates.POPULATE_STREAM_ORDERING2,
1122 {"last_stream": last_stream},
1123 )
1124 return row_count
1125
1126 result = await self.db_pool.runInteraction(
1127 "_background_populate_stream_ordering2", process
1128 )
1129
1130 if result != 0:
1131 return result
1132
1133 await self.db_pool.updates._end_background_update(
1134 _BackgroundUpdates.POPULATE_STREAM_ORDERING2
1135 )
1136 return 0
1137
1138 async def _background_replace_stream_ordering_column(
1139 self, progress: JsonDict, batch_size: int
1140 ) -> int:
1141 """Drop the old 'stream_ordering' column and rename 'stream_ordering2' into its place."""
1142
1143 def process(txn: Cursor) -> None:
1144 for sql in _REPLACE_STREAM_ORDERING_SQL_COMMANDS:
1145 logger.info("completing stream_ordering migration: %s", sql)
1146 txn.execute(sql)
1147
1148 # ANALYZE the new column to build stats on it, to encourage PostgreSQL to use the
1149 # indexes on it.
1150 # We need to pass execute a dummy function to handle the txn's result otherwise
1151 # it tries to call fetchall() on it and fails because there's no result to fetch.
1152 await self.db_pool.execute(
1153 "background_analyze_new_stream_ordering_column",
1154 lambda txn: None,
1155 "ANALYZE events(stream_ordering2)",
1156 )
1157
1158 await self.db_pool.runInteraction(
1159 "_background_replace_stream_ordering_column", process
1160 )
1161
1162 await self.db_pool.updates._end_background_update(
1163 _BackgroundUpdates.REPLACE_STREAM_ORDERING_COLUMN
1164 )
1165
1166 return 0
309309 _excinst: Optional[BaseException],
310310 _exctb: Optional[TracebackType],
311311 ) -> bool:
312 await self.release()
313
314 return False
315
316 async def release(self) -> None:
317 """Release the lock.
318
319 This is automatically called when using the lock as a context manager.
320 """
321
322 if self._dropped:
323 return
324
312325 if self._looping_call.running:
313326 self._looping_call.stop()
314327
315328 await self._store._drop_lock(self._lock_name, self._lock_key, self._token)
316329 self._dropped = True
317
318 return False
319330
320331 def __del__(self) -> None:
321332 if not self._dropped:
7272 async def set_profile_displayname(
7373 self, user_localpart: str, new_displayname: Optional[str]
7474 ) -> None:
75 await self.db_pool.simple_update_one(
75 await self.db_pool.simple_upsert(
7676 table="profiles",
7777 keyvalues={"user_id": user_localpart},
78 updatevalues={"displayname": new_displayname},
78 values={"displayname": new_displayname},
7979 desc="set_profile_displayname",
8080 )
8181
8282 async def set_profile_avatar_url(
8383 self, user_localpart: str, new_avatar_url: Optional[str]
8484 ) -> None:
85 await self.db_pool.simple_update_one(
85 await self.db_pool.simple_upsert(
8686 table="profiles",
8787 keyvalues={"user_id": user_localpart},
88 updatevalues={"avatar_url": new_avatar_url},
88 values={"avatar_url": new_avatar_url},
8989 desc="set_profile_avatar_url",
9090 )
9191
5252 valid_until_ms: The timestamp the token expires, if any.
5353 token_owner: The "owner" of the token. This is either the same as the
5454 user, or a server admin who is logged in as the user.
55 token_used: True if this token was used at least once in a request.
56 This field can be out of date since `get_user_by_access_token` is
57 cached.
5558 """
5659
5760 user_id = attr.ib(type=str)
6164 device_id = attr.ib(type=Optional[str], default=None)
6265 valid_until_ms = attr.ib(type=Optional[int], default=None)
6366 token_owner = attr.ib(type=str)
67 token_used = attr.ib(type=bool, default=False)
6468
6569 # Make the token owner default to the user ID, which is the common case.
6670 @token_owner.default
6771 def _default_token_owner(self):
6872 return self.user_id
73
74
75 @attr.s(frozen=True, slots=True)
76 class RefreshTokenLookupResult:
77 """Result of looking up a refresh token."""
78
79 user_id = attr.ib(type=str)
80 """The user this token belongs to."""
81
82 device_id = attr.ib(type=str)
83 """The device associated with this refresh token."""
84
85 token_id = attr.ib(type=int)
86 """The ID of this refresh token."""
87
88 next_token_id = attr.ib(type=Optional[int])
89 """The ID of the refresh token which replaced this one."""
90
91 has_next_refresh_token_been_refreshed = attr.ib(type=bool)
92 """True if the next refresh token was used for another refresh."""
93
94 has_next_access_token_been_used = attr.ib(type=bool)
95 """True if the next access token was already used at least once."""
6996
7097
7198 class RegistrationWorkerStore(CacheInvalidationWorkerStore):
440467 access_tokens.id as token_id,
441468 access_tokens.device_id,
442469 access_tokens.valid_until_ms,
443 access_tokens.user_id as token_owner
470 access_tokens.user_id as token_owner,
471 access_tokens.used as token_used
444472 FROM users
445473 INNER JOIN access_tokens on users.name = COALESCE(puppets_user_id, access_tokens.user_id)
446474 WHERE token = ?
448476
449477 txn.execute(sql, (token,))
450478 rows = self.db_pool.cursor_to_dict(txn)
479
451480 if rows:
452 return TokenLookupResult(**rows[0])
481 row = rows[0]
482
483 # This field is nullable, ensure it comes out as a boolean
484 if row["token_used"] is None:
485 row["token_used"] = False
486
487 return TokenLookupResult(**row)
453488
454489 return None
455490
10691104 {"id": token_id},
10701105 {"last_validated": now},
10711106 desc="update_access_token_last_validated",
1107 )
1108
1109 @cached()
1110 async def mark_access_token_as_used(self, token_id: int) -> None:
1111 """
1112 Mark the access token as used, which invalidates the refresh token used
1113 to obtain it.
1114
1115 Because get_user_by_access_token is cached, this function might be
1116 called multiple times for the same token, effectively doing unnecessary
1117 SQL updates. Because updating the `used` field only goes one way (from
1118 False to True) it is safe to cache this function as well to avoid this
1119 issue.
1120
1121 Args:
1122 token_id: The ID of the access token to update.
1123 Raises:
1124 StoreError if there was a problem updating this.
1125 """
1126 await self.db_pool.simple_update_one(
1127 "access_tokens",
1128 {"id": token_id},
1129 {"used": True},
1130 desc="mark_access_token_as_used",
1131 )
1132
1133 async def lookup_refresh_token(
1134 self, token: str
1135 ) -> Optional[RefreshTokenLookupResult]:
1136 """Lookup a refresh token with hints about its validity."""
1137
1138 def _lookup_refresh_token_txn(txn) -> Optional[RefreshTokenLookupResult]:
1139 txn.execute(
1140 """
1141 SELECT
1142 rt.id token_id,
1143 rt.user_id,
1144 rt.device_id,
1145 rt.next_token_id,
1146 (nrt.next_token_id IS NOT NULL) has_next_refresh_token_been_refreshed,
1147 at.used has_next_access_token_been_used
1148 FROM refresh_tokens rt
1149 LEFT JOIN refresh_tokens nrt ON rt.next_token_id = nrt.id
1150 LEFT JOIN access_tokens at ON at.refresh_token_id = nrt.id
1151 WHERE rt.token = ?
1152 """,
1153 (token,),
1154 )
1155 row = txn.fetchone()
1156
1157 if row is None:
1158 return None
1159
1160 return RefreshTokenLookupResult(
1161 token_id=row[0],
1162 user_id=row[1],
1163 device_id=row[2],
1164 next_token_id=row[3],
1165 has_next_refresh_token_been_refreshed=row[4],
1166 # This column is nullable, ensure it's a boolean
1167 has_next_access_token_been_used=(row[5] or False),
1168 )
1169
1170 return await self.db_pool.runInteraction(
1171 "lookup_refresh_token", _lookup_refresh_token_txn
1172 )
1173
1174 async def replace_refresh_token(self, token_id: int, next_token_id: int) -> None:
1175 """
1176 Set the successor of a refresh token, removing the existing successor
1177 if any.
1178
1179 Args:
1180 token_id: ID of the refresh token to update.
1181 next_token_id: ID of its successor.
1182 """
1183
1184 def _replace_refresh_token_txn(txn) -> None:
1185 # First check if there was an existing refresh token
1186 old_next_token_id = self.db_pool.simple_select_one_onecol_txn(
1187 txn,
1188 "refresh_tokens",
1189 {"id": token_id},
1190 "next_token_id",
1191 allow_none=True,
1192 )
1193
1194 self.db_pool.simple_update_one_txn(
1195 txn,
1196 "refresh_tokens",
1197 {"id": token_id},
1198 {"next_token_id": next_token_id},
1199 )
1200
1201 # Delete the old "next" token if it exists. This should cascade and
1202 # delete the associated access_token
1203 if old_next_token_id is not None:
1204 self.db_pool.simple_delete_one_txn(
1205 txn,
1206 "refresh_tokens",
1207 {"id": old_next_token_id},
1208 )
1209
1210 await self.db_pool.runInteraction(
1211 "replace_refresh_token", _replace_refresh_token_txn
10721212 )
10731213
10741214
12621402 self._ignore_unknown_session_error = hs.config.request_token_inhibit_3pid_errors
12631403
12641404 self._access_tokens_id_gen = IdGenerator(db_conn, "access_tokens", "id")
1405 self._refresh_tokens_id_gen = IdGenerator(db_conn, "refresh_tokens", "id")
12651406
12661407 async def add_access_token_to_user(
12671408 self,
12701411 device_id: Optional[str],
12711412 valid_until_ms: Optional[int],
12721413 puppets_user_id: Optional[str] = None,
1414 refresh_token_id: Optional[int] = None,
12731415 ) -> int:
12741416 """Adds an access token for the given user.
12751417
12761418 Args:
12771419 user_id: The user ID.
12781420 token: The new access token to add.
1279 device_id: ID of the device to associate with the access token
1421 device_id: ID of the device to associate with the access token.
12801422 valid_until_ms: when the token is valid until. None for no expiry.
1423 puppets_user_id
1424 refresh_token_id: ID of the refresh token generated alongside this
1425 access token.
12811426 Raises:
12821427 StoreError if there was a problem adding this.
12831428 Returns:
12961441 "valid_until_ms": valid_until_ms,
12971442 "puppets_user_id": puppets_user_id,
12981443 "last_validated": now,
1444 "refresh_token_id": refresh_token_id,
1445 "used": False,
12991446 },
13001447 desc="add_access_token_to_user",
1448 )
1449
1450 return next_id
1451
1452 async def add_refresh_token_to_user(
1453 self,
1454 user_id: str,
1455 token: str,
1456 device_id: Optional[str],
1457 ) -> int:
1458 """Adds a refresh token for the given user.
1459
1460 Args:
1461 user_id: The user ID.
1462 token: The new access token to add.
1463 device_id: ID of the device to associate with the refresh token.
1464 Raises:
1465 StoreError if there was a problem adding this.
1466 Returns:
1467 The token ID
1468 """
1469 next_id = self._refresh_tokens_id_gen.get_next()
1470
1471 await self.db_pool.simple_insert(
1472 "refresh_tokens",
1473 {
1474 "id": next_id,
1475 "user_id": user_id,
1476 "device_id": device_id,
1477 "token": token,
1478 "next_token_id": None,
1479 },
1480 desc="add_refresh_token_to_user",
13011481 )
13021482
13031483 return next_id
15441724 device_id: Optional[str] = None,
15451725 ) -> List[Tuple[str, int, Optional[str]]]:
15461726 """
1547 Invalidate access tokens belonging to a user
1727 Invalidate access and refresh tokens belonging to a user
15481728
15491729 Args:
15501730 user_id: ID of user the tokens belong to
15641744 items = keyvalues.items()
15651745 where_clause = " AND ".join(k + " = ?" for k, _ in items)
15661746 values = [v for _, v in items] # type: List[Union[str, int]]
1747 # Conveniently, refresh_tokens and access_tokens both use the user_id and device_id fields. Only caveat
1748 # is the `except_token_id` param that is tricky to get right, so for now we're just using the same where
1749 # clause and values before we handle that. This seems to be only used in the "set password" handler.
1750 refresh_where_clause = where_clause
1751 refresh_values = values.copy()
15671752 if except_token_id:
1753 # TODO: support that for refresh tokens
15681754 where_clause += " AND id != ?"
15691755 values.append(except_token_id)
15701756
15821768
15831769 txn.execute("DELETE FROM access_tokens WHERE %s" % where_clause, values)
15841770
1771 txn.execute(
1772 "DELETE FROM refresh_tokens WHERE %s" % refresh_where_clause,
1773 refresh_values,
1774 )
1775
15851776 return tokens_and_devices
15861777
15871778 return await self.db_pool.runInteraction("user_delete_access_tokens", f)
15971788 )
15981789
15991790 await self.db_pool.runInteraction("delete_access_token", f)
1791
1792 async def delete_refresh_token(self, refresh_token: str) -> None:
1793 def f(txn):
1794 self.db_pool.simple_delete_one_txn(
1795 txn, table="refresh_tokens", keyvalues={"token": refresh_token}
1796 )
1797
1798 await self.db_pool.runInteraction("delete_refresh_token", f)
16001799
16011800 async def add_user_pending_deactivation(self, user_id: str) -> None:
16021801 """
4646 """
4747 Do we support using `a = ANY(?)` and passing a list
4848 """
49 ...
50
51 @property
52 @abc.abstractmethod
53 def supports_returning(self) -> bool:
54 """Do we support the `RETURNING` clause in insert/update/delete?"""
4955 ...
5056
5157 @abc.abstractmethod
132132 """Do we support using `a = ANY(?)` and passing a list"""
133133 return True
134134
135 @property
136 def supports_returning(self) -> bool:
137 """Do we support the `RETURNING` clause in insert/update/delete?"""
138 return True
139
135140 def is_deadlock(self, error):
136141 if isinstance(error, self.module.DatabaseError):
137142 # https://www.postgresql.org/docs/current/static/errcodes-appendix.html
5858 def supports_using_any_list(self):
5959 """Do we support using `a = ANY(?)` and passing a list"""
6060 return False
61
62 @property
63 def supports_returning(self) -> bool:
64 """Do we support the `RETURNING` clause in insert/update/delete?"""
65 return self.module.sqlite_version_info >= (3, 35, 0)
6166
6267 def check_database(self, db_conn, allow_outdated_version: bool = False):
6368 if not allow_outdated_version:
1111 # See the License for the specific language governing permissions and
1212 # limitations under the License.
1313
14 SCHEMA_VERSION = 59
14 SCHEMA_VERSION = 60
1515 """Represents the expectations made by the codebase about the database schema
1616
1717 This should be incremented whenever the codebase changes its requirements on the
0 /* Copyright 2021 The Matrix.org Foundation C.I.C
1 *
2 * Licensed under the Apache License, Version 2.0 (the "License");
3 * you may not use this file except in compliance with the License.
4 * You may obtain a copy of the License at
5 *
6 * http://www.apache.org/licenses/LICENSE-2.0
7 *
8 * Unless required by applicable law or agreed to in writing, software
9 * distributed under the License is distributed on an "AS IS" BASIS,
10 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 * See the License for the specific language governing permissions and
12 * limitations under the License.
13 */
14
15 -- Holds MSC2918 refresh tokens
16 CREATE TABLE refresh_tokens (
17 id BIGINT PRIMARY KEY,
18 user_id TEXT NOT NULL,
19 device_id TEXT NOT NULL,
20 token TEXT NOT NULL,
21 -- When consumed, a new refresh token is generated, which is tracked by
22 -- this foreign key
23 next_token_id BIGINT REFERENCES refresh_tokens (id) ON DELETE CASCADE,
24 UNIQUE(token)
25 );
26
27 -- Add a reference to the refresh token generated alongside each access token
28 ALTER TABLE "access_tokens"
29 ADD COLUMN refresh_token_id BIGINT REFERENCES refresh_tokens (id) ON DELETE CASCADE;
30
31 -- Add a flag whether the token was already used or not
32 ALTER TABLE "access_tokens"
33 ADD COLUMN used BOOLEAN;
0 /* Copyright 2021 The Matrix.org Foundation C.I.C
1 *
2 * Licensed under the Apache License, Version 2.0 (the "License");
3 * you may not use this file except in compliance with the License.
4 * You may obtain a copy of the License at
5 *
6 * http://www.apache.org/licenses/LICENSE-2.0
7 *
8 * Unless required by applicable law or agreed to in writing, software
9 * distributed under the License is distributed on an "AS IS" BASIS,
10 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 * See the License for the specific language governing permissions and
12 * limitations under the License.
13 */
14
15 -- This migration handles the process of changing the type of `stream_ordering` to
16 -- a BIGINT.
17 --
18 -- Note that this is only a problem on postgres as sqlite only has one "integer" type
19 -- which can cope with values up to 2^63.
20
21 -- First add a new column to contain the bigger stream_ordering
22 ALTER TABLE events ADD COLUMN stream_ordering2 BIGINT;
23
24 -- Create a rule which will populate it for new rows.
25 CREATE OR REPLACE RULE "populate_stream_ordering2" AS
26 ON INSERT TO events
27 DO UPDATE events SET stream_ordering2=NEW.stream_ordering WHERE stream_ordering=NEW.stream_ordering;
28
29 -- Start a bg process to populate it for old events
30 INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
31 (6001, 'populate_stream_ordering2', '{}');
32
33 -- ... and some more to build indexes on it. These aren't really interdependent
34 -- but the backround_updates manager can only handle a single dependency per update.
35 INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES
36 (6001, 'index_stream_ordering2', '{}', 'populate_stream_ordering2'),
37 (6001, 'index_stream_ordering2_room_order', '{}', 'index_stream_ordering2'),
38 (6001, 'index_stream_ordering2_contains_url', '{}', 'index_stream_ordering2_room_order'),
39 (6001, 'index_stream_ordering2_room_stream', '{}', 'index_stream_ordering2_contains_url'),
40 (6001, 'index_stream_ordering2_ts', '{}', 'index_stream_ordering2_room_stream');
41
42 -- ... and another to do the switcheroo
43 INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES
44 (6001, 'replace_stream_ordering_column', '{}', 'index_stream_ordering2_ts');
0 /* Copyright 2021 The Matrix.org Foundation C.I.C
1 *
2 * Licensed under the Apache License, Version 2.0 (the "License");
3 * you may not use this file except in compliance with the License.
4 * You may obtain a copy of the License at
5 *
6 * http://www.apache.org/licenses/LICENSE-2.0
7 *
8 * Unless required by applicable law or agreed to in writing, software
9 * distributed under the License is distributed on an "AS IS" BASIS,
10 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 * See the License for the specific language governing permissions and
12 * limitations under the License.
13 */
14
15 -- This migration is closely related to '01recreate_stream_ordering.sql.postgres'.
16 --
17 -- It updates the other tables which use an INTEGER to refer to a stream ordering.
18 -- These tables are all small enough that a re-create is tractable.
19 ALTER TABLE pushers ALTER COLUMN last_stream_ordering SET DATA TYPE BIGINT;
20 ALTER TABLE federation_stream_position ALTER COLUMN stream_id SET DATA TYPE BIGINT;
21
22 -- these aren't actually event stream orderings, but they are numbers where 2 billion
23 -- is a bit limiting, application_services_state is tiny, and I don't want to ever have
24 -- to do this again.
25 ALTER TABLE application_services_state ALTER COLUMN last_txn SET DATA TYPE BIGINT;
26 ALTER TABLE application_services_state ALTER COLUMN read_receipt_stream_id SET DATA TYPE BIGINT;
27 ALTER TABLE application_services_state ALTER COLUMN presence_stream_id SET DATA TYPE BIGINT;
28
29
1111 # See the License for the specific language governing permissions and
1212 # limitations under the License.
1313
14 import logging
1415 import threading
16 import weakref
1517 from functools import wraps
1618 from typing import (
19 TYPE_CHECKING,
1720 Any,
1821 Callable,
1922 Collection,
3033
3134 from typing_extensions import Literal
3235
36 from twisted.internet import reactor
37
3338 from synapse.config import cache as cache_config
34 from synapse.util import caches
39 from synapse.metrics.background_process_metrics import wrap_as_background_process
40 from synapse.util import Clock, caches
3541 from synapse.util.caches import CacheMetric, register_cache
3642 from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
43 from synapse.util.linked_list import ListNode
44
45 if TYPE_CHECKING:
46 from synapse.server import HomeServer
47
48 logger = logging.getLogger(__name__)
3749
3850 try:
3951 from pympler.asizeof import Asizer
8193 yield m
8294
8395
96 P = TypeVar("P")
97
98
99 class _TimedListNode(ListNode[P]):
100 """A `ListNode` that tracks last access time."""
101
102 __slots__ = ["last_access_ts_secs"]
103
104 def update_last_access(self, clock: Clock):
105 self.last_access_ts_secs = int(clock.time())
106
107
108 # Whether to insert new cache entries to the global list. We only add to it if
109 # time based eviction is enabled.
110 USE_GLOBAL_LIST = False
111
112 # A linked list of all cache entries, allowing efficient time based eviction.
113 GLOBAL_ROOT = ListNode["_Node"].create_root_node()
114
115
116 @wrap_as_background_process("LruCache._expire_old_entries")
117 async def _expire_old_entries(clock: Clock, expiry_seconds: int):
118 """Walks the global cache list to find cache entries that haven't been
119 accessed in the given number of seconds.
120 """
121
122 now = int(clock.time())
123 node = GLOBAL_ROOT.prev_node
124 assert node is not None
125
126 i = 0
127
128 logger.debug("Searching for stale caches")
129
130 while node is not GLOBAL_ROOT:
131 # Only the root node isn't a `_TimedListNode`.
132 assert isinstance(node, _TimedListNode)
133
134 if node.last_access_ts_secs > now - expiry_seconds:
135 break
136
137 cache_entry = node.get_cache_entry()
138 next_node = node.prev_node
139
140 # The node should always have a reference to a cache entry and a valid
141 # `prev_node`, as we only drop them when we remove the node from the
142 # list.
143 assert next_node is not None
144 assert cache_entry is not None
145 cache_entry.drop_from_cache()
146
147 # If we do lots of work at once we yield to allow other stuff to happen.
148 if (i + 1) % 10000 == 0:
149 logger.debug("Waiting during drop")
150 await clock.sleep(0)
151 logger.debug("Waking during drop")
152
153 node = next_node
154
155 # If we've yielded then our current node may have been evicted, so we
156 # need to check that its still valid.
157 if node.prev_node is None:
158 break
159
160 i += 1
161
162 logger.info("Dropped %d items from caches", i)
163
164
165 def setup_expire_lru_cache_entries(hs: "HomeServer"):
166 """Start a background job that expires all cache entries if they have not
167 been accessed for the given number of seconds.
168 """
169 if not hs.config.caches.expiry_time_msec:
170 return
171
172 logger.info(
173 "Expiring LRU caches after %d seconds", hs.config.caches.expiry_time_msec / 1000
174 )
175
176 global USE_GLOBAL_LIST
177 USE_GLOBAL_LIST = True
178
179 clock = hs.get_clock()
180 clock.looping_call(
181 _expire_old_entries, 30 * 1000, clock, hs.config.caches.expiry_time_msec / 1000
182 )
183
184
84185 class _Node:
85 __slots__ = ["prev_node", "next_node", "key", "value", "callbacks", "memory"]
186 __slots__ = [
187 "_list_node",
188 "_global_list_node",
189 "_cache",
190 "key",
191 "value",
192 "callbacks",
193 "memory",
194 ]
86195
87196 def __init__(
88197 self,
89 prev_node,
90 next_node,
198 root: "ListNode[_Node]",
91199 key,
92200 value,
201 cache: "weakref.ReferenceType[LruCache]",
202 clock: Clock,
93203 callbacks: Collection[Callable[[], None]] = (),
94204 ):
95 self.prev_node = prev_node
96 self.next_node = next_node
205 self._list_node = ListNode.insert_after(self, root)
206 self._global_list_node = None
207 if USE_GLOBAL_LIST:
208 self._global_list_node = _TimedListNode.insert_after(self, GLOBAL_ROOT)
209 self._global_list_node.update_last_access(clock)
210
211 # We store a weak reference to the cache object so that this _Node can
212 # remove itself from the cache. If the cache is dropped we ensure we
213 # remove our entries in the lists.
214 self._cache = cache
215
97216 self.key = key
98217 self.value = value
99218
115234 self.memory = (
116235 _get_size_of(key)
117236 + _get_size_of(value)
237 + _get_size_of(self._list_node, recurse=False)
118238 + _get_size_of(self.callbacks, recurse=False)
119239 + _get_size_of(self, recurse=False)
120240 )
121241 self.memory += _get_size_of(self.memory, recurse=False)
122242
243 if self._global_list_node:
244 self.memory += _get_size_of(self._global_list_node, recurse=False)
245 self.memory += _get_size_of(self._global_list_node.last_access_ts_secs)
246
123247 def add_callbacks(self, callbacks: Collection[Callable[[], None]]) -> None:
124248 """Add to stored list of callbacks, removing duplicates."""
125249
145269 callback()
146270
147271 self.callbacks = None
272
273 def drop_from_cache(self) -> None:
274 """Drop this node from the cache.
275
276 Ensures that the entry gets removed from the cache and that we get
277 removed from all lists.
278 """
279 cache = self._cache()
280 if not cache or not cache.pop(self.key, None):
281 # `cache.pop` should call `drop_from_lists()`, unless this Node had
282 # already been removed from the cache.
283 self.drop_from_lists()
284
285 def drop_from_lists(self) -> None:
286 """Remove this node from the cache lists."""
287 self._list_node.remove_from_list()
288
289 if self._global_list_node:
290 self._global_list_node.remove_from_list()
291
292 def move_to_front(self, clock: Clock, cache_list_root: ListNode) -> None:
293 """Moves this node to the front of all the lists its in."""
294 self._list_node.move_after(cache_list_root)
295 if self._global_list_node:
296 self._global_list_node.move_after(GLOBAL_ROOT)
297 self._global_list_node.update_last_access(clock)
148298
149299
150300 class LruCache(Generic[KT, VT]):
162312 size_callback: Optional[Callable] = None,
163313 metrics_collection_callback: Optional[Callable[[], None]] = None,
164314 apply_cache_factor_from_config: bool = True,
315 clock: Optional[Clock] = None,
165316 ):
166317 """
167318 Args:
187338 apply_cache_factor_from_config (bool): If true, `max_size` will be
188339 multiplied by a cache factor derived from the homeserver config
189340 """
341 # Default `clock` to something sensible. Note that we rename it to
342 # `real_clock` so that mypy doesn't think its still `Optional`.
343 if clock is None:
344 real_clock = Clock(reactor)
345 else:
346 real_clock = clock
347
190348 cache = cache_type()
191349 self.cache = cache # Used for introspection.
192350 self.apply_cache_factor_from_config = apply_cache_factor_from_config
218376 # this is exposed for access from outside this class
219377 self.metrics = metrics
220378
221 list_root = _Node(None, None, None, None)
222 list_root.next_node = list_root
223 list_root.prev_node = list_root
379 # We create a single weakref to self here so that we don't need to keep
380 # creating more each time we create a `_Node`.
381 weak_ref_to_self = weakref.ref(self)
382
383 list_root = ListNode[_Node].create_root_node()
224384
225385 lock = threading.Lock()
226386
227387 def evict():
228388 while cache_len() > self.max_size:
389 # Get the last node in the list (i.e. the oldest node).
229390 todelete = list_root.prev_node
230 evicted_len = delete_node(todelete)
231 cache.pop(todelete.key, None)
391
392 # The list root should always have a valid `prev_node` if the
393 # cache is not empty.
394 assert todelete is not None
395
396 # The node should always have a reference to a cache entry, as
397 # we only drop the cache entry when we remove the node from the
398 # list.
399 node = todelete.get_cache_entry()
400 assert node is not None
401
402 evicted_len = delete_node(node)
403 cache.pop(node.key, None)
232404 if metrics:
233405 metrics.inc_evictions(evicted_len)
234406
254426 self.len = synchronized(cache_len)
255427
256428 def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):
257 prev_node = list_root
258 next_node = prev_node.next_node
259 node = _Node(prev_node, next_node, key, value, callbacks)
260 prev_node.next_node = node
261 next_node.prev_node = node
429 node = _Node(list_root, key, value, weak_ref_to_self, real_clock, callbacks)
262430 cache[key] = node
263431
264432 if size_callback:
267435 if caches.TRACK_MEMORY_USAGE and metrics:
268436 metrics.inc_memory_usage(node.memory)
269437
270 def move_node_to_front(node):
271 prev_node = node.prev_node
272 next_node = node.next_node
273 prev_node.next_node = next_node
274 next_node.prev_node = prev_node
275 prev_node = list_root
276 next_node = prev_node.next_node
277 node.prev_node = prev_node
278 node.next_node = next_node
279 prev_node.next_node = node
280 next_node.prev_node = node
281
282 def delete_node(node):
283 prev_node = node.prev_node
284 next_node = node.next_node
285 prev_node.next_node = next_node
286 next_node.prev_node = prev_node
438 def move_node_to_front(node: _Node):
439 node.move_to_front(real_clock, list_root)
440
441 def delete_node(node: _Node) -> int:
442 node.drop_from_lists()
287443
288444 deleted_len = 1
289445 if size_callback:
410566
411567 @synchronized
412568 def cache_clear() -> None:
413 list_root.next_node = list_root
414 list_root.prev_node = list_root
415569 for node in cache.values():
416570 node.run_and_clear_callbacks()
571 node.drop_from_lists()
572
573 assert list_root.next_node == list_root
574 assert list_root.prev_node == list_root
575
417576 cache.clear()
418577 if size_callback:
419578 cached_cache_len[0] = 0
483642 self._on_resize()
484643 return True
485644 return False
645
646 def __del__(self) -> None:
647 # We're about to be deleted, so we make sure to clear up all the nodes
648 # and run callbacks, etc.
649 #
650 # This happens e.g. in the sync code where we have an expiring cache of
651 # lru caches.
652 self.clear()
0 # Copyright 2021 The Matrix.org Foundation C.I.C.
1 #
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at
5 #
6 # http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """A circular doubly linked list implementation.
15 """
16
17 import threading
18 from typing import Generic, Optional, Type, TypeVar
19
20 P = TypeVar("P")
21 LN = TypeVar("LN", bound="ListNode")
22
23
24 class ListNode(Generic[P]):
25 """A node in a circular doubly linked list, with an (optional) reference to
26 a cache entry.
27
28 The reference should only be `None` for the root node or if the node has
29 been removed from the list.
30 """
31
32 # A lock to protect mutating the list prev/next pointers.
33 _LOCK = threading.Lock()
34
35 # We don't use attrs here as in py3.6 you can't have `attr.s(slots=True)`
36 # and inherit from `Generic` for some reason
37 __slots__ = [
38 "cache_entry",
39 "prev_node",
40 "next_node",
41 ]
42
43 def __init__(self, cache_entry: Optional[P] = None) -> None:
44 self.cache_entry = cache_entry
45 self.prev_node: Optional[ListNode[P]] = None
46 self.next_node: Optional[ListNode[P]] = None
47
48 @classmethod
49 def create_root_node(cls: Type["ListNode[P]"]) -> "ListNode[P]":
50 """Create a new linked list by creating a "root" node, which is a node
51 that has prev_node/next_node pointing to itself and no associated cache
52 entry.
53 """
54 root = cls()
55 root.prev_node = root
56 root.next_node = root
57 return root
58
59 @classmethod
60 def insert_after(
61 cls: Type[LN],
62 cache_entry: P,
63 node: "ListNode[P]",
64 ) -> LN:
65 """Create a new list node that is placed after the given node.
66
67 Args:
68 cache_entry: The associated cache entry.
69 node: The existing node in the list to insert the new entry after.
70 """
71 new_node = cls(cache_entry)
72 with cls._LOCK:
73 new_node._refs_insert_after(node)
74 return new_node
75
76 def remove_from_list(self):
77 """Remove this node from the list."""
78 with self._LOCK:
79 self._refs_remove_node_from_list()
80
81 # We drop the reference to the cache entry to break the reference cycle
82 # between the list node and cache entry, allowing the two to be dropped
83 # immediately rather than at the next GC.
84 self.cache_entry = None
85
86 def move_after(self, node: "ListNode"):
87 """Move this node from its current location in the list to after the
88 given node.
89 """
90 with self._LOCK:
91 # We assert that both this node and the target node is still "alive".
92 assert self.prev_node
93 assert self.next_node
94 assert node.prev_node
95 assert node.next_node
96
97 assert self is not node
98
99 # Remove self from the list
100 self._refs_remove_node_from_list()
101
102 # Insert self back into the list, after target node
103 self._refs_insert_after(node)
104
105 def _refs_remove_node_from_list(self):
106 """Internal method to *just* remove the node from the list, without
107 e.g. clearing out the cache entry.
108 """
109 if self.prev_node is None or self.next_node is None:
110 # We've already been removed from the list.
111 return
112
113 prev_node = self.prev_node
114 next_node = self.next_node
115
116 prev_node.next_node = next_node
117 next_node.prev_node = prev_node
118
119 # We set these to None so that we don't get circular references,
120 # allowing us to be dropped without having to go via the GC.
121 self.prev_node = None
122 self.next_node = None
123
124 def _refs_insert_after(self, node: "ListNode"):
125 """Internal method to insert the node after the given node."""
126
127 # This method should only be called when we're not already in the list.
128 assert self.prev_node is None
129 assert self.next_node is None
130
131 # We expect the given node to be in the list and thus have valid
132 # prev/next refs.
133 assert node.next_node
134 assert node.prev_node
135
136 prev_node = node
137 next_node = node.next_node
138
139 self.prev_node = prev_node
140 self.next_node = next_node
141
142 prev_node.next_node = self
143 next_node.prev_node = self
144
145 def get_cache_entry(self) -> Optional[P]:
146 """Get the cache entry, returns None if this is the root node (i.e.
147 cache_entry is None) or if the entry has been dropped.
148 """
149 return self.cache_entry
4444
4545 # Blacklisted due to changes made in #10272
4646 Outbound federation will ignore a missing event with bad JSON for room version 6
47 Backfilled events whose prev_events are in a different room do not allow cross-room back-pagination
4847 Federation rejects inbound events where the prev_events cannot be found
5757 user_id=self.test_user, token_id=5, device_id="device"
5858 )
5959 self.store.get_user_by_access_token = simple_async_mock(user_info)
60 self.store.mark_access_token_as_used = simple_async_mock(None)
6061
6162 request = Mock(args={})
6263 request.args[b"access_token"] = [self.test_token]
204204
205205 # Have this homeserver skip event auth checks. This is necessary due to
206206 # event auth checks ensuring that events were signed by the sender's homeserver.
207 async def _check_event_auth(
208 origin, event, context, state, auth_events, backfilled
209 ):
207 async def _check_event_auth(origin, event, context, *args, **kwargs):
210208 return context
211209
212210 homeserver.get_federation_handler()._check_event_auth = _check_event_auth
256256 self.assertEqual(device_data, {"device_data": {"foo": "bar"}})
257257
258258 # Create a new login for the user and dehydrated the device
259 device_id, access_token = self.get_success(
259 device_id, access_token, _expiration_time, _refresh_token = self.get_success(
260260 self.registration.register_device(
261261 user_id=user_id,
262262 device_id=None,
250250 join_event.signatures[other_server] = {"x": "y"}
251251 with LoggingContext("send_join"):
252252 d = run_in_background(
253 self.handler.on_send_join_request, other_server, join_event
253 self.handler.on_send_membership_event, other_server, join_event
254254 )
255255 self.get_success(d)
256256
733733
734734 self.store = hs.get_datastore()
735735 self.state = hs.get_state_handler()
736 self.auth = hs.get_auth()
736 self._event_auth_handler = hs.get_event_auth_handler()
737737
738738 # We don't actually check signatures in tests, so lets just create a
739739 # random key to use.
845845
846846 builder = EventBuilder(
847847 state=self.state,
848 auth=self.auth,
848 event_auth_handler=self._event_auth_handler,
849849 store=self.store,
850850 clock=self.clock,
851851 hostname=hostname,
1818 from synapse.api.errors import Codes, ResourceLimitError, SynapseError
1919 from synapse.events.spamcheck import load_legacy_spam_checkers
2020 from synapse.spam_checker_api import RegistrationBehaviour
21 from synapse.types import RoomAlias, UserID, create_requester
21 from synapse.types import RoomAlias, RoomID, UserID, create_requester
2222
2323 from tests.test_utils import make_awaitable
2424 from tests.unittest import override_config
718718 )
719719
720720 return user_id, token
721
722
723 class RemoteAutoJoinTestCase(unittest.HomeserverTestCase):
724 """Tests auto-join on remote rooms."""
725
726 def make_homeserver(self, reactor, clock):
727 self.room_id = "!roomid:remotetest"
728
729 async def update_membership(*args, **kwargs):
730 pass
731
732 async def lookup_room_alias(*args, **kwargs):
733 return RoomID.from_string(self.room_id), ["remotetest"]
734
735 self.room_member_handler = Mock(spec=["update_membership", "lookup_room_alias"])
736 self.room_member_handler.update_membership.side_effect = update_membership
737 self.room_member_handler.lookup_room_alias.side_effect = lookup_room_alias
738
739 hs = self.setup_test_homeserver(room_member_handler=self.room_member_handler)
740 return hs
741
742 def prepare(self, reactor, clock, hs):
743 self.handler = self.hs.get_registration_handler()
744 self.store = self.hs.get_datastore()
745
746 @override_config({"auto_join_rooms": ["#room:remotetest"]})
747 def test_auto_create_auto_join_remote_room(self):
748 """Tests that we don't attempt to create remote rooms, and that we don't attempt
749 to invite ourselves to rooms we're not in."""
750
751 # Register a first user; this should call _create_and_join_rooms
752 self.get_success(self.handler.register_user(localpart="jeff"))
753
754 _, kwargs = self.room_member_handler.update_membership.call_args
755
756 self.assertEqual(kwargs["room_id"], self.room_id)
757 self.assertEqual(kwargs["action"], "join")
758 self.assertEqual(kwargs["remote_room_hosts"], ["remotetest"])
759
760 # Register a second user; this should call _join_rooms
761 self.get_success(self.handler.register_user(localpart="jeff2"))
762
763 _, kwargs = self.room_member_handler.update_membership.call_args
764
765 self.assertEqual(kwargs["room_id"], self.room_id)
766 self.assertEqual(kwargs["action"], "join")
767 self.assertEqual(kwargs["remote_room_hosts"], ["remotetest"])
1313 from typing import Any, Iterable, Optional, Tuple
1414 from unittest import mock
1515
16 from synapse.api.constants import EventContentFields, RoomTypes
1617 from synapse.api.errors import AuthError
1718 from synapse.handlers.space_summary import _child_events_comparison_key
1819 from synapse.rest import admin
9697 self.hs = hs
9798 self.handler = self.hs.get_space_summary_handler()
9899
100 # Create a user.
99101 self.user = self.register_user("user", "pass")
100102 self.token = self.login("user", "pass")
103
104 # Create a space and a child room.
105 self.space = self.helper.create_room_as(
106 self.user,
107 tok=self.token,
108 extra_content={
109 "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE}
110 },
111 )
112 self.room = self.helper.create_room_as(self.user, tok=self.token)
113 self._add_child(self.space, self.room, self.token)
101114
102115 def _add_child(self, space_id: str, room_id: str, token: str) -> None:
103116 """Add a child room to a space."""
127140
128141 def test_simple_space(self):
129142 """Test a simple space with a single room."""
130 space = self.helper.create_room_as(self.user, tok=self.token)
131 room = self.helper.create_room_as(self.user, tok=self.token)
132 self._add_child(space, room, self.token)
133
134 result = self.get_success(self.handler.get_space_summary(self.user, space))
143 result = self.get_success(self.handler.get_space_summary(self.user, self.space))
135144 # The result should have the space and the room in it, along with a link
136145 # from space -> room.
137 self._assert_rooms(result, [space, room])
138 self._assert_events(result, [(space, room)])
146 self._assert_rooms(result, [self.space, self.room])
147 self._assert_events(result, [(self.space, self.room)])
139148
140149 def test_visibility(self):
141150 """A user not in a space cannot inspect it."""
142 space = self.helper.create_room_as(self.user, tok=self.token)
143 room = self.helper.create_room_as(self.user, tok=self.token)
144 self._add_child(space, room, self.token)
145
146151 user2 = self.register_user("user2", "pass")
147152 token2 = self.login("user2", "pass")
148153
149154 # The user cannot see the space.
150 self.get_failure(self.handler.get_space_summary(user2, space), AuthError)
155 self.get_failure(self.handler.get_space_summary(user2, self.space), AuthError)
151156
152157 # Joining the room causes it to be visible.
153 self.helper.join(space, user2, tok=token2)
154 result = self.get_success(self.handler.get_space_summary(user2, space))
158 self.helper.join(self.space, user2, tok=token2)
159 result = self.get_success(self.handler.get_space_summary(user2, self.space))
155160
156161 # The result should only have the space, but includes the link to the room.
157 self._assert_rooms(result, [space])
158 self._assert_events(result, [(space, room)])
162 self._assert_rooms(result, [self.space])
163 self._assert_events(result, [(self.space, self.room)])
159164
160165 def test_world_readable(self):
161166 """A world-readable room is visible to everyone."""
162 space = self.helper.create_room_as(self.user, tok=self.token)
163 room = self.helper.create_room_as(self.user, tok=self.token)
164 self._add_child(space, room, self.token)
165167 self.helper.send_state(
166 space,
168 self.space,
167169 event_type="m.room.history_visibility",
168170 body={"history_visibility": "world_readable"},
169171 tok=self.token,
172174 user2 = self.register_user("user2", "pass")
173175
174176 # The space should be visible, as well as the link to the room.
175 result = self.get_success(self.handler.get_space_summary(user2, space))
176 self._assert_rooms(result, [space])
177 self._assert_events(result, [(space, room)])
177 result = self.get_success(self.handler.get_space_summary(user2, self.space))
178 self._assert_rooms(result, [self.space])
179 self._assert_events(result, [(self.space, self.room)])
227227 builder.build(prev_event_ids=prev_event_ids, auth_event_ids=None)
228228 )
229229
230 self.get_success(federation.on_send_join_request(remote_server, join_event))
230 self.get_success(federation.on_send_membership_event(remote_server, join_event))
231231 self.replicate()
232232
233233 return room
938938 """
939939 channel = self.make_request("POST", self.url, b"{}")
940940
941 self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
941 self.assertEqual(401, channel.code, msg=channel.json_body)
942942 self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
943943
944944 def test_requester_is_not_admin(self):
949949
950950 channel = self.make_request("POST", url, access_token=self.other_user_token)
951951
952 self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
952 self.assertEqual(403, channel.code, msg=channel.json_body)
953953 self.assertEqual("You are not a server admin", channel.json_body["error"])
954954
955955 channel = self.make_request(
959959 content=b"{}",
960960 )
961961
962 self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
962 self.assertEqual(403, channel.code, msg=channel.json_body)
963963 self.assertEqual("You are not a server admin", channel.json_body["error"])
964964
965965 def test_user_does_not_exist(self):
989989 access_token=self.admin_user_tok,
990990 )
991991
992 self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
992 self.assertEqual(400, channel.code, msg=channel.json_body)
993993 self.assertEqual(Codes.BAD_JSON, channel.json_body["errcode"])
994994
995995 def test_user_is_not_local(self):
10051005
10061006 def test_deactivate_user_erase_true(self):
10071007 """
1008 Test deactivating an user and set `erase` to `true`
1008 Test deactivating a user and set `erase` to `true`
10091009 """
10101010
10111011 # Get user
10151015 access_token=self.admin_user_tok,
10161016 )
10171017
1018 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1018 self.assertEqual(200, channel.code, msg=channel.json_body)
10191019 self.assertEqual("@user:test", channel.json_body["name"])
10201020 self.assertEqual(False, channel.json_body["deactivated"])
10211021 self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
10221022 self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
10231023 self.assertEqual("User1", channel.json_body["displayname"])
10241024
1025 # Deactivate user
1026 body = json.dumps({"erase": True})
1027
1025 # Deactivate and erase user
10281026 channel = self.make_request(
10291027 "POST",
10301028 self.url,
10311029 access_token=self.admin_user_tok,
1032 content=body.encode(encoding="utf_8"),
1033 )
1034
1035 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1030 content={"erase": True},
1031 )
1032
1033 self.assertEqual(200, channel.code, msg=channel.json_body)
10361034
10371035 # Get user
10381036 channel = self.make_request(
10411039 access_token=self.admin_user_tok,
10421040 )
10431041
1044 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1042 self.assertEqual(200, channel.code, msg=channel.json_body)
10451043 self.assertEqual("@user:test", channel.json_body["name"])
10461044 self.assertEqual(True, channel.json_body["deactivated"])
10471045 self.assertEqual(0, len(channel.json_body["threepids"]))
10521050
10531051 def test_deactivate_user_erase_false(self):
10541052 """
1055 Test deactivating an user and set `erase` to `false`
1053 Test deactivating a user and set `erase` to `false`
10561054 """
10571055
10581056 # Get user
10621060 access_token=self.admin_user_tok,
10631061 )
10641062
1065 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1063 self.assertEqual(200, channel.code, msg=channel.json_body)
10661064 self.assertEqual("@user:test", channel.json_body["name"])
10671065 self.assertEqual(False, channel.json_body["deactivated"])
10681066 self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
10701068 self.assertEqual("User1", channel.json_body["displayname"])
10711069
10721070 # Deactivate user
1073 body = json.dumps({"erase": False})
1074
10751071 channel = self.make_request(
10761072 "POST",
10771073 self.url,
10781074 access_token=self.admin_user_tok,
1079 content=body.encode(encoding="utf_8"),
1075 content={"erase": False},
10801076 )
10811077
10821078 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
10881084 access_token=self.admin_user_tok,
10891085 )
10901086
1091 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1087 self.assertEqual(200, channel.code, msg=channel.json_body)
10921088 self.assertEqual("@user:test", channel.json_body["name"])
10931089 self.assertEqual(True, channel.json_body["deactivated"])
10941090 self.assertEqual(0, len(channel.json_body["threepids"]))
10961092 self.assertEqual("User1", channel.json_body["displayname"])
10971093
10981094 self._is_erased("@user:test", False)
1095
1096 def test_deactivate_user_erase_true_no_profile(self):
1097 """
1098 Test deactivating a user and set `erase` to `true`
1099 if user has no profile information (stored in the database table `profiles`).
1100 """
1101
1102 # Users normally have an entry in `profiles`, but occasionally they are created without one.
1103 # To test deactivation for users without a profile, we delete the profile information for our user.
1104 self.get_success(
1105 self.store.db_pool.simple_delete_one(
1106 table="profiles", keyvalues={"user_id": "user"}
1107 )
1108 )
1109
1110 # Get user
1111 channel = self.make_request(
1112 "GET",
1113 self.url_other_user,
1114 access_token=self.admin_user_tok,
1115 )
1116
1117 self.assertEqual(200, channel.code, msg=channel.json_body)
1118 self.assertEqual("@user:test", channel.json_body["name"])
1119 self.assertEqual(False, channel.json_body["deactivated"])
1120 self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
1121 self.assertIsNone(channel.json_body["avatar_url"])
1122 self.assertIsNone(channel.json_body["displayname"])
1123
1124 # Deactivate and erase user
1125 channel = self.make_request(
1126 "POST",
1127 self.url,
1128 access_token=self.admin_user_tok,
1129 content={"erase": True},
1130 )
1131
1132 self.assertEqual(200, channel.code, msg=channel.json_body)
1133
1134 # Get user
1135 channel = self.make_request(
1136 "GET",
1137 self.url_other_user,
1138 access_token=self.admin_user_tok,
1139 )
1140
1141 self.assertEqual(200, channel.code, msg=channel.json_body)
1142 self.assertEqual("@user:test", channel.json_body["name"])
1143 self.assertEqual(True, channel.json_body["deactivated"])
1144 self.assertEqual(0, len(channel.json_body["threepids"]))
1145 self.assertIsNone(channel.json_body["avatar_url"])
1146 self.assertIsNone(channel.json_body["displayname"])
1147
1148 self._is_erased("@user:test", True)
10991149
11001150 def _is_erased(self, user_id: str, expect: bool) -> None:
11011151 """Assert that the user is erased or not"""
11491199 access_token=self.other_user_token,
11501200 )
11511201
1152 self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
1202 self.assertEqual(403, channel.code, msg=channel.json_body)
11531203 self.assertEqual("You are not a server admin", channel.json_body["error"])
11541204
11551205 channel = self.make_request(
11591209 content=b"{}",
11601210 )
11611211
1162 self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
1212 self.assertEqual(403, channel.code, msg=channel.json_body)
11631213 self.assertEqual("You are not a server admin", channel.json_body["error"])
11641214
11651215 def test_user_does_not_exist(self):
11761226 self.assertEqual(404, channel.code, msg=channel.json_body)
11771227 self.assertEqual("M_NOT_FOUND", channel.json_body["errcode"])
11781228
1229 def test_get_user(self):
1230 """
1231 Test a simple get of a user.
1232 """
1233 channel = self.make_request(
1234 "GET",
1235 self.url_other_user,
1236 access_token=self.admin_user_tok,
1237 )
1238
1239 self.assertEqual(200, channel.code, msg=channel.json_body)
1240 self.assertEqual("@user:test", channel.json_body["name"])
1241 self.assertEqual("User", channel.json_body["displayname"])
1242 self._check_fields(channel.json_body)
1243
1244 def test_get_user_with_sso(self):
1245 """
1246 Test get a user with SSO details.
1247 """
1248 self.get_success(
1249 self.store.record_user_external_id(
1250 "auth_provider1", "external_id1", self.other_user
1251 )
1252 )
1253 self.get_success(
1254 self.store.record_user_external_id(
1255 "auth_provider2", "external_id2", self.other_user
1256 )
1257 )
1258
1259 channel = self.make_request(
1260 "GET",
1261 self.url_other_user,
1262 access_token=self.admin_user_tok,
1263 )
1264
1265 self.assertEqual(200, channel.code, msg=channel.json_body)
1266 self.assertEqual("@user:test", channel.json_body["name"])
1267 self.assertEqual(
1268 "external_id1", channel.json_body["external_ids"][0]["external_id"]
1269 )
1270 self.assertEqual(
1271 "auth_provider1", channel.json_body["external_ids"][0]["auth_provider"]
1272 )
1273 self.assertEqual(
1274 "external_id2", channel.json_body["external_ids"][1]["external_id"]
1275 )
1276 self.assertEqual(
1277 "auth_provider2", channel.json_body["external_ids"][1]["auth_provider"]
1278 )
1279 self._check_fields(channel.json_body)
1280
11791281 def test_create_server_admin(self):
11801282 """
11811283 Check that a new admin user is created successfully.
11831285 url = "/_synapse/admin/v2/users/@bob:test"
11841286
11851287 # Create user (server admin)
1186 body = json.dumps(
1187 {
1188 "password": "abc123",
1189 "admin": True,
1190 "displayname": "Bob's name",
1191 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1192 "avatar_url": "mxc://fibble/wibble",
1193 }
1194 )
1288 body = {
1289 "password": "abc123",
1290 "admin": True,
1291 "displayname": "Bob's name",
1292 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1293 "avatar_url": "mxc://fibble/wibble",
1294 }
11951295
11961296 channel = self.make_request(
11971297 "PUT",
11981298 url,
11991299 access_token=self.admin_user_tok,
1200 content=body.encode(encoding="utf_8"),
1201 )
1202
1203 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1300 content=body,
1301 )
1302
1303 self.assertEqual(201, channel.code, msg=channel.json_body)
12041304 self.assertEqual("@bob:test", channel.json_body["name"])
12051305 self.assertEqual("Bob's name", channel.json_body["displayname"])
12061306 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
12071307 self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
12081308 self.assertTrue(channel.json_body["admin"])
12091309 self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
1310 self._check_fields(channel.json_body)
12101311
12111312 # Get user
12121313 channel = self.make_request(
12151316 access_token=self.admin_user_tok,
12161317 )
12171318
1218 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1319 self.assertEqual(200, channel.code, msg=channel.json_body)
12191320 self.assertEqual("@bob:test", channel.json_body["name"])
12201321 self.assertEqual("Bob's name", channel.json_body["displayname"])
12211322 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
12241325 self.assertFalse(channel.json_body["is_guest"])
12251326 self.assertFalse(channel.json_body["deactivated"])
12261327 self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
1328 self._check_fields(channel.json_body)
12271329
12281330 def test_create_user(self):
12291331 """
12321334 url = "/_synapse/admin/v2/users/@bob:test"
12331335
12341336 # Create user
1235 body = json.dumps(
1236 {
1237 "password": "abc123",
1238 "admin": False,
1239 "displayname": "Bob's name",
1240 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1241 "avatar_url": "mxc://fibble/wibble",
1242 }
1243 )
1337 body = {
1338 "password": "abc123",
1339 "admin": False,
1340 "displayname": "Bob's name",
1341 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1342 "avatar_url": "mxc://fibble/wibble",
1343 }
12441344
12451345 channel = self.make_request(
12461346 "PUT",
12471347 url,
12481348 access_token=self.admin_user_tok,
1249 content=body.encode(encoding="utf_8"),
1250 )
1251
1252 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1349 content=body,
1350 )
1351
1352 self.assertEqual(201, channel.code, msg=channel.json_body)
12531353 self.assertEqual("@bob:test", channel.json_body["name"])
12541354 self.assertEqual("Bob's name", channel.json_body["displayname"])
12551355 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
12561356 self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
12571357 self.assertFalse(channel.json_body["admin"])
12581358 self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
1359 self._check_fields(channel.json_body)
12591360
12601361 # Get user
12611362 channel = self.make_request(
12641365 access_token=self.admin_user_tok,
12651366 )
12661367
1267 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1368 self.assertEqual(200, channel.code, msg=channel.json_body)
12681369 self.assertEqual("@bob:test", channel.json_body["name"])
12691370 self.assertEqual("Bob's name", channel.json_body["displayname"])
12701371 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
12741375 self.assertFalse(channel.json_body["deactivated"])
12751376 self.assertFalse(channel.json_body["shadow_banned"])
12761377 self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
1378 self._check_fields(channel.json_body)
12771379
12781380 @override_config(
12791381 {"limit_usage_by_mau": True, "max_mau_value": 2, "mau_trial_days": 0}
13101412 url = "/_synapse/admin/v2/users/@bob:test"
13111413
13121414 # Create user
1313 body = json.dumps({"password": "abc123", "admin": False})
1314
13151415 channel = self.make_request(
13161416 "PUT",
13171417 url,
13181418 access_token=self.admin_user_tok,
1319 content=body.encode(encoding="utf_8"),
1320 )
1321
1322 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1419 content={"password": "abc123", "admin": False},
1420 )
1421
1422 self.assertEqual(201, channel.code, msg=channel.json_body)
13231423 self.assertEqual("@bob:test", channel.json_body["name"])
13241424 self.assertFalse(channel.json_body["admin"])
13251425
13491449 url = "/_synapse/admin/v2/users/@bob:test"
13501450
13511451 # Create user
1352 body = json.dumps({"password": "abc123", "admin": False})
1353
13541452 channel = self.make_request(
13551453 "PUT",
13561454 url,
13571455 access_token=self.admin_user_tok,
1358 content=body.encode(encoding="utf_8"),
1456 content={"password": "abc123", "admin": False},
13591457 )
13601458
13611459 # Admin user is not blocked by mau anymore
1362 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1460 self.assertEqual(201, channel.code, msg=channel.json_body)
13631461 self.assertEqual("@bob:test", channel.json_body["name"])
13641462 self.assertFalse(channel.json_body["admin"])
13651463
13811479 url = "/_synapse/admin/v2/users/@bob:test"
13821480
13831481 # Create user
1384 body = json.dumps(
1385 {
1386 "password": "abc123",
1387 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1388 }
1389 )
1482 body = {
1483 "password": "abc123",
1484 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1485 }
13901486
13911487 channel = self.make_request(
13921488 "PUT",
13931489 url,
13941490 access_token=self.admin_user_tok,
1395 content=body.encode(encoding="utf_8"),
1396 )
1397
1398 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1491 content=body,
1492 )
1493
1494 self.assertEqual(201, channel.code, msg=channel.json_body)
13991495 self.assertEqual("@bob:test", channel.json_body["name"])
14001496 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
14011497 self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
14251521 url = "/_synapse/admin/v2/users/@bob:test"
14261522
14271523 # Create user
1428 body = json.dumps(
1429 {
1430 "password": "abc123",
1431 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1432 }
1433 )
1524 body = {
1525 "password": "abc123",
1526 "threepids": [{"medium": "email", "address": "bob@bob.bob"}],
1527 }
14341528
14351529 channel = self.make_request(
14361530 "PUT",
14371531 url,
14381532 access_token=self.admin_user_tok,
1439 content=body.encode(encoding="utf_8"),
1440 )
1441
1442 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1533 content=body,
1534 )
1535
1536 self.assertEqual(201, channel.code, msg=channel.json_body)
14431537 self.assertEqual("@bob:test", channel.json_body["name"])
14441538 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
14451539 self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
14561550 """
14571551
14581552 # Change password
1459 body = json.dumps({"password": "hahaha"})
1460
14611553 channel = self.make_request(
14621554 "PUT",
14631555 self.url_other_user,
14641556 access_token=self.admin_user_tok,
1465 content=body.encode(encoding="utf_8"),
1466 )
1467
1468 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1557 content={"password": "hahaha"},
1558 )
1559
1560 self.assertEqual(200, channel.code, msg=channel.json_body)
1561 self._check_fields(channel.json_body)
14691562
14701563 def test_set_displayname(self):
14711564 """
14731566 """
14741567
14751568 # Modify user
1476 body = json.dumps({"displayname": "foobar"})
1477
14781569 channel = self.make_request(
14791570 "PUT",
14801571 self.url_other_user,
14811572 access_token=self.admin_user_tok,
1482 content=body.encode(encoding="utf_8"),
1483 )
1484
1485 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1573 content={"displayname": "foobar"},
1574 )
1575
1576 self.assertEqual(200, channel.code, msg=channel.json_body)
14861577 self.assertEqual("@user:test", channel.json_body["name"])
14871578 self.assertEqual("foobar", channel.json_body["displayname"])
14881579
14931584 access_token=self.admin_user_tok,
14941585 )
14951586
1496 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1587 self.assertEqual(200, channel.code, msg=channel.json_body)
14971588 self.assertEqual("@user:test", channel.json_body["name"])
14981589 self.assertEqual("foobar", channel.json_body["displayname"])
14991590
15031594 """
15041595
15051596 # Delete old and add new threepid to user
1506 body = json.dumps(
1507 {"threepids": [{"medium": "email", "address": "bob3@bob.bob"}]}
1508 )
1509
15101597 channel = self.make_request(
15111598 "PUT",
15121599 self.url_other_user,
15131600 access_token=self.admin_user_tok,
1514 content=body.encode(encoding="utf_8"),
1515 )
1516
1517 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1601 content={"threepids": [{"medium": "email", "address": "bob3@bob.bob"}]},
1602 )
1603
1604 self.assertEqual(200, channel.code, msg=channel.json_body)
15181605 self.assertEqual("@user:test", channel.json_body["name"])
15191606 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
15201607 self.assertEqual("bob3@bob.bob", channel.json_body["threepids"][0]["address"])
15261613 access_token=self.admin_user_tok,
15271614 )
15281615
1529 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1616 self.assertEqual(200, channel.code, msg=channel.json_body)
15301617 self.assertEqual("@user:test", channel.json_body["name"])
15311618 self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
15321619 self.assertEqual("bob3@bob.bob", channel.json_body["threepids"][0]["address"])
15511638 access_token=self.admin_user_tok,
15521639 )
15531640
1554 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1641 self.assertEqual(200, channel.code, msg=channel.json_body)
15551642 self.assertEqual("@user:test", channel.json_body["name"])
15561643 self.assertFalse(channel.json_body["deactivated"])
15571644 self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
15661653 content={"deactivated": True},
15671654 )
15681655
1569 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1656 self.assertEqual(200, channel.code, msg=channel.json_body)
15701657 self.assertEqual("@user:test", channel.json_body["name"])
15711658 self.assertTrue(channel.json_body["deactivated"])
15721659 self.assertIsNone(channel.json_body["password_hash"])
15821669 access_token=self.admin_user_tok,
15831670 )
15841671
1585 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1672 self.assertEqual(200, channel.code, msg=channel.json_body)
15861673 self.assertEqual("@user:test", channel.json_body["name"])
15871674 self.assertTrue(channel.json_body["deactivated"])
15881675 self.assertIsNone(channel.json_body["password_hash"])
16091696 content={"deactivated": True},
16101697 )
16111698
1612 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1699 self.assertEqual(200, channel.code, msg=channel.json_body)
16131700 self.assertEqual("@user:test", channel.json_body["name"])
16141701 self.assertTrue(channel.json_body["deactivated"])
16151702
16251712 content={"displayname": "Foobar"},
16261713 )
16271714
1628 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1715 self.assertEqual(200, channel.code, msg=channel.json_body)
16291716 self.assertEqual("@user:test", channel.json_body["name"])
16301717 self.assertTrue(channel.json_body["deactivated"])
16311718 self.assertEqual("Foobar", channel.json_body["displayname"])
16491736 access_token=self.admin_user_tok,
16501737 content={"deactivated": False},
16511738 )
1652 self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
1739 self.assertEqual(400, channel.code, msg=channel.json_body)
16531740
16541741 # Reactivate the user.
16551742 channel = self.make_request(
16581745 access_token=self.admin_user_tok,
16591746 content={"deactivated": False, "password": "foo"},
16601747 )
1661 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1748 self.assertEqual(200, channel.code, msg=channel.json_body)
16621749 self.assertEqual("@user:test", channel.json_body["name"])
16631750 self.assertFalse(channel.json_body["deactivated"])
16641751 self.assertIsNotNone(channel.json_body["password_hash"])
16801767 access_token=self.admin_user_tok,
16811768 content={"deactivated": False, "password": "foo"},
16821769 )
1683 self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
1770 self.assertEqual(403, channel.code, msg=channel.json_body)
16841771 self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
16851772
16861773 # Reactivate the user without a password.
16901777 access_token=self.admin_user_tok,
16911778 content={"deactivated": False},
16921779 )
1693 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1780 self.assertEqual(200, channel.code, msg=channel.json_body)
16941781 self.assertEqual("@user:test", channel.json_body["name"])
16951782 self.assertFalse(channel.json_body["deactivated"])
16961783 self.assertIsNone(channel.json_body["password_hash"])
17121799 access_token=self.admin_user_tok,
17131800 content={"deactivated": False, "password": "foo"},
17141801 )
1715 self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
1802 self.assertEqual(403, channel.code, msg=channel.json_body)
17161803 self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
17171804
17181805 # Reactivate the user without a password.
17221809 access_token=self.admin_user_tok,
17231810 content={"deactivated": False},
17241811 )
1725 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1812 self.assertEqual(200, channel.code, msg=channel.json_body)
17261813 self.assertEqual("@user:test", channel.json_body["name"])
17271814 self.assertFalse(channel.json_body["deactivated"])
17281815 self.assertIsNone(channel.json_body["password_hash"])
17411828 content={"admin": True},
17421829 )
17431830
1744 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1831 self.assertEqual(200, channel.code, msg=channel.json_body)
17451832 self.assertEqual("@user:test", channel.json_body["name"])
17461833 self.assertTrue(channel.json_body["admin"])
17471834
17521839 access_token=self.admin_user_tok,
17531840 )
17541841
1755 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1842 self.assertEqual(200, channel.code, msg=channel.json_body)
17561843 self.assertEqual("@user:test", channel.json_body["name"])
17571844 self.assertTrue(channel.json_body["admin"])
17581845
17711858 content={"password": "abc123"},
17721859 )
17731860
1774 self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
1861 self.assertEqual(201, channel.code, msg=channel.json_body)
17751862 self.assertEqual("@bob:test", channel.json_body["name"])
17761863 self.assertEqual("bob", channel.json_body["displayname"])
17771864
17821869 access_token=self.admin_user_tok,
17831870 )
17841871
1785 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1872 self.assertEqual(200, channel.code, msg=channel.json_body)
17861873 self.assertEqual("@bob:test", channel.json_body["name"])
17871874 self.assertEqual("bob", channel.json_body["displayname"])
17881875 self.assertEqual(0, channel.json_body["deactivated"])
17951882 content={"password": "abc123", "deactivated": "false"},
17961883 )
17971884
1798 self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
1885 self.assertEqual(400, channel.code, msg=channel.json_body)
17991886
18001887 # Check user is not deactivated
18011888 channel = self.make_request(
18041891 access_token=self.admin_user_tok,
18051892 )
18061893
1807 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1894 self.assertEqual(200, channel.code, msg=channel.json_body)
18081895 self.assertEqual("@bob:test", channel.json_body["name"])
18091896 self.assertEqual("bob", channel.json_body["displayname"])
18101897
18291916 access_token=self.admin_user_tok,
18301917 content={"deactivated": True},
18311918 )
1832 self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
1919 self.assertEqual(200, channel.code, msg=channel.json_body)
18331920 self.assertTrue(channel.json_body["deactivated"])
18341921 self.assertIsNone(channel.json_body["password_hash"])
18351922 self._is_erased(user_id, False)
18361923 d = self.store.mark_user_erased(user_id)
18371924 self.assertIsNone(self.get_success(d))
18381925 self._is_erased(user_id, True)
1926
1927 def _check_fields(self, content: JsonDict):
1928 """Checks that the expected user attributes are present in content
1929
1930 Args:
1931 content: Content dictionary to check
1932 """
1933 self.assertIn("displayname", content)
1934 self.assertIn("threepids", content)
1935 self.assertIn("avatar_url", content)
1936 self.assertIn("admin", content)
1937 self.assertIn("deactivated", content)
1938 self.assertIn("shadow_banned", content)
1939 self.assertIn("password_hash", content)
1940 self.assertIn("creation_ts", content)
1941 self.assertIn("appservice_id", content)
1942 self.assertIn("consent_server_notice_sent", content)
1943 self.assertIn("consent_version", content)
1944 self.assertIn("external_ids", content)
18391945
18401946
18411947 class UserMembershipRestTestCase(unittest.HomeserverTestCase):
5151 room_version: str = None,
5252 tok: str = None,
5353 expect_code: int = 200,
54 extra_content: Optional[Dict] = None,
5455 ) -> str:
5556 """
5657 Create a room.
7172 temp_id = self.auth_user_id
7273 self.auth_user_id = room_creator
7374 path = "/_matrix/client/r0/createRoom"
74 content = {}
75 content = extra_content or {}
7576 if not is_public:
7677 content["visibility"] = "private"
7778 if room_version:
1919 from synapse.api.constants import LoginType
2020 from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker
2121 from synapse.rest.client.v1 import login
22 from synapse.rest.client.v2_alpha import auth, devices, register
22 from synapse.rest.client.v2_alpha import account, auth, devices, register
2323 from synapse.rest.synapse.client import build_synapse_client_resource_tree
2424 from synapse.types import JsonDict, UserID
2525
497497 self.delete_device(
498498 self.user_tok, self.device_id, 403, body={"auth": {"session": session_id}}
499499 )
500
501
502 class RefreshAuthTests(unittest.HomeserverTestCase):
503 servlets = [
504 auth.register_servlets,
505 account.register_servlets,
506 login.register_servlets,
507 synapse.rest.admin.register_servlets_for_client_rest_resource,
508 register.register_servlets,
509 ]
510 hijack_auth = False
511
512 def prepare(self, reactor, clock, hs):
513 self.user_pass = "pass"
514 self.user = self.register_user("test", self.user_pass)
515
516 def test_login_issue_refresh_token(self):
517 """
518 A login response should include a refresh_token only if asked.
519 """
520 # Test login
521 body = {"type": "m.login.password", "user": "test", "password": self.user_pass}
522
523 login_without_refresh = self.make_request(
524 "POST", "/_matrix/client/r0/login", body
525 )
526 self.assertEqual(login_without_refresh.code, 200, login_without_refresh.result)
527 self.assertNotIn("refresh_token", login_without_refresh.json_body)
528
529 login_with_refresh = self.make_request(
530 "POST",
531 "/_matrix/client/r0/login?org.matrix.msc2918.refresh_token=true",
532 body,
533 )
534 self.assertEqual(login_with_refresh.code, 200, login_with_refresh.result)
535 self.assertIn("refresh_token", login_with_refresh.json_body)
536 self.assertIn("expires_in_ms", login_with_refresh.json_body)
537
538 def test_register_issue_refresh_token(self):
539 """
540 A register response should include a refresh_token only if asked.
541 """
542 register_without_refresh = self.make_request(
543 "POST",
544 "/_matrix/client/r0/register",
545 {
546 "username": "test2",
547 "password": self.user_pass,
548 "auth": {"type": LoginType.DUMMY},
549 },
550 )
551 self.assertEqual(
552 register_without_refresh.code, 200, register_without_refresh.result
553 )
554 self.assertNotIn("refresh_token", register_without_refresh.json_body)
555
556 register_with_refresh = self.make_request(
557 "POST",
558 "/_matrix/client/r0/register?org.matrix.msc2918.refresh_token=true",
559 {
560 "username": "test3",
561 "password": self.user_pass,
562 "auth": {"type": LoginType.DUMMY},
563 },
564 )
565 self.assertEqual(register_with_refresh.code, 200, register_with_refresh.result)
566 self.assertIn("refresh_token", register_with_refresh.json_body)
567 self.assertIn("expires_in_ms", register_with_refresh.json_body)
568
569 def test_token_refresh(self):
570 """
571 A refresh token can be used to issue a new access token.
572 """
573 body = {"type": "m.login.password", "user": "test", "password": self.user_pass}
574 login_response = self.make_request(
575 "POST",
576 "/_matrix/client/r0/login?org.matrix.msc2918.refresh_token=true",
577 body,
578 )
579 self.assertEqual(login_response.code, 200, login_response.result)
580
581 refresh_response = self.make_request(
582 "POST",
583 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
584 {"refresh_token": login_response.json_body["refresh_token"]},
585 )
586 self.assertEqual(refresh_response.code, 200, refresh_response.result)
587 self.assertIn("access_token", refresh_response.json_body)
588 self.assertIn("refresh_token", refresh_response.json_body)
589 self.assertIn("expires_in_ms", refresh_response.json_body)
590
591 # The access and refresh tokens should be different from the original ones after refresh
592 self.assertNotEqual(
593 login_response.json_body["access_token"],
594 refresh_response.json_body["access_token"],
595 )
596 self.assertNotEqual(
597 login_response.json_body["refresh_token"],
598 refresh_response.json_body["refresh_token"],
599 )
600
601 @override_config({"access_token_lifetime": "1m"})
602 def test_refresh_token_expiration(self):
603 """
604 The access token should have some time as specified in the config.
605 """
606 body = {"type": "m.login.password", "user": "test", "password": self.user_pass}
607 login_response = self.make_request(
608 "POST",
609 "/_matrix/client/r0/login?org.matrix.msc2918.refresh_token=true",
610 body,
611 )
612 self.assertEqual(login_response.code, 200, login_response.result)
613 self.assertApproximates(
614 login_response.json_body["expires_in_ms"], 60 * 1000, 100
615 )
616
617 refresh_response = self.make_request(
618 "POST",
619 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
620 {"refresh_token": login_response.json_body["refresh_token"]},
621 )
622 self.assertEqual(refresh_response.code, 200, refresh_response.result)
623 self.assertApproximates(
624 refresh_response.json_body["expires_in_ms"], 60 * 1000, 100
625 )
626
627 def test_refresh_token_invalidation(self):
628 """Refresh tokens are invalidated after first use of the next token.
629
630 A refresh token is considered invalid if:
631 - it was already used at least once
632 - and either
633 - the next access token was used
634 - the next refresh token was used
635
636 The chain of tokens goes like this:
637
638 login -|-> first_refresh -> third_refresh (fails)
639 |-> second_refresh -> fifth_refresh
640 |-> fourth_refresh (fails)
641 """
642
643 body = {"type": "m.login.password", "user": "test", "password": self.user_pass}
644 login_response = self.make_request(
645 "POST",
646 "/_matrix/client/r0/login?org.matrix.msc2918.refresh_token=true",
647 body,
648 )
649 self.assertEqual(login_response.code, 200, login_response.result)
650
651 # This first refresh should work properly
652 first_refresh_response = self.make_request(
653 "POST",
654 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
655 {"refresh_token": login_response.json_body["refresh_token"]},
656 )
657 self.assertEqual(
658 first_refresh_response.code, 200, first_refresh_response.result
659 )
660
661 # This one as well, since the token in the first one was never used
662 second_refresh_response = self.make_request(
663 "POST",
664 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
665 {"refresh_token": login_response.json_body["refresh_token"]},
666 )
667 self.assertEqual(
668 second_refresh_response.code, 200, second_refresh_response.result
669 )
670
671 # This one should not, since the token from the first refresh is not valid anymore
672 third_refresh_response = self.make_request(
673 "POST",
674 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
675 {"refresh_token": first_refresh_response.json_body["refresh_token"]},
676 )
677 self.assertEqual(
678 third_refresh_response.code, 401, third_refresh_response.result
679 )
680
681 # The associated access token should also be invalid
682 whoami_response = self.make_request(
683 "GET",
684 "/_matrix/client/r0/account/whoami",
685 access_token=first_refresh_response.json_body["access_token"],
686 )
687 self.assertEqual(whoami_response.code, 401, whoami_response.result)
688
689 # But all other tokens should work (they will expire after some time)
690 for access_token in [
691 second_refresh_response.json_body["access_token"],
692 login_response.json_body["access_token"],
693 ]:
694 whoami_response = self.make_request(
695 "GET", "/_matrix/client/r0/account/whoami", access_token=access_token
696 )
697 self.assertEqual(whoami_response.code, 200, whoami_response.result)
698
699 # Now that the access token from the last valid refresh was used once, refreshing with the N-1 token should fail
700 fourth_refresh_response = self.make_request(
701 "POST",
702 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
703 {"refresh_token": login_response.json_body["refresh_token"]},
704 )
705 self.assertEqual(
706 fourth_refresh_response.code, 403, fourth_refresh_response.result
707 )
708
709 # But refreshing from the last valid refresh token still works
710 fifth_refresh_response = self.make_request(
711 "POST",
712 "/_matrix/client/unstable/org.matrix.msc2918.refresh_token/refresh",
713 {"refresh_token": second_refresh_response.json_body["refresh_token"]},
714 )
715 self.assertEqual(
716 fifth_refresh_response.code, 200, fifth_refresh_response.result
717 )
4040 channel = self.make_request("GET", "/sync")
4141
4242 self.assertEqual(channel.code, 200)
43 self.assertTrue(
44 {
45 "next_batch",
46 "rooms",
47 "presence",
48 "account_data",
49 "to_device",
50 "device_lists",
51 }.issubset(set(channel.json_body.keys()))
52 )
53
54 def test_sync_presence_disabled(self):
55 """
56 When presence is disabled, the key does not appear in /sync.
57 """
58 self.hs.config.use_presence = False
59
60 channel = self.make_request("GET", "/sync")
61
62 self.assertEqual(channel.code, 200)
63 self.assertTrue(
64 {
65 "next_batch",
66 "rooms",
67 "account_data",
68 "to_device",
69 "device_lists",
70 }.issubset(set(channel.json_body.keys()))
71 )
43 self.assertIn("next_batch", channel.json_body)
7244
7345
7446 class SyncFilterTestCase(unittest.HomeserverTestCase):
305305
306306 channel = self.make_request("GET", "/sync?timeout=0", access_token=tok)
307307
308 invites = channel.json_body["rooms"]["invite"]
309 self.assertEqual(len(invites), 0, invites)
308 self.assertNotIn(
309 "rooms", channel.json_body, "Got invites without server notice"
310 )
310311
311312 def test_invite_with_notice(self):
312313 """Tests that, if the MAU limit is hit, the server notices user invites each user
363364 # We could also pick another user and sync with it, which would return an
364365 # invite to a system notices room, but it doesn't matter which user we're
365366 # using so we use the last one because it saves us an extra sync.
366 invites = channel.json_body["rooms"]["invite"]
367 if "rooms" in channel.json_body:
368 invites = channel.json_body["rooms"]["invite"]
367369
368370 # Make sure we have an invite to process.
369371 self.assertEqual(len(invites), 1, invites)
1414
1515 from unittest.mock import Mock
1616
17 from synapse.util.caches.lrucache import LruCache
17 from synapse.util.caches.lrucache import LruCache, setup_expire_lru_cache_entries
1818 from synapse.util.caches.treecache import TreeCache
1919
2020 from tests import unittest
259259 self.assertEquals(cache["key3"], [3])
260260 self.assertEquals(cache["key4"], [4])
261261 self.assertEquals(cache["key5"], [5, 6])
262
263
264 class TimeEvictionTestCase(unittest.HomeserverTestCase):
265 """Test that time based eviction works correctly."""
266
267 def default_config(self):
268 config = super().default_config()
269
270 config.setdefault("caches", {})["expiry_time"] = "30m"
271
272 return config
273
274 def test_evict(self):
275 setup_expire_lru_cache_entries(self.hs)
276
277 cache = LruCache(5, clock=self.hs.get_clock())
278
279 # Check that we evict entries we haven't accessed for 30 minutes.
280 cache["key1"] = 1
281 cache["key2"] = 2
282
283 self.reactor.advance(20 * 60)
284
285 self.assertEqual(cache.get("key1"), 1)
286
287 self.reactor.advance(20 * 60)
288
289 # We have only touched `key1` in the last 30m, so we expect that to
290 # still be in the cache while `key2` should have been evicted.
291 self.assertEqual(cache.get("key1"), 1)
292 self.assertEqual(cache.get("key2"), None)
293
294 # Check that re-adding an expired key works correctly.
295 cache["key2"] = 3
296 self.assertEqual(cache.get("key2"), 3)
297
298 self.reactor.advance(20 * 60)
299
300 self.assertEqual(cache.get("key2"), 3)
301
302 self.reactor.advance(20 * 60)
303
304 self.assertEqual(cache.get("key1"), None)
305 self.assertEqual(cache.get("key2"), 3)