Codebase list matrix-synapse / 3a94124
Imported Upstream version 0.33.5 Richard van der Hoff 5 years ago
88 changed file(s) with 2128 addition(s) and 1221 deletion(s). Raw diff Collapse all Expand all
88 - store_artifacts:
99 path: ~/project/logs
1010 destination: logs
11 - store_test_results:
12 path: logs
1113 sytestpy2postgres:
1214 machine: true
1315 steps:
1719 - store_artifacts:
1820 path: ~/project/logs
1921 destination: logs
22 - store_test_results:
23 path: logs
24 sytestpy2merged:
25 machine: true
26 steps:
27 - checkout
28 - run: bash .circleci/merge_base_branch.sh
29 - run: docker pull matrixdotorg/sytest-synapsepy2
30 - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
31 - store_artifacts:
32 path: ~/project/logs
33 destination: logs
34 - store_test_results:
35 path: logs
36
37 sytestpy2postgresmerged:
38 machine: true
39 steps:
40 - checkout
41 - run: bash .circleci/merge_base_branch.sh
42 - run: docker pull matrixdotorg/sytest-synapsepy2
43 - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
44 - store_artifacts:
45 path: ~/project/logs
46 destination: logs
47 - store_test_results:
48 path: logs
49
2050 sytestpy3:
2151 machine: true
2252 steps:
2353 - checkout
2454 - run: docker pull matrixdotorg/sytest-synapsepy3
25 - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs hawkowl/sytestpy3
55 - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
2656 - store_artifacts:
2757 path: ~/project/logs
2858 destination: logs
59 - store_test_results:
60 path: logs
2961 sytestpy3postgres:
3062 machine: true
3163 steps:
3567 - store_artifacts:
3668 path: ~/project/logs
3769 destination: logs
70 - store_test_results:
71 path: logs
72 sytestpy3merged:
73 machine: true
74 steps:
75 - checkout
76 - run: bash .circleci/merge_base_branch.sh
77 - run: docker pull matrixdotorg/sytest-synapsepy3
78 - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
79 - store_artifacts:
80 path: ~/project/logs
81 destination: logs
82 - store_test_results:
83 path: logs
84 sytestpy3postgresmerged:
85 machine: true
86 steps:
87 - checkout
88 - run: bash .circleci/merge_base_branch.sh
89 - run: docker pull matrixdotorg/sytest-synapsepy3
90 - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
91 - store_artifacts:
92 path: ~/project/logs
93 destination: logs
94 - store_test_results:
95 path: logs
3896
3997 workflows:
4098 version: 2
42100 jobs:
43101 - sytestpy2
44102 - sytestpy2postgres
45 # Currently broken while the Python 3 port is incomplete
46 # - sytestpy3
47 # - sytestpy3postgres
103 - sytestpy3
104 - sytestpy3postgres
105 - sytestpy2merged:
106 filters:
107 branches:
108 ignore: /develop|master/
109 - sytestpy2postgresmerged:
110 filters:
111 branches:
112 ignore: /develop|master/
113 - sytestpy3merged:
114 filters:
115 branches:
116 ignore: /develop|master/
117 - sytestpy3postgresmerged:
118 filters:
119 branches:
120 ignore: /develop|master/
#!/usr/bin/env bash

# Work out which branch the current PR targets and merge that branch into the
# checkout, so CI runs against the potential merge result rather than the PR
# head alone.

set -e

# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> "$BASH_ENV"
source "$BASH_ENV"

if [[ -z "${CIRCLE_PR_NUMBER}" ]]
then
    echo "Can't figure out what the PR number is!"
    exit 1
fi

# Get the base branch name via the GitHub API.
# Note: the original used `curl -q`, which disables ~/.curlrc rather than
# silencing output; `-s` is the silent flag.
GITBASE=$(curl -s "https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER}" | jq -r '.base.ref')

# Show what we are before
git show -s

# Set up username so it can do a merge
git config --global user.email bot@matrix.org
git config --global user.name "A robot"

# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin "$GITBASE"
git merge --no-edit "origin/$GITBASE"

# Show what we are after.
git show -s
4343 build/
4444 venv/
4545 venv*/
46 *venv/
4647
4748 localhost-800*/
4849 static/client/register/register_config.js
66 before_script:
77 - git remote set-branches --add origin develop
88 - git fetch origin develop
9
10 services:
11 - postgresql
129
1310 matrix:
1411 fast_finish: true
2421
2522 - python: 2.7
2623 env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4"
24 services:
25 - postgresql
26
27 - python: 3.5
28 env: TOX_ENV=py35
2729
2830 - python: 3.6
2931 env: TOX_ENV=py36
0 Synapse 0.33.5 (2018-09-24)
1 ===========================
2
3 No significant changes.
4
5
6 Synapse 0.33.5rc1 (2018-09-17)
7 ==============================
8
9 Features
10 --------
11
12 - Python 3.5 and 3.6 support is now in beta. ([\#3576](https://github.com/matrix-org/synapse/issues/3576))
13 - Implement `event_format` filter param in `/sync` ([\#3790](https://github.com/matrix-org/synapse/issues/3790))
14 - Add synapse_admin_mau:registered_reserved_users metric to expose number of real reserved users ([\#3846](https://github.com/matrix-org/synapse/issues/3846))
15
16
17 Bugfixes
18 --------
19
20 - Remove connection ID for replication prometheus metrics, as it creates a large number of new series. ([\#3788](https://github.com/matrix-org/synapse/issues/3788))
21 - guest users should not be part of mau total ([\#3800](https://github.com/matrix-org/synapse/issues/3800))
22 - Bump dependency on pyopenssl 16.x, to avoid incompatibility with recent Twisted. ([\#3804](https://github.com/matrix-org/synapse/issues/3804))
23 - Fix existing room tags not coming down sync when joining a room ([\#3810](https://github.com/matrix-org/synapse/issues/3810))
24 - Fix jwt import check ([\#3824](https://github.com/matrix-org/synapse/issues/3824))
25 - fix VOIP crashes under Python 3 (#3821) ([\#3835](https://github.com/matrix-org/synapse/issues/3835))
26 - Fix manhole so that it works with latest openssh clients ([\#3841](https://github.com/matrix-org/synapse/issues/3841))
27 - Fix outbound requests occasionally wedging, which can result in federation breaking between servers. ([\#3845](https://github.com/matrix-org/synapse/issues/3845))
28 - Show heroes if room name/canonical alias has been deleted ([\#3851](https://github.com/matrix-org/synapse/issues/3851))
29 - Fix handling of redacted events from federation ([\#3859](https://github.com/matrix-org/synapse/issues/3859))
30 - ([\#3874](https://github.com/matrix-org/synapse/issues/3874))
31 - Mitigate outbound federation randomly becoming wedged ([\#3875](https://github.com/matrix-org/synapse/issues/3875))
32
33
34 Internal Changes
35 ----------------
36
37 - CircleCI tests now run on the potential merge of a PR. ([\#3704](https://github.com/matrix-org/synapse/issues/3704))
38 - http/ is now ported to Python 3. ([\#3771](https://github.com/matrix-org/synapse/issues/3771))
39 - Improve human readable error messages for threepid registration/account update ([\#3789](https://github.com/matrix-org/synapse/issues/3789))
40 - Make /sync slightly faster by avoiding needless copies ([\#3795](https://github.com/matrix-org/synapse/issues/3795))
41 - handlers/ is now ported to Python 3. ([\#3803](https://github.com/matrix-org/synapse/issues/3803))
42 - Limit the number of PDUs/EDUs per federation transaction ([\#3805](https://github.com/matrix-org/synapse/issues/3805))
43 - Only start postgres instance for postgres tests on Travis CI ([\#3806](https://github.com/matrix-org/synapse/issues/3806))
44 - tests/ is now ported to Python 3. ([\#3808](https://github.com/matrix-org/synapse/issues/3808))
45 - crypto/ is now ported to Python 3. ([\#3822](https://github.com/matrix-org/synapse/issues/3822))
46 - rest/ is now ported to Python 3. ([\#3823](https://github.com/matrix-org/synapse/issues/3823))
47 - add some logging for the keyring queue ([\#3826](https://github.com/matrix-org/synapse/issues/3826))
48 - speed up lazy loading by 2-3x ([\#3827](https://github.com/matrix-org/synapse/issues/3827))
49 - Improved Dockerfile to remove build requirements after building reducing the image size. ([\#3834](https://github.com/matrix-org/synapse/issues/3834))
50 - Disable lazy loading for incremental syncs for now ([\#3840](https://github.com/matrix-org/synapse/issues/3840))
51 - federation/ is now ported to Python 3. ([\#3847](https://github.com/matrix-org/synapse/issues/3847))
52 - Log when we retry outbound requests ([\#3853](https://github.com/matrix-org/synapse/issues/3853))
53 - Removed some excess logging messages. ([\#3855](https://github.com/matrix-org/synapse/issues/3855))
54 - Speed up purge history for rooms that have been previously purged ([\#3856](https://github.com/matrix-org/synapse/issues/3856))
55 - Refactor some HTTP timeout code. ([\#3857](https://github.com/matrix-org/synapse/issues/3857))
56 - Fix running merged builds on CircleCI ([\#3858](https://github.com/matrix-org/synapse/issues/3858))
57 - Fix typo in replication stream exception. ([\#3860](https://github.com/matrix-org/synapse/issues/3860))
58 - Add in flight real time metrics for Measure blocks ([\#3871](https://github.com/matrix-org/synapse/issues/3871))
59 - Disable buffering and automatic retrying in treq requests to prevent timeouts. ([\#3872](https://github.com/matrix-org/synapse/issues/3872))
60 - mention jemalloc in the README ([\#3877](https://github.com/matrix-org/synapse/issues/3877))
61 - Remove unmaintained "nuke-room-from-db.sh" script ([\#3888](https://github.com/matrix-org/synapse/issues/3888))
62
63
064 Synapse 0.33.4 (2018-09-07)
165 ===========================
266
741741 }
742742 }
743743
744 and an example apache configuration may look like::
745
746 <VirtualHost *:443>
747 SSLEngine on
748 ServerName matrix.example.com;
749
750 <Location /_matrix>
751 ProxyPass http://127.0.0.1:8008/_matrix nocanon
752 ProxyPassReverse http://127.0.0.1:8008/_matrix
753 </Location>
754 </VirtualHost>
755
744756 You will also want to set ``bind_addresses: ['127.0.0.1']`` and ``x_forwarded: true``
745757 for port 8008 in ``homeserver.yaml`` to ensure that client IP addresses are
746758 recorded correctly.
950962 in memory constrained environments, or increased if performance starts to
951963 degrade.
952964
965 Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
966 improvement in overall memory use, and especially in terms of giving back RAM
967 to the OS. To use it, the library must simply be put in the LD_PRELOAD
968 environment variable when launching Synapse. On Debian, this can be done
969 by installing the ``libjemalloc1`` package and adding this line to
970 ``/etc/default/matrix-synapse``::
971
972 LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
953973
954974 .. _`key_management`: https://matrix.org/docs/spec/server_server/unstable.html#retrieving-server-keys
00 FROM docker.io/python:2-alpine3.8
11
2 RUN apk add --no-cache --virtual .nacl_deps \
2 COPY . /synapse
3
4 RUN apk add --no-cache --virtual .build_deps \
35 build-base \
46 libffi-dev \
57 libjpeg-turbo-dev \
79 libxslt-dev \
810 linux-headers \
911 postgresql-dev \
10 su-exec \
11 zlib-dev
12
13 COPY . /synapse
14
15 # A wheel cache may be provided in ./cache for faster build
16 RUN cd /synapse \
12 zlib-dev \
13 && cd /synapse \
14 && apk add --no-cache --virtual .runtime_deps \
15 libffi \
16 libjpeg-turbo \
17 libressl \
18 libxslt \
19 libpq \
20 zlib \
21 su-exec \
1722 && pip install --upgrade \
1823 lxml \
1924 pip \
2530 && rm -rf \
2631 setup.cfg \
2732 setup.py \
28 synapse
29
33 synapse \
34 && apk del .build_deps
35
3036 VOLUME ["/data"]
3137
3238 EXPOSE 8008/tcp 8448/tcp
+0
-57
scripts-dev/nuke-room-from-db.sh less more
#!/bin/bash

## CAUTION:
## This script will remove (hopefully) all trace of the given room ID from
## your homeserver.db

## Do not run it lightly.

set -e

if [ "$1" == "-h" ] || [ "$1" == "" ]; then
    echo "Call with ROOM_ID as first option and then pipe it into the database. So for instance you might run"
    echo " nuke-room-from-db.sh <room_id> | sqlite3 homeserver.db"
    echo "or"
    echo " nuke-room-from-db.sh <room_id> | psql --dbname=synapse"
    exit
fi

# Double up any single quotes in the room ID so the generated SQL stays
# well-formed (and cannot be broken out of) if the argument contains a quote.
ROOMID="${1//\'/\'\'}"

cat <<EOF
DELETE FROM event_forward_extremities WHERE room_id = '$ROOMID';
DELETE FROM event_backward_extremities WHERE room_id = '$ROOMID';
DELETE FROM event_edges WHERE room_id = '$ROOMID';
DELETE FROM room_depth WHERE room_id = '$ROOMID';
DELETE FROM state_forward_extremities WHERE room_id = '$ROOMID';
DELETE FROM events WHERE room_id = '$ROOMID';
DELETE FROM event_json WHERE room_id = '$ROOMID';
DELETE FROM state_events WHERE room_id = '$ROOMID';
DELETE FROM current_state_events WHERE room_id = '$ROOMID';
DELETE FROM room_memberships WHERE room_id = '$ROOMID';
DELETE FROM feedback WHERE room_id = '$ROOMID';
DELETE FROM topics WHERE room_id = '$ROOMID';
DELETE FROM room_names WHERE room_id = '$ROOMID';
DELETE FROM rooms WHERE room_id = '$ROOMID';
DELETE FROM room_hosts WHERE room_id = '$ROOMID';
DELETE FROM room_aliases WHERE room_id = '$ROOMID';
DELETE FROM state_groups WHERE room_id = '$ROOMID';
DELETE FROM state_groups_state WHERE room_id = '$ROOMID';
DELETE FROM receipts_graph WHERE room_id = '$ROOMID';
DELETE FROM receipts_linearized WHERE room_id = '$ROOMID';
DELETE FROM event_search WHERE room_id = '$ROOMID';
DELETE FROM guest_access WHERE room_id = '$ROOMID';
DELETE FROM history_visibility WHERE room_id = '$ROOMID';
DELETE FROM room_tags WHERE room_id = '$ROOMID';
DELETE FROM room_tags_revisions WHERE room_id = '$ROOMID';
DELETE FROM room_account_data WHERE room_id = '$ROOMID';
DELETE FROM event_push_actions WHERE room_id = '$ROOMID';
DELETE FROM local_invites WHERE room_id = '$ROOMID';
DELETE FROM pusher_throttle WHERE room_id = '$ROOMID';
DELETE FROM event_reports WHERE room_id = '$ROOMID';
DELETE FROM public_room_list_stream WHERE room_id = '$ROOMID';
DELETE FROM stream_ordering_to_exterm WHERE room_id = '$ROOMID';
DELETE FROM event_auth WHERE room_id = '$ROOMID';
DELETE FROM appservice_room_list WHERE room_id = '$ROOMID';
VACUUM;
EOF
1616 [pep8]
1717 max-line-length = 90
1818 # W503 requires that binary operators be at the end, not start, of lines. Erik
19 # doesn't like it. E203 is contrary to PEP8.
20 ignore = W503,E203
19 # doesn't like it. E203 is contrary to PEP8. E731 is silly.
20 ignore = W503,E203,E731
2121
2222 [flake8]
2323 # note that flake8 inherits the "ignore" settings from "pep8" (because it uses
2424 # pep8 to do those checks), but not the "max-line-length" setting
2525 max-line-length = 90
26 ignore=W503,E203,E731
2627
2728 [isort]
2829 line_length = 89
1616 """ This is a reference implementation of a Matrix home server.
1717 """
1818
19 __version__ = "0.33.4"
19 try:
20 from twisted.internet import protocol
21 from twisted.internet.protocol import Factory
22 from twisted.names.dns import DNSDatagramProtocol
23 protocol.Factory.noisy = False
24 Factory.noisy = False
25 DNSDatagramProtocol.noisy = False
26 except ImportError:
27 pass
28
29 __version__ = "0.33.5"
250250 "include_leave", False
251251 )
252252 self.event_fields = filter_json.get("event_fields", [])
253 self.event_format = filter_json.get("event_format", "client")
253254
254255 def __repr__(self):
255256 return "<FilterCollection %s>" % (json.dumps(self._filter_json),)
306306 # Gauges to expose monthly active user control metrics
307307 current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
308308 max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
309 registered_reserved_users_mau_gauge = Gauge(
310 "synapse_admin_mau:registered_reserved_users",
311 "Registered users with reserved threepids"
312 )
309313
310314
311315 def setup(config_options):
530534
531535 @defer.inlineCallbacks
532536 def generate_monthly_active_users():
533 count = 0
537 current_mau_count = 0
538 reserved_count = 0
539 store = hs.get_datastore()
534540 if hs.config.limit_usage_by_mau:
535 count = yield hs.get_datastore().get_monthly_active_count()
536 current_mau_gauge.set(float(count))
541 current_mau_count = yield store.get_monthly_active_count()
542 reserved_count = yield store.get_registered_reserved_users_count()
543 current_mau_gauge.set(float(current_mau_count))
544 registered_reserved_users_mau_gauge.set(float(reserved_count))
537545 max_mau_gauge.set(float(hs.config.max_mau_value))
538546
539547 hs.get_datastore().initialise_reserved_users(
1212 # See the License for the specific language governing permissions and
1313 # limitations under the License.
1414 import logging
15 import urllib
15
16 from six.moves import urllib
1617
1718 from prometheus_client import Counter
1819
9798 def query_user(self, service, user_id):
9899 if service.url is None:
99100 defer.returnValue(False)
100 uri = service.url + ("/users/%s" % urllib.quote(user_id))
101 uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
101102 response = None
102103 try:
103104 response = yield self.get_json(uri, {
118119 def query_alias(self, service, alias):
119120 if service.url is None:
120121 defer.returnValue(False)
121 uri = service.url + ("/rooms/%s" % urllib.quote(alias))
122 uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
122123 response = None
123124 try:
124125 response = yield self.get_json(uri, {
152153 service.url,
153154 APP_SERVICE_PREFIX,
154155 kind,
155 urllib.quote(protocol)
156 urllib.parse.quote(protocol)
156157 )
157158 try:
158159 response = yield self.get_json(uri, fields)
187188 uri = "%s%s/thirdparty/protocol/%s" % (
188189 service.url,
189190 APP_SERVICE_PREFIX,
190 urllib.quote(protocol)
191 urllib.parse.quote(protocol)
191192 )
192193 try:
193194 info = yield self.get_json(uri, {})
227228 txn_id = str(txn_id)
228229
229230 uri = service.url + ("/transactions/%s" %
230 urllib.quote(txn_id))
231 urllib.parse.quote(txn_id))
231232 try:
232233 yield self.put_json(
233234 uri=uri,
2020 from .database import DatabaseConfig
2121 from .emailconfig import EmailConfig
2222 from .groups import GroupsConfig
23 from .jwt import JWTConfig
23 from .jwt_config import JWTConfig
2424 from .key import KeyConfig
2525 from .logger import LoggingConfig
2626 from .metrics import MetricsConfig
+0
-53
synapse/config/jwt.py less more
0 # -*- coding: utf-8 -*-
1 # Copyright 2015 Niklas Riekenbrauck
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from ._base import Config, ConfigError
16
17 MISSING_JWT = (
18 """Missing jwt library. This is required for jwt login.
19
20 Install by running:
21 pip install pyjwt
22 """
23 )
24
25
26 class JWTConfig(Config):
27 def read_config(self, config):
28 jwt_config = config.get("jwt_config", None)
29 if jwt_config:
30 self.jwt_enabled = jwt_config.get("enabled", False)
31 self.jwt_secret = jwt_config["secret"]
32 self.jwt_algorithm = jwt_config["algorithm"]
33
34 try:
35 import jwt
36 jwt # To stop unused lint.
37 except ImportError:
38 raise ConfigError(MISSING_JWT)
39 else:
40 self.jwt_enabled = False
41 self.jwt_secret = None
42 self.jwt_algorithm = None
43
44 def default_config(self, **kwargs):
45 return """\
46 # The JWT needs to contain a globally unique "sub" (subject) claim.
47 #
48 # jwt_config:
49 # enabled: true
50 # secret: "a secret"
51 # algorithm: "HS256"
52 """
# -*- coding: utf-8 -*-
# Copyright 2015 Niklas Riekenbrauck
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ._base import Config, ConfigError

# Error text raised when jwt login is configured but the pyjwt library is
# not importable.
MISSING_JWT = (
    """Missing jwt library. This is required for jwt login.

    Install by running:
        pip install pyjwt
    """
)


class JWTConfig(Config):
    """Parses the optional ``jwt_config`` section of the homeserver config.

    Sets ``jwt_enabled``, ``jwt_secret`` and ``jwt_algorithm`` attributes;
    all are disabled/None when the section is absent.
    """

    def read_config(self, config):
        # ``jwt_config`` is optional; absence disables JWT login entirely.
        jwt_config = config.get("jwt_config", None)
        if jwt_config:
            self.jwt_enabled = jwt_config.get("enabled", False)
            # "secret" and "algorithm" are mandatory once the section exists:
            # a KeyError here surfaces a misconfigured section to the admin.
            self.jwt_secret = jwt_config["secret"]
            self.jwt_algorithm = jwt_config["algorithm"]

            # pyjwt is an optional dependency; fail at config-parse time
            # rather than at first login attempt.
            try:
                import jwt
                jwt  # To stop unused lint.
            except ImportError:
                raise ConfigError(MISSING_JWT)
        else:
            self.jwt_enabled = False
            self.jwt_secret = None
            self.jwt_algorithm = None

    def default_config(self, **kwargs):
        # Commented-out sample section for the generated default config file.
        return """\
        # The JWT needs to contain a globally unique "sub" (subject) claim.
        #
        # jwt_config:
        #   enabled: true
        #   secret: "a secret"
        #   algorithm: "HS256"
        """
226226 #
227227 # However this may not be too much of a problem if we are just writing to a file.
228228 observer = STDLibLogObserver()
229
230 def _log(event):
231
232 if "log_text" in event:
233 if event["log_text"].startswith("DNSDatagramProtocol starting on "):
234 return
235
236 if event["log_text"].startswith("(UDP Port "):
237 return
238
239 if event["log_text"].startswith("Timing out client"):
240 return
241
242 return observer(event)
243
229244 globalLogBeginner.beginLoggingTo(
230 [observer],
245 [_log],
231246 redirectStandardIO=not config.no_redirect_stdio,
232247 )
122122
123123 def get_options(self, host):
124124 return ClientTLSOptions(
125 host.decode('utf-8'),
125 host,
126126 CertificateOptions(verify=False).getContext()
127127 )
4949 defer.returnValue((server_response, server_certificate))
5050 except SynapseKeyClientError as e:
5151 logger.warn("Error getting key for %r: %s", server_name, e)
52 if e.status.startswith("4"):
52 if e.status.startswith(b"4"):
5353 # Don't retry for 4xx responses.
5454 raise IOError("Cannot get key for %r" % server_name)
5555 except (ConnectError, DomainError) as e:
8080 def connectionMade(self):
8181 self._peer = self.transport.getPeer()
8282 logger.debug("Connected to %s", self._peer)
83
84 if not isinstance(self.path, bytes):
85 self.path = self.path.encode('ascii')
86
87 if not isinstance(self.host, bytes):
88 self.host = self.host.encode('ascii')
8389
8490 self.sendCommand(b"GET", self.path)
8591 if self.host:
1515
1616 import hashlib
1717 import logging
18 import urllib
1918 from collections import namedtuple
19
20 from six.moves import urllib
2021
2122 from signedjson.key import (
2223 decode_verify_key_bytes,
3940 from synapse.crypto.keyclient import fetch_server_key
4041 from synapse.util import logcontext, unwrapFirstError
4142 from synapse.util.logcontext import (
43 LoggingContext,
4244 PreserveLoggingContext,
4345 preserve_fn,
4446 run_in_background,
215217 servers have completed. Follows the synapse rules of logcontext
216218 preservation.
217219 """
220 loop_count = 1
218221 while True:
219222 wait_on = [
220 self.key_downloads[server_name]
223 (server_name, self.key_downloads[server_name])
221224 for server_name in server_names
222225 if server_name in self.key_downloads
223226 ]
224 if wait_on:
225 with PreserveLoggingContext():
226 yield defer.DeferredList(wait_on)
227 else:
227 if not wait_on:
228228 break
229 logger.info(
230 "Waiting for existing lookups for %s to complete [loop %i]",
231 [w[0] for w in wait_on], loop_count,
232 )
233 with PreserveLoggingContext():
234 yield defer.DeferredList((w[1] for w in wait_on))
235
236 loop_count += 1
237
238 ctx = LoggingContext.current_context()
229239
230240 def rm(r, server_name_):
231 self.key_downloads.pop(server_name_, None)
241 with PreserveLoggingContext(ctx):
242 logger.debug("Releasing key lookup lock on %s", server_name_)
243 self.key_downloads.pop(server_name_, None)
232244 return r
233245
234246 for server_name, deferred in server_to_deferred.items():
247 logger.debug("Got key lookup lock on %s", server_name)
235248 self.key_downloads[server_name] = deferred
236249 deferred.addBoth(rm, server_name)
237250
431444 # an incoming request.
432445 query_response = yield self.client.post_json(
433446 destination=perspective_name,
434 path=b"/_matrix/key/v2/query",
447 path="/_matrix/key/v2/query",
435448 data={
436449 u"server_keys": {
437450 server_name: {
512525
513526 (response, tls_certificate) = yield fetch_server_key(
514527 server_name, self.hs.tls_client_options_factory,
515 path=(b"/_matrix/key/v2/server/%s" % (
516 urllib.quote(requested_key_id),
528 path=("/_matrix/key/v2/server/%s" % (
529 urllib.parse.quote(requested_key_id),
517530 )).encode("ascii"),
518531 )
519532
1212 # See the License for the specific language governing permissions and
1313 # limitations under the License.
1414
15 import six
16
1517 from synapse.util.caches import intern_dict
1618 from synapse.util.frozenutils import freeze
1719
146148 def items(self):
147149 return list(self._event_dict.items())
148150
151 def keys(self):
152 return six.iterkeys(self._event_dict)
153
149154
150155 class FrozenEvent(EventBase):
151156 def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
142142 def callback(_, pdu):
143143 with logcontext.PreserveLoggingContext(ctx):
144144 if not check_event_content_hash(pdu):
145 logger.warn(
146 "Event content has been tampered, redacting %s: %s",
147 pdu.event_id, pdu.get_pdu_json()
148 )
149 return prune_event(pdu)
145 # let's try to distinguish between failures because the event was
146 # redacted (which are somewhat expected) vs actual ball-tampering
147 # incidents.
148 #
149 # This is just a heuristic, so we just assume that if the keys are
150 # about the same between the redacted and received events, then the
151 # received event was probably a redacted copy (but we then use our
152 # *actual* redacted copy to be on the safe side.)
153 redacted_event = prune_event(pdu)
154 if (
155 set(redacted_event.keys()) == set(pdu.keys()) and
156 set(six.iterkeys(redacted_event.content))
157 == set(six.iterkeys(pdu.content))
158 ):
159 logger.info(
160 "Event %s seems to have been redacted; using our redacted "
161 "copy",
162 pdu.event_id,
163 )
164 else:
165 logger.warning(
166 "Event %s content has been tampered, redacting",
167 pdu.event_id, pdu.get_pdu_json(),
168 )
169 return redacted_event
150170
151171 if self.spam_checker.check_event_for_spam(pdu):
152172 logger.warn(
161181 failure.trap(SynapseError)
162182 with logcontext.PreserveLoggingContext(ctx):
163183 logger.warn(
164 "Signature check failed for %s",
165 pdu.event_id,
184 "Signature check failed for %s: %s",
185 pdu.event_id, failure.getErrorMessage(),
166186 )
167187 return failure
168188
270270 event_id, destination, e,
271271 )
272272 except NotRetryingDestination as e:
273 logger.info(e.message)
273 logger.info(str(e))
274274 continue
275275 except FederationDeniedError as e:
276 logger.info(e.message)
276 logger.info(str(e))
277277 continue
278278 except Exception as e:
279279 pdu_attempts[destination] = now
509509 else:
510510 logger.warn(
511511 "Failed to %s via %s: %i %s",
512 description, destination, e.code, e.message,
512 description, destination, e.code, e.args[0],
513513 )
514514 except Exception:
515515 logger.warn(
874874 except Exception as e:
875875 logger.exception(
876876 "Failed to send_third_party_invite via %s: %s",
877 destination, e.message
877 destination, str(e)
878878 )
879879
880880 raise RuntimeError("Failed to send to any server.")
837837 )
838838
839839 return self._send_edu(
840 edu_type=edu_type,
841 origin=origin,
842 content=content,
840 edu_type=edu_type,
841 origin=origin,
842 content=content,
843843 )
844844
845845 def on_query(self, query_type, args):
850850 return handler(args)
851851
852852 return self._get_query_client(
853 query_type=query_type,
854 args=args,
855 )
853 query_type=query_type,
854 args=args,
855 )
462462 # pending_transactions flag.
463463
464464 pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
465
466 # We can only include at most 50 PDUs per transactions
467 pending_pdus, leftover_pdus = pending_pdus[:50], pending_pdus[50:]
468 if leftover_pdus:
469 self.pending_pdus_by_dest[destination] = leftover_pdus
470
465471 pending_edus = self.pending_edus_by_dest.pop(destination, [])
472
473 # We can only include at most 100 EDUs per transactions
474 pending_edus, leftover_edus = pending_edus[:100], pending_edus[100:]
475 if leftover_edus:
476 self.pending_edus_by_dest[destination] = leftover_edus
477
466478 pending_presence = self.pending_presence_by_dest.pop(destination, {})
467479
468480 pending_edus.extend(
1414 # limitations under the License.
1515
1616 import logging
17 import urllib
17
18 from six.moves import urllib
1819
1920 from twisted.internet import defer
2021
950951 Returns:
951952 str
952953 """
953 return prefix + path % tuple(urllib.quote(arg, "") for arg in args)
954 return prefix + path % tuple(urllib.parse.quote(arg, "") for arg in args)
8989 @defer.inlineCallbacks
9090 def authenticate_request(self, request, content):
9191 json_request = {
92 "method": request.method,
93 "uri": request.uri,
92 "method": request.method.decode('ascii'),
93 "uri": request.uri.decode('ascii'),
9494 "destination": self.server_name,
9595 "signatures": {},
9696 }
251251 by the callback method. None if the request has already been handled.
252252 """
253253 content = None
254 if request.method in ["PUT", "POST"]:
254 if request.method in [b"PUT", b"POST"]:
255255 # TODO: Handle other method types? other content types?
256256 content = parse_json_object_from_request(request)
257257
385385 return self.handler.on_context_state_request(
386386 origin,
387387 context,
388 query.get("event_id", [None])[0],
388 parse_string_from_args(query, "event_id", None),
389389 )
390390
391391
396396 return self.handler.on_state_ids_request(
397397 origin,
398398 room_id,
399 query.get("event_id", [None])[0],
399 parse_string_from_args(query, "event_id", None),
400400 )
401401
402402
404404 PATH = "/backfill/(?P<context>[^/]*)/"
405405
406406 def on_GET(self, origin, content, query, context):
407 versions = query["v"]
408 limits = query["limit"]
409
410 if not limits:
407 versions = [x.decode('ascii') for x in query[b"v"]]
408 limit = parse_integer_from_args(query, "limit", None)
409
410 if not limit:
411411 return defer.succeed((400, {"error": "Did not include limit param"}))
412
413 limit = int(limits[-1])
414412
415413 return self.handler.on_backfill_request(origin, context, versions, limit)
416414
422420 def on_GET(self, origin, content, query, query_type):
423421 return self.handler.on_query_request(
424422 query_type,
425 {k: v[0].decode("utf-8") for k, v in query.items()}
423 {k.decode('utf8'): v[0].decode("utf-8") for k, v in query.items()}
426424 )
427425
428426
629627
630628 @defer.inlineCallbacks
631629 def on_GET(self, origin, content, query):
632 token = query.get("access_token", [None])[0]
630 token = query.get(b"access_token", [None])[0]
633631 if token is None:
634632 defer.returnValue((401, {
635633 "errcode": "M_MISSING_TOKEN", "error": "Access Token required"
636634 }))
637635 return
638636
639 user_id = yield self.handler.on_openid_userinfo(token)
637 user_id = yield self.handler.on_openid_userinfo(token.decode('ascii'))
640638
641639 if user_id is None:
642640 defer.returnValue((401, {
894894
895895 Args:
896896 password (unicode): Password to hash.
897 stored_hash (unicode): Expected hash value.
897 stored_hash (bytes): Expected hash value.
898898
899899 Returns:
900900 Deferred(bool): Whether self.hash(password) == stored_hash.
901901 """
902
903902 def _do_validate_hash():
904903 # Normalise the Unicode in the password
905904 pw = unicodedata.normalize("NFKC", password)
906905
907906 return bcrypt.checkpw(
908907 pw.encode('utf8') + self.hs.config.password_pepper.encode("utf8"),
909 stored_hash.encode('utf8')
908 stored_hash
910909 )
911910
912911 if stored_hash:
912 if not isinstance(stored_hash, bytes):
913 stored_hash = stored_hash.encode('ascii')
914
913915 return make_deferred_yieldable(
914916 threads.deferToThreadPool(
915917 self.hs.get_reactor(),
329329 (algorithm, key_id, ex_json, key)
330330 )
331331 else:
332 new_keys.append((algorithm, key_id, encode_canonical_json(key)))
332 new_keys.append((
333 algorithm, key_id, encode_canonical_json(key).decode('ascii')))
333334
334335 yield self.store.add_e2e_one_time_keys(
335336 user_id, device_id, time_now, new_keys
357358 # Note that some Exceptions (notably twisted's ResponseFailed etc) don't
358359 # give a string for e.message, which json then fails to serialize.
359360 return {
360 "status": 503, "message": str(e.message),
361 "status": 503, "message": str(e),
361362 }
362363
363364
593593
594594 required_auth = set(
595595 a_id
596 for event in events + state_events.values() + auth_events.values()
596 for event in events + list(state_events.values()) + list(auth_events.values())
597597 for a_id, _ in event.auth_events
598598 )
599599 auth_events.update({
801801 )
802802 continue
803803 except NotRetryingDestination as e:
804 logger.info(e.message)
804 logger.info(str(e))
805805 continue
806806 except FederationDeniedError as e:
807807 logger.info(e)
13571357 )
13581358
13591359 if state_groups:
1360 _, state = state_groups.items().pop()
1360 _, state = list(state_groups.items()).pop()
13611361 results = state
13621362
13631363 if event.is_state():
268268
269269 if state_ids:
270270 state = yield self.store.get_events(list(state_ids.values()))
271
272 if state:
273 state = yield filter_events_for_client(
274 self.store,
275 user_id,
276 state.values(),
277 is_peeking=(member_event_id is None),
278 )
271 state = state.values()
279272
280273 time_now = self.clock.time_msec()
281274
161161 # Filter out rooms that we don't want to return
162162 rooms_to_scan = [
163163 r for r in sorted_rooms
164 if r not in newly_unpublished and rooms_to_num_joined[room_id] > 0
164 if r not in newly_unpublished and rooms_to_num_joined[r] > 0
165165 ]
166166
167167 total_room_count = len(rooms_to_scan)
5353 batch_token = None
5454 if batch:
5555 try:
56 b = decode_base64(batch)
56 b = decode_base64(batch).decode('ascii')
5757 batch_group, batch_group_key, batch_token = b.split("\n")
5858
5959 assert batch_group is not None
257257 # it returns more from the same group (if applicable) rather
258258 # than reverting to searching all results again.
259259 if batch_group and batch_group_key:
260 global_next_batch = encode_base64("%s\n%s\n%s" % (
260 global_next_batch = encode_base64(("%s\n%s\n%s" % (
261261 batch_group, batch_group_key, pagination_token
262 ))
262 )).encode('ascii'))
263263 else:
264 global_next_batch = encode_base64("%s\n%s\n%s" % (
264 global_next_batch = encode_base64(("%s\n%s\n%s" % (
265265 "all", "", pagination_token
266 ))
266 )).encode('ascii'))
267267
268268 for room_id, group in room_groups.items():
269 group["next_batch"] = encode_base64("%s\n%s\n%s" % (
269 group["next_batch"] = encode_base64(("%s\n%s\n%s" % (
270270 "room_id", room_id, pagination_token
271 ))
271 )).encode('ascii'))
272272
273273 allowed_events.extend(room_events)
274274
2323
2424 from synapse.api.constants import EventTypes, Membership
2525 from synapse.push.clientformat import format_push_rules_for_user
26 from synapse.storage.roommember import MemberSummary
2627 from synapse.types import RoomStreamToken
2728 from synapse.util.async_helpers import concurrently_execute
2829 from synapse.util.caches.expiringcache import ExpiringCache
524525 A deferred dict describing the room summary
525526 """
526527
528 # FIXME: we could/should get this from room_stats when matthew/stats lands
529
527530 # FIXME: this promulgates https://github.com/matrix-org/synapse/issues/3305
528531 last_events, _ = yield self.store.get_recent_event_ids_for_room(
529532 room_id, end_token=now_token.room_key, limit=1,
536539 last_event = last_events[-1]
537540 state_ids = yield self.store.get_state_ids_for_event(
538541 last_event.event_id, [
539 (EventTypes.Member, None),
540542 (EventTypes.Name, ''),
541543 (EventTypes.CanonicalAlias, ''),
542544 ]
543545 )
544546
545 member_ids = {
546 state_key: event_id
547 for (t, state_key), event_id in state_ids.iteritems()
548 if t == EventTypes.Member
549 }
547 # this is heavily cached, thus: fast.
548 details = yield self.store.get_room_summary(room_id)
549
550550 name_id = state_ids.get((EventTypes.Name, ''))
551551 canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ''))
552552
553553 summary = {}
554
555 # FIXME: it feels very heavy to load up every single membership event
556 # just to calculate the counts.
557 member_events = yield self.store.get_events(member_ids.values())
558
559 joined_user_ids = []
560 invited_user_ids = []
561
562 for ev in member_events.values():
563 if ev.content.get("membership") == Membership.JOIN:
564 joined_user_ids.append(ev.state_key)
565 elif ev.content.get("membership") == Membership.INVITE:
566 invited_user_ids.append(ev.state_key)
554 empty_ms = MemberSummary([], 0)
567555
568556 # TODO: only send these when they change.
569 summary["m.joined_member_count"] = len(joined_user_ids)
570 summary["m.invited_member_count"] = len(invited_user_ids)
571
572 if name_id or canonical_alias_id:
573 defer.returnValue(summary)
574
575 # FIXME: order by stream ordering, not alphabetic
576
557 summary["m.joined_member_count"] = (
558 details.get(Membership.JOIN, empty_ms).count
559 )
560 summary["m.invited_member_count"] = (
561 details.get(Membership.INVITE, empty_ms).count
562 )
563
564 # if the room has a name or canonical_alias set, we can skip
565 # calculating heroes. we assume that if the event has contents, it'll
566 # be a valid name or canonical_alias - i.e. we're checking that they
567 # haven't been "deleted" by blatting {} over the top.
568 if name_id:
569 name = yield self.store.get_event(name_id, allow_none=False)
570 if name and name.content:
571 defer.returnValue(summary)
572
573 if canonical_alias_id:
574 canonical_alias = yield self.store.get_event(
575 canonical_alias_id, allow_none=False,
576 )
577 if canonical_alias and canonical_alias.content:
578 defer.returnValue(summary)
579
580 joined_user_ids = [
581 r[0] for r in details.get(Membership.JOIN, empty_ms).members
582 ]
583 invited_user_ids = [
584 r[0] for r in details.get(Membership.INVITE, empty_ms).members
585 ]
586 gone_user_ids = (
587 [r[0] for r in details.get(Membership.LEAVE, empty_ms).members] +
588 [r[0] for r in details.get(Membership.BAN, empty_ms).members]
589 )
590
591 # FIXME: only build up a member_ids list for our heroes
592 member_ids = {}
593 for membership in (
594 Membership.JOIN,
595 Membership.INVITE,
596 Membership.LEAVE,
597 Membership.BAN
598 ):
599 for user_id, event_id in details.get(membership, empty_ms).members:
600 member_ids[user_id] = event_id
601
602 # FIXME: order by stream ordering rather than as returned by SQL
577603 me = sync_config.user.to_string()
578604 if (joined_user_ids or invited_user_ids):
579605 summary['m.heroes'] = sorted(
585611 )[0:5]
586612 else:
587613 summary['m.heroes'] = sorted(
588 [user_id for user_id in member_ids.keys() if user_id != me]
614 [
615 user_id
616 for user_id in gone_user_ids
617 if user_id != me
618 ]
589619 )[0:5]
590620
591621 if not sync_config.filter_collection.lazy_load_members():
718748 lazy_load_members=lazy_load_members,
719749 )
720750 elif batch.limited:
751 state_at_timeline_start = yield self.store.get_state_ids_for_event(
752 batch.events[0].event_id, types=types,
753 filtered_types=filtered_types,
754 )
755
756 # for now, we disable LL for gappy syncs - see
757 # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
758 # N.B. this slows down incr syncs as we are now processing way
759 # more state in the server than if we were LLing.
760 #
761 # We still have to filter timeline_start to LL entries (above) in order
762 # for _calculate_state's LL logic to work, as we have to include LL
763 # members for timeline senders in case they weren't loaded in the initial
764 # sync. We do this by (counterintuitively) by filtering timeline_start
765 # members to just be ones which were timeline senders, which then ensures
766 # all of the rest get included in the state block (if we need to know
767 # about them).
768 types = None
769 filtered_types = None
770
721771 state_at_previous_sync = yield self.get_state_at(
722772 room_id, stream_position=since_token, types=types,
723773 filtered_types=filtered_types,
725775
726776 current_state_ids = yield self.store.get_state_ids_for_event(
727777 batch.events[-1].event_id, types=types,
728 filtered_types=filtered_types,
729 )
730
731 state_at_timeline_start = yield self.store.get_state_ids_for_event(
732 batch.events[0].event_id, types=types,
733778 filtered_types=filtered_types,
734779 )
735780
738783 timeline_start=state_at_timeline_start,
739784 previous=state_at_previous_sync,
740785 current=current_state_ids,
786 # we have to include LL members in case LL initial sync missed them
741787 lazy_load_members=lazy_load_members,
742788 )
743789 else:
744790 state_ids = {}
745791 if lazy_load_members:
746792 if types:
747 # We're returning an incremental sync, with no "gap" since
748 # the previous sync, so normally there would be no state to return
793 # We're returning an incremental sync, with no
794 # "gap" since the previous sync, so normally there would be
795 # no state to return.
749796 # But we're lazy-loading, so the client might need some more
750797 # member events to understand the events in this timeline.
751798 # So we fish out all the member events corresponding to the
773820 logger.debug("filtering state from %r...", state_ids)
774821 state_ids = {
775822 t: event_id
776 for t, event_id in state_ids.iteritems()
823 for t, event_id in iteritems(state_ids)
777824 if cache.get(t[1]) != event_id
778825 }
779826 logger.debug("...to %r", state_ids)
15741621 newly_joined_room=newly_joined,
15751622 )
15761623
1624 # When we join the room (or the client requests full_state), we should
1625 # send down any existing tags. Usually the user won't have tags in a
1626 # newly joined room, unless either a) they've joined before or b) the
1627 # tag was added by synapse e.g. for server notice rooms.
1628 if full_state:
1629 user_id = sync_result_builder.sync_config.user.to_string()
1630 tags = yield self.store.get_tags_for_room(user_id, room_id)
1631
1632 # If there aren't any tags, don't send the empty tags list down
1633 # sync
1634 if not tags:
1635 tags = None
1636
15771637 account_data_events = []
15781638 if tags is not None:
15791639 account_data_events.append({
16021662 )
16031663
16041664 summary = {}
1665
1666 # we include a summary in room responses when we're lazy loading
1667 # members (as the client otherwise doesn't have enough info to form
1668 # the name itself).
16051669 if (
16061670 sync_config.filter_collection.lazy_load_members() and
16071671 (
1672 # we recalulate the summary:
1673 # if there are membership changes in the timeline, or
1674 # if membership has changed during a gappy sync, or
1675 # if this is an initial sync.
16081676 any(ev.type == EventTypes.Member for ev in batch.events) or
1677 (
1678 # XXX: this may include false positives in the form of LL
1679 # members which have snuck into state
1680 batch.limited and
1681 any(t == EventTypes.Member for (t, k) in state)
1682 ) or
16091683 since_token is None
16101684 )
16111685 ):
16351709 unread_notifications["highlight_count"] = notifs["highlight_count"]
16361710
16371711 sync_result_builder.joined.append(room_sync)
1712
1713 if batch.limited and since_token:
1714 user_id = sync_result_builder.sync_config.user.to_string()
1715 logger.info(
1716 "Incremental gappy sync of %s for user %s with %d state events" % (
1717 room_id,
1718 user_id,
1719 len(state),
1720 )
1721 )
16381722 elif room_builder.rtype == "archived":
16391723 room_sync = ArchivedSyncResult(
16401724 room_id=room_id,
17281812 event_id_to_key = {
17291813 e: key
17301814 for key, e in itertools.chain(
1731 timeline_contains.items(),
1732 previous.items(),
1733 timeline_start.items(),
1734 current.items(),
1815 iteritems(timeline_contains),
1816 iteritems(previous),
1817 iteritems(timeline_start),
1818 iteritems(current),
17351819 )
17361820 }
17371821
1738 c_ids = set(e for e in current.values())
1739 ts_ids = set(e for e in timeline_start.values())
1740 p_ids = set(e for e in previous.values())
1741 tc_ids = set(e for e in timeline_contains.values())
1822 c_ids = set(e for e in itervalues(current))
1823 ts_ids = set(e for e in itervalues(timeline_start))
1824 p_ids = set(e for e in itervalues(previous))
1825 tc_ids = set(e for e in itervalues(timeline_contains))
17421826
17431827 # If we are lazyloading room members, we explicitly add the membership events
17441828 # for the senders in the timeline into the state block returned by /sync,
17521836
17531837 if lazy_load_members:
17541838 p_ids.difference_update(
1755 e for t, e in timeline_start.iteritems()
1839 e for t, e in iteritems(timeline_start)
17561840 if t[0] == EventTypes.Member
17571841 )
17581842
3737 return value
3838
3939
40 ACCESS_TOKEN_RE = re.compile(br'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')
40 ACCESS_TOKEN_RE = re.compile(r'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')
4141
4242
4343 def redact_uri(uri):
4444 """Strips access tokens from the uri replaces with <redacted>"""
4545 return ACCESS_TOKEN_RE.sub(
46 br'\1<redacted>\3',
46 r'\1<redacted>\3',
4747 uri
4848 )
1212 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1313 # See the License for the specific language governing permissions and
1414 # limitations under the License.
15
1516 import logging
16 import urllib
17
18 from six import StringIO
19
17
18 from six import text_type
19 from six.moves import urllib
20
21 import treq
2022 from canonicaljson import encode_canonical_json, json
2123 from prometheus_client import Counter
2224
2325 from OpenSSL import SSL
2426 from OpenSSL.SSL import VERIFY_NONE
25 from twisted.internet import defer, protocol, reactor, ssl, task
27 from twisted.internet import defer, protocol, reactor, ssl
2628 from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
2729 from twisted.web._newclient import ResponseDone
2830 from twisted.web.client import (
2931 Agent,
3032 BrowserLikeRedirectAgent,
3133 ContentDecoderAgent,
32 FileBodyProducer as TwistedFileBodyProducer,
3334 GzipDecoder,
3435 HTTPConnectionPool,
3536 PartialDownloadError,
8283 if hs.config.user_agent_suffix:
8384 self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix,)
8485
85 @defer.inlineCallbacks
86 def request(self, method, uri, *args, **kwargs):
86 self.user_agent = self.user_agent.encode('ascii')
87
88 @defer.inlineCallbacks
89 def request(self, method, uri, data=b'', headers=None):
8790 # A small wrapper around self.agent.request() so we can easily attach
8891 # counters to it
8992 outgoing_requests_counter.labels(method).inc()
9295 logger.info("Sending request %s %s", method, redact_uri(uri))
9396
9497 try:
95 request_deferred = self.agent.request(
96 method, uri, *args, **kwargs
98 request_deferred = treq.request(
99 method, uri, agent=self.agent, data=data, headers=headers
97100 )
98101 add_timeout_to_deferred(
99102 request_deferred, 60, self.hs.get_reactor(),
111114 incoming_responses_counter.labels(method, "ERR").inc()
112115 logger.info(
113116 "Error sending request to %s %s: %s %s",
114 method, redact_uri(uri), type(e).__name__, e.message
117 method, redact_uri(uri), type(e).__name__, e.args[0]
115118 )
116119 raise
117120
136139 # TODO: Do we ever want to log message contents?
137140 logger.debug("post_urlencoded_get_json args: %s", args)
138141
139 query_bytes = urllib.urlencode(encode_urlencode_args(args), True)
142 query_bytes = urllib.parse.urlencode(
143 encode_urlencode_args(args), True).encode("utf8")
140144
141145 actual_headers = {
142146 b"Content-Type": [b"application/x-www-form-urlencoded"],
147151
148152 response = yield self.request(
149153 "POST",
150 uri.encode("ascii"),
154 uri,
151155 headers=Headers(actual_headers),
152 bodyProducer=FileBodyProducer(StringIO(query_bytes))
153 )
154
155 body = yield make_deferred_yieldable(readBody(response))
156 data=query_bytes
157 )
156158
157159 if 200 <= response.code < 300:
158 defer.returnValue(json.loads(body))
160 body = yield make_deferred_yieldable(treq.json_content(response))
161 defer.returnValue(body)
159162 else:
160163 raise HttpResponseException(response.code, response.phrase, body)
161164
190193
191194 response = yield self.request(
192195 "POST",
193 uri.encode("ascii"),
196 uri,
194197 headers=Headers(actual_headers),
195 bodyProducer=FileBodyProducer(StringIO(json_str))
198 data=json_str
196199 )
197200
198201 body = yield make_deferred_yieldable(readBody(response))
247250 ValueError: if the response was not JSON
248251 """
249252 if len(args):
250 query_bytes = urllib.urlencode(args, True)
253 query_bytes = urllib.parse.urlencode(args, True)
251254 uri = "%s?%s" % (uri, query_bytes)
252255
253256 json_str = encode_canonical_json(json_body)
261264
262265 response = yield self.request(
263266 "PUT",
264 uri.encode("ascii"),
267 uri,
265268 headers=Headers(actual_headers),
266 bodyProducer=FileBodyProducer(StringIO(json_str))
269 data=json_str
267270 )
268271
269272 body = yield make_deferred_yieldable(readBody(response))
292295 HttpResponseException on a non-2xx HTTP response.
293296 """
294297 if len(args):
295 query_bytes = urllib.urlencode(args, True)
298 query_bytes = urllib.parse.urlencode(args, True)
296299 uri = "%s?%s" % (uri, query_bytes)
297300
298301 actual_headers = {
303306
304307 response = yield self.request(
305308 "GET",
306 uri.encode("ascii"),
309 uri,
307310 headers=Headers(actual_headers),
308311 )
309312
338341
339342 response = yield self.request(
340343 "GET",
341 url.encode("ascii"),
344 url,
342345 headers=Headers(actual_headers),
343346 )
344347
345348 resp_headers = dict(response.headers.getAllRawHeaders())
346349
347 if 'Content-Length' in resp_headers and resp_headers['Content-Length'] > max_size:
350 if (b'Content-Length' in resp_headers and
351 int(resp_headers[b'Content-Length']) > max_size):
348352 logger.warn("Requested URL is too large > %r bytes" % (self.max_size,))
349353 raise SynapseError(
350354 502,
377381 )
378382
379383 defer.returnValue(
380 (length, resp_headers, response.request.absoluteURI, response.code),
384 (
385 length,
386 resp_headers,
387 response.request.absoluteURI.decode('ascii'),
388 response.code,
389 ),
381390 )
382391
383392
433442
434443 @defer.inlineCallbacks
435444 def post_urlencoded_get_raw(self, url, args={}):
436 query_bytes = urllib.urlencode(encode_urlencode_args(args), True)
445 query_bytes = urllib.parse.urlencode(encode_urlencode_args(args), True)
437446
438447 response = yield self.request(
439448 "POST",
440 url.encode("ascii"),
441 bodyProducer=FileBodyProducer(StringIO(query_bytes)),
449 url,
450 data=query_bytes,
442451 headers=Headers({
443452 b"Content-Type": [b"application/x-www-form-urlencoded"],
444453 b"User-Agent": [self.user_agent],
462471 def endpointForURI(self, uri):
463472 logger.info("Getting endpoint for %s", uri.toBytes())
464473
465 if uri.scheme == "http":
474 if uri.scheme == b"http":
466475 endpoint_factory = HostnameEndpoint
467 elif uri.scheme == "https":
476 elif uri.scheme == b"https":
468477 tlsCreator = self.policyForHTTPS.creatorForNetloc(uri.host, uri.port)
469478
470479 def endpoint_factory(reactor, host, port, **kw):
509518
510519
511520 def encode_urlencode_arg(arg):
512 if isinstance(arg, unicode):
521 if isinstance(arg, text_type):
513522 return arg.encode('utf-8')
514523 elif isinstance(arg, list):
515524 return [encode_urlencode_arg(i) for i in arg]
541550
542551 def creatorForNetloc(self, hostname, port):
543552 return self
544
545
546 class FileBodyProducer(TwistedFileBodyProducer):
547 """Workaround for https://twistedmatrix.com/trac/ticket/8473
548
549 We override the pauseProducing and resumeProducing methods in twisted's
550 FileBodyProducer so that they do not raise exceptions if the task has
551 already completed.
552 """
553
554 def pauseProducing(self):
555 try:
556 super(FileBodyProducer, self).pauseProducing()
557 except task.TaskDone:
558 # task has already completed
559 pass
560
561 def resumeProducing(self):
562 try:
563 super(FileBodyProducer, self).resumeProducing()
564 except task.NotPaused:
565 # task was not paused (probably because it had already completed)
566 pass
1616 import logging
1717 import random
1818 import sys
19 import urllib
20
21 from six import string_types
22 from six.moves.urllib import parse as urlparse
23
24 from canonicaljson import encode_canonical_json, json
19
20 from six import PY3, string_types
21 from six.moves import urllib
22
23 import treq
24 from canonicaljson import encode_canonical_json
2525 from prometheus_client import Counter
2626 from signedjson.sign import sign_json
2727
28 from twisted.internet import defer, protocol, reactor
28 from twisted.internet import defer, protocol
2929 from twisted.internet.error import DNSLookupError
3030 from twisted.web._newclient import ResponseDone
31 from twisted.web.client import Agent, HTTPConnectionPool, readBody
31 from twisted.web.client import Agent, HTTPConnectionPool
3232 from twisted.web.http_headers import Headers
3333
3434 import synapse.metrics
3939 HttpResponseException,
4040 SynapseError,
4141 )
42 from synapse.http import cancelled_to_request_timed_out_error
4342 from synapse.http.endpoint import matrix_federation_endpoint
4443 from synapse.util import logcontext
45 from synapse.util.async_helpers import add_timeout_to_deferred
44 from synapse.util.async_helpers import timeout_no_seriously
4645 from synapse.util.logcontext import make_deferred_yieldable
46 from synapse.util.metrics import Measure
4747
4848 logger = logging.getLogger(__name__)
4949 outbound_logger = logging.getLogger("synapse.http.outbound")
5757 MAX_LONG_RETRIES = 10
5858 MAX_SHORT_RETRIES = 3
5959
60 if PY3:
61 MAXINT = sys.maxsize
62 else:
63 MAXINT = sys.maxint
64
6065
6166 class MatrixFederationEndpointFactory(object):
6267 def __init__(self, hs):
68 self.reactor = hs.get_reactor()
6369 self.tls_client_options_factory = hs.tls_client_options_factory
6470
6571 def endpointForURI(self, uri):
66 destination = uri.netloc
72 destination = uri.netloc.decode('ascii')
6773
6874 return matrix_federation_endpoint(
69 reactor, destination, timeout=10,
75 self.reactor, destination, timeout=10,
7076 tls_client_options_factory=self.tls_client_options_factory
7177 )
7278
8490 self.hs = hs
8591 self.signing_key = hs.config.signing_key[0]
8692 self.server_name = hs.hostname
93 reactor = hs.get_reactor()
8794 pool = HTTPConnectionPool(reactor)
95 pool.retryAutomatically = False
8896 pool.maxPersistentPerHost = 5
8997 pool.cachedConnectionTimeout = 2 * 60
9098 self.agent = Agent.usingEndpointFactory(
92100 )
93101 self.clock = hs.get_clock()
94102 self._store = hs.get_datastore()
95 self.version_string = hs.version_string
103 self.version_string = hs.version_string.encode('ascii')
96104 self._next_id = 1
105 self.default_timeout = 60
97106
98107 def _create_url(self, destination, path_bytes, param_bytes, query_bytes):
99 return urlparse.urlunparse(
100 ("matrix", destination, path_bytes, param_bytes, query_bytes, "")
108 return urllib.parse.urlunparse(
109 (b"matrix", destination, path_bytes, param_bytes, query_bytes, b"")
101110 )
102111
103112 @defer.inlineCallbacks
104113 def _request(self, destination, method, path,
105 body_callback, headers_dict={}, param_bytes=b"",
106 query_bytes=b"", retry_on_dns_fail=True,
114 json=None, json_callback=None,
115 param_bytes=b"",
116 query=None, retry_on_dns_fail=True,
107117 timeout=None, long_retries=False,
108118 ignore_backoff=False,
109119 backoff_on_404=False):
110 """ Creates and sends a request to the given server
120 """
121 Creates and sends a request to the given server.
122
111123 Args:
112124 destination (str): The remote server to send the HTTP request to.
113125 method (str): HTTP method
114126 path (str): The HTTP path
127 json (dict or None): JSON to send in the body.
128 json_callback (func or None): A callback to generate the JSON.
129 query (dict or None): Query arguments.
115130 ignore_backoff (bool): true to ignore the historical backoff data
116131 and try the request anyway.
117132 backoff_on_404 (bool): Back off if we get a 404
131146 (May also fail with plenty of other Exceptions for things like DNS
132147 failures, connection failures, SSL failures.)
133148 """
149 if timeout:
150 _sec_timeout = timeout / 1000
151 else:
152 _sec_timeout = self.default_timeout
153
134154 if (
135155 self.hs.config.federation_domain_whitelist is not None and
136156 destination not in self.hs.config.federation_domain_whitelist
145165 ignore_backoff=ignore_backoff,
146166 )
147167
148 destination = destination.encode("ascii")
168 headers_dict = {}
149169 path_bytes = path.encode("ascii")
170 if query:
171 query_bytes = encode_query_args(query)
172 else:
173 query_bytes = b""
174
175 headers_dict = {
176 "User-Agent": [self.version_string],
177 "Host": [destination],
178 }
179
150180 with limiter:
151 headers_dict[b"User-Agent"] = [self.version_string]
152 headers_dict[b"Host"] = [destination]
153
154 url_bytes = self._create_url(
155 destination, path_bytes, param_bytes, query_bytes
156 )
181 url = self._create_url(
182 destination.encode("ascii"), path_bytes, param_bytes, query_bytes
183 ).decode('ascii')
157184
158185 txn_id = "%s-O-%s" % (method, self._next_id)
159 self._next_id = (self._next_id + 1) % (sys.maxint - 1)
160
161 outbound_logger.info(
162 "{%s} [%s] Sending request: %s %s",
163 txn_id, destination, method, url_bytes
164 )
186 self._next_id = (self._next_id + 1) % (MAXINT - 1)
165187
166188 # XXX: Would be much nicer to retry only at the transaction-layer
167189 # (once we have reliable transactions in place)
170192 else:
171193 retries_left = MAX_SHORT_RETRIES
172194
173 http_url_bytes = urlparse.urlunparse(
174 ("", "", path_bytes, param_bytes, query_bytes, "")
175 )
195 http_url = urllib.parse.urlunparse(
196 (b"", b"", path_bytes, param_bytes, query_bytes, b"")
197 ).decode('ascii')
176198
177199 log_result = None
178 try:
179 while True:
180 producer = None
181 if body_callback:
182 producer = body_callback(method, http_url_bytes, headers_dict)
183
184 try:
185 request_deferred = self.agent.request(
186 method,
187 url_bytes,
188 Headers(headers_dict),
189 producer
200 while True:
201 try:
202 if json_callback:
203 json = json_callback()
204
205 if json:
206 data = encode_canonical_json(json)
207 headers_dict["Content-Type"] = ["application/json"]
208 self.sign_request(
209 destination, method, http_url, headers_dict, json
190210 )
191 add_timeout_to_deferred(
192 request_deferred,
193 timeout / 1000. if timeout else 60,
194 self.hs.get_reactor(),
195 cancelled_to_request_timed_out_error,
196 )
211 else:
212 data = None
213 self.sign_request(destination, method, http_url, headers_dict)
214
215 outbound_logger.info(
216 "{%s} [%s] Sending request: %s %s",
217 txn_id, destination, method, url
218 )
219
220 request_deferred = treq.request(
221 method,
222 url,
223 headers=Headers(headers_dict),
224 data=data,
225 agent=self.agent,
226 reactor=self.hs.get_reactor(),
227 unbuffered=True
228 )
229 request_deferred.addTimeout(_sec_timeout, self.hs.get_reactor())
230
231 # Sometimes the timeout above doesn't work, so lets hack yet
232 # another layer of timeouts in in the vain hope that at some
233 # point the world made sense and this really really really
234 # should work.
235 request_deferred = timeout_no_seriously(
236 request_deferred,
237 timeout=_sec_timeout * 2,
238 reactor=self.hs.get_reactor(),
239 )
240
241 with Measure(self.clock, "outbound_request"):
197242 response = yield make_deferred_yieldable(
198243 request_deferred,
199244 )
200245
201 log_result = "%d %s" % (response.code, response.phrase,)
202 break
203 except Exception as e:
204 if not retry_on_dns_fail and isinstance(e, DNSLookupError):
205 logger.warn(
206 "DNS Lookup failed to %s with %s",
207 destination,
208 e
209 )
210 log_result = "DNS Lookup failed to %s with %s" % (
211 destination, e
212 )
213 raise
214
246 log_result = "%d %s" % (response.code, response.phrase,)
247 break
248 except Exception as e:
249 if not retry_on_dns_fail and isinstance(e, DNSLookupError):
215250 logger.warn(
216 "{%s} Sending request failed to %s: %s %s: %s",
251 "DNS Lookup failed to %s with %s",
252 destination,
253 e
254 )
255 log_result = "DNS Lookup failed to %s with %s" % (
256 destination, e
257 )
258 raise
259
260 logger.warn(
261 "{%s} Sending request failed to %s: %s %s: %s",
262 txn_id,
263 destination,
264 method,
265 url,
266 _flatten_response_never_received(e),
267 )
268
269 log_result = _flatten_response_never_received(e)
270
271 if retries_left and not timeout:
272 if long_retries:
273 delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
274 delay = min(delay, 60)
275 delay *= random.uniform(0.8, 1.4)
276 else:
277 delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
278 delay = min(delay, 2)
279 delay *= random.uniform(0.8, 1.4)
280
281 logger.debug(
282 "{%s} Waiting %s before sending to %s...",
217283 txn_id,
218 destination,
219 method,
220 url_bytes,
221 _flatten_response_never_received(e),
284 delay,
285 destination
222286 )
223287
224 log_result = _flatten_response_never_received(e)
225
226 if retries_left and not timeout:
227 if long_retries:
228 delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
229 delay = min(delay, 60)
230 delay *= random.uniform(0.8, 1.4)
231 else:
232 delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
233 delay = min(delay, 2)
234 delay *= random.uniform(0.8, 1.4)
235
236 yield self.clock.sleep(delay)
237 retries_left -= 1
238 else:
239 raise
240 finally:
241 outbound_logger.info(
242 "{%s} [%s] Result: %s",
243 txn_id,
244 destination,
245 log_result,
246 )
288 yield self.clock.sleep(delay)
289 retries_left -= 1
290 else:
291 raise
292 finally:
293 outbound_logger.info(
294 "{%s} [%s] Result: %s",
295 txn_id,
296 destination,
297 log_result,
298 )
247299
248300 if 200 <= response.code < 300:
249301 pass
251303 # :'(
252304 # Update transactions table?
253305 with logcontext.PreserveLoggingContext():
254 body = yield readBody(response)
306 d = treq.content(response)
307 d.addTimeout(_sec_timeout, self.hs.get_reactor())
308 body = yield make_deferred_yieldable(d)
255309 raise HttpResponseException(
256310 response.code, response.phrase, body
257311 )
296350 auth_headers = []
297351
298352 for key, sig in request["signatures"][self.server_name].items():
299 auth_headers.append(bytes(
353 auth_headers.append((
300354 "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
301355 self.server_name, key, sig,
302 )
303 ))
356 )).encode('ascii')
357 )
304358
305359 headers_dict[b"Authorization"] = auth_headers
306360
346400 """
347401
348402 if not json_data_callback:
349 def json_data_callback():
350 return data
351
352 def body_callback(method, url_bytes, headers_dict):
353 json_data = json_data_callback()
354 self.sign_request(
355 destination, method, url_bytes, headers_dict, json_data
356 )
357 producer = _JsonProducer(json_data)
358 return producer
403 json_data_callback = lambda: data
359404
360405 response = yield self._request(
361406 destination,
362407 "PUT",
363408 path,
364 body_callback=body_callback,
365 headers_dict={"Content-Type": ["application/json"]},
366 query_bytes=encode_query_args(args),
409 json_callback=json_data_callback,
410 query=args,
367411 long_retries=long_retries,
368412 timeout=timeout,
369413 ignore_backoff=ignore_backoff,
375419 check_content_type_is_json(response.headers)
376420
377421 with logcontext.PreserveLoggingContext():
378 body = yield readBody(response)
379 defer.returnValue(json.loads(body))
422 d = treq.json_content(response)
423 d.addTimeout(self.default_timeout, self.hs.get_reactor())
424 body = yield make_deferred_yieldable(d)
425 defer.returnValue(body)
380426
381427 @defer.inlineCallbacks
382428 def post_json(self, destination, path, data={}, long_retries=False,
409455 Fails with ``FederationDeniedError`` if this destination
410456 is not on our federation whitelist
411457 """
412
413 def body_callback(method, url_bytes, headers_dict):
414 self.sign_request(
415 destination, method, url_bytes, headers_dict, data
416 )
417 return _JsonProducer(data)
418
419458 response = yield self._request(
420459 destination,
421460 "POST",
422461 path,
423 query_bytes=encode_query_args(args),
424 body_callback=body_callback,
425 headers_dict={"Content-Type": ["application/json"]},
462 query=args,
463 json=data,
426464 long_retries=long_retries,
427465 timeout=timeout,
428466 ignore_backoff=ignore_backoff,
433471 check_content_type_is_json(response.headers)
434472
435473 with logcontext.PreserveLoggingContext():
436 body = yield readBody(response)
437
438 defer.returnValue(json.loads(body))
474 d = treq.json_content(response)
475 if timeout:
476 _sec_timeout = timeout / 1000
477 else:
478 _sec_timeout = self.default_timeout
479
480 d.addTimeout(_sec_timeout, self.hs.get_reactor())
481 body = yield make_deferred_yieldable(d)
482
483 defer.returnValue(body)
439484
440485 @defer.inlineCallbacks
441486 def get_json(self, destination, path, args=None, retry_on_dns_fail=True,
470515
471516 logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail)
472517
473 def body_callback(method, url_bytes, headers_dict):
474 self.sign_request(destination, method, url_bytes, headers_dict)
475 return None
476
477518 response = yield self._request(
478519 destination,
479520 "GET",
480521 path,
481 query_bytes=encode_query_args(args),
482 body_callback=body_callback,
522 query=args,
483523 retry_on_dns_fail=retry_on_dns_fail,
484524 timeout=timeout,
485525 ignore_backoff=ignore_backoff,
490530 check_content_type_is_json(response.headers)
491531
492532 with logcontext.PreserveLoggingContext():
493 body = yield readBody(response)
494
495 defer.returnValue(json.loads(body))
533 d = treq.json_content(response)
534 d.addTimeout(self.default_timeout, self.hs.get_reactor())
535 body = yield make_deferred_yieldable(d)
536
537 defer.returnValue(body)
496538
497539 @defer.inlineCallbacks
498540 def delete_json(self, destination, path, long_retries=False,
522564 Fails with ``FederationDeniedError`` if this destination
523565 is not on our federation whitelist
524566 """
525
526567 response = yield self._request(
527568 destination,
528569 "DELETE",
529570 path,
530 query_bytes=encode_query_args(args),
531 headers_dict={"Content-Type": ["application/json"]},
571 query=args,
532572 long_retries=long_retries,
533573 timeout=timeout,
534574 ignore_backoff=ignore_backoff,
539579 check_content_type_is_json(response.headers)
540580
541581 with logcontext.PreserveLoggingContext():
542 body = yield readBody(response)
543
544 defer.returnValue(json.loads(body))
582 d = treq.json_content(response)
583 d.addTimeout(self.default_timeout, self.hs.get_reactor())
584 body = yield make_deferred_yieldable(d)
585
586 defer.returnValue(body)
545587
546588 @defer.inlineCallbacks
547589 def get_file(self, destination, path, output_stream, args={},
568610 Fails with ``FederationDeniedError`` if this destination
569611 is not on our federation whitelist
570612 """
571
572 encoded_args = {}
573 for k, vs in args.items():
574 if isinstance(vs, string_types):
575 vs = [vs]
576 encoded_args[k] = [v.encode("UTF-8") for v in vs]
577
578 query_bytes = urllib.urlencode(encoded_args, True)
579 logger.debug("Query bytes: %s Retry DNS: %s", query_bytes, retry_on_dns_fail)
580
581 def body_callback(method, url_bytes, headers_dict):
582 self.sign_request(destination, method, url_bytes, headers_dict)
583 return None
584
585613 response = yield self._request(
586614 destination,
587615 "GET",
588616 path,
589 query_bytes=query_bytes,
590 body_callback=body_callback,
617 query=args,
591618 retry_on_dns_fail=retry_on_dns_fail,
592619 ignore_backoff=ignore_backoff,
593620 )
596623
597624 try:
598625 with logcontext.PreserveLoggingContext():
599 length = yield _readBodyToFile(
600 response, output_stream, max_size
601 )
626 d = _readBodyToFile(response, output_stream, max_size)
627 d.addTimeout(self.default_timeout, self.hs.get_reactor())
628 length = yield make_deferred_yieldable(d)
602629 except Exception:
603630 logger.exception("Failed to download body")
604631 raise
638665 return d
639666
640667
641 class _JsonProducer(object):
642 """ Used by the twisted http client to create the HTTP body from json
643 """
644 def __init__(self, jsn):
645 self.reset(jsn)
646
647 def reset(self, jsn):
648 self.body = encode_canonical_json(jsn)
649 self.length = len(self.body)
650
651 def startProducing(self, consumer):
652 consumer.write(self.body)
653 return defer.succeed(None)
654
655 def pauseProducing(self):
656 pass
657
658 def stopProducing(self):
659 pass
660
661 def resumeProducing(self):
662 pass
663
664
665668 def _flatten_response_never_received(e):
666669 if hasattr(e, "reasons"):
667670 reasons = ", ".join(
692695 "No Content-Type header"
693696 )
694697
695 c_type = c_type[0] # only the first header
698 c_type = c_type[0].decode('ascii') # only the first header
696699 val, options = cgi.parse_header(c_type)
697700 if val != "application/json":
698701 raise RuntimeError(
710713 vs = [vs]
711714 encoded_args[k] = [v.encode("UTF-8") for v in vs]
712715
713 query_bytes = urllib.urlencode(encoded_args, True)
714
715 return query_bytes
716 query_bytes = urllib.parse.urlencode(encoded_args, True)
717
718 return query_bytes.encode('utf8')
8484 return "%s-%i" % (self.method, self.request_seq)
8585
8686 def get_redacted_uri(self):
87 return redact_uri(self.uri)
87 uri = self.uri
88 if isinstance(uri, bytes):
89 uri = self.uri.decode('ascii')
90 return redact_uri(uri)
8891
8992 def get_user_agent(self):
9093 return self.requestHeaders.getRawHeaders(b"User-Agent", [None])[-1]
203206 self.start_time = time.time()
204207 self.request_metrics = RequestMetrics()
205208 self.request_metrics.start(
206 self.start_time, name=servlet_name, method=self.method,
209 self.start_time, name=servlet_name, method=self.method.decode('ascii'),
207210 )
208211
209212 self.site.access_logger.info(
210213 "%s - %s - Received request: %s %s",
211214 self.getClientIP(),
212215 self.site.site_tag,
213 self.method,
216 self.method.decode('ascii'),
214217 self.get_redacted_uri()
215218 )
216219
1717 import logging
1818 import os
1919 import platform
20 import threading
2021 import time
22
23 import six
2124
2225 import attr
2326 from prometheus_client import Counter, Gauge, Histogram
6770 return
6871
6972 if isinstance(calls, dict):
70 for k, v in calls.items():
73 for k, v in six.iteritems(calls):
7174 g.add_metric(k, v)
7275 else:
7376 g.add_metric([], calls)
7881 self._register()
7982
8083 def _register(self):
84 if self.name in all_gauges.keys():
85 logger.warning("%s already registered, reregistering" % (self.name,))
86 REGISTRY.unregister(all_gauges.pop(self.name))
87
88 REGISTRY.register(self)
89 all_gauges[self.name] = self
90
91
92 class InFlightGauge(object):
93 """Tracks number of things (e.g. requests, Measure blocks, etc) in flight
94 at any given time.
95
96 Each InFlightGauge will create a metric called `<name>_total` that counts
97 the number of in flight blocks, as well as a metrics for each item in the
98 given `sub_metrics` as `<name>_<sub_metric>` which will get updated by the
99 callbacks.
100
101 Args:
102 name (str)
103 desc (str)
104 labels (list[str])
105 sub_metrics (list[str]): A list of sub metrics that the callbacks
106 will update.
107 """
108
109 def __init__(self, name, desc, labels, sub_metrics):
110 self.name = name
111 self.desc = desc
112 self.labels = labels
113 self.sub_metrics = sub_metrics
114
115 # Create a class which have the sub_metrics values as attributes, which
116 # default to 0 on initialization. Used to pass to registered callbacks.
117 self._metrics_class = attr.make_class(
118 "_MetricsEntry",
119 attrs={x: attr.ib(0) for x in sub_metrics},
120 slots=True,
121 )
122
123 # Counts number of in flight blocks for a given set of label values
124 self._registrations = {}
125
126 # Protects access to _registrations
127 self._lock = threading.Lock()
128
129 self._register_with_collector()
130
131 def register(self, key, callback):
132 """Registers that we've entered a new block with labels `key`.
133
134 `callback` gets called each time the metrics are collected. The same
135 value must also be given to `unregister`.
136
137 `callback` gets called with an object that has an attribute per
138 sub_metric, which should be updated with the necessary values. Note that
139 the metrics object is shared between all callbacks registered with the
140 same key.
141
142 Note that `callback` may be called on a separate thread.
143 """
144 with self._lock:
145 self._registrations.setdefault(key, set()).add(callback)
146
147 def unregister(self, key, callback):
148 """Registers that we've exited a block with labels `key`.
149 """
150
151 with self._lock:
152 self._registrations.setdefault(key, set()).discard(callback)
153
154 def collect(self):
155 """Called by prometheus client when it reads metrics.
156
157 Note: may be called by a separate thread.
158 """
159 in_flight = GaugeMetricFamily(self.name + "_total", self.desc, labels=self.labels)
160
161 metrics_by_key = {}
162
163 # We copy so that we don't mutate the list while iterating
164 with self._lock:
165 keys = list(self._registrations)
166
167 for key in keys:
168 with self._lock:
169 callbacks = set(self._registrations[key])
170
171 in_flight.add_metric(key, len(callbacks))
172
173 metrics = self._metrics_class()
174 metrics_by_key[key] = metrics
175 for callback in callbacks:
176 callback(metrics)
177
178 yield in_flight
179
180 for name in self.sub_metrics:
181 gauge = GaugeMetricFamily("_".join([self.name, name]), "", labels=self.labels)
182 for key, metrics in six.iteritems(metrics_by_key):
183 gauge.add_metric(key, getattr(metrics, name))
184 yield gauge
185
186 def _register_with_collector(self):
81187 if self.name in all_gauges.keys():
82188 logger.warning("%s already registered, reregistering" % (self.name,))
83189 REGISTRY.unregister(all_gauges.pop(self.name))
1414 # limitations under the License.
1515 import logging
1616
17 import six
18
1719 from prometheus_client import Counter
1820
1921 from twisted.internet import defer
2426 from synapse.util.metrics import Measure
2527
2628 from . import push_rule_evaluator, push_tools
29
30 if six.PY3:
31 long = int
2732
2833 logger = logging.getLogger(__name__)
2934
95100
96101 @defer.inlineCallbacks
97102 def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
98 self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
103 self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering or 0)
99104 yield self._process()
100105
101106 @defer.inlineCallbacks
1616 import email.utils
1717 import logging
1818 import time
19 import urllib
2019 from email.mime.multipart import MIMEMultipart
2120 from email.mime.text import MIMEText
21
22 from six.moves import urllib
2223
2324 import bleach
2425 import jinja2
473474 # XXX: make r0 once API is stable
474475 return "%s_matrix/client/unstable/pushers/remove?%s" % (
475476 self.hs.config.public_baseurl,
476 urllib.urlencode(params),
477 urllib.parse.urlencode(params),
477478 )
478479
479480
560561 return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
561562 config.public_baseurl,
562563 serverAndMediaId,
563 urllib.urlencode(params),
564 urllib.parse.urlencode(params),
564565 fragment or "",
565566 )
566567
3939 "pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"],
4040 "service_identity>=1.0.0": ["service_identity>=1.0.0"],
4141 "Twisted>=17.1.0": ["twisted>=17.1.0"],
42 "treq>=15.1": ["treq>=15.1"],
4243
43 # We use crypto.get_elliptic_curve which is only supported in >=0.15
44 "pyopenssl>=0.15": ["OpenSSL>=0.15"],
44 # Twisted has required pyopenssl 16.0 since about Twisted 16.6.
45 "pyopenssl>=16.0.0": ["OpenSSL>=16.0.0"],
4546
4647 "pyyaml": ["yaml"],
4748 "pyasn1": ["pyasn1"],
1212 # See the License for the specific language governing permissions and
1313 # limitations under the License.
1414
15 import six
16
1517 from synapse.storage import DataStore
1618 from synapse.storage.end_to_end_keys import EndToEndKeyStore
1719 from synapse.util.caches.stream_change_cache import StreamChangeCache
1820
1921 from ._base import BaseSlavedStore
2022 from ._slaved_id_tracker import SlavedIdTracker
23
24
25 def __func__(inp):
26 if six.PY3:
27 return inp
28 else:
29 return inp.__func__
2130
2231
2332 class SlavedDeviceStore(BaseSlavedStore):
3746 "DeviceListFederationStreamChangeCache", device_list_max,
3847 )
3948
40 get_device_stream_token = DataStore.get_device_stream_token.__func__
41 get_user_whose_devices_changed = DataStore.get_user_whose_devices_changed.__func__
42 get_devices_by_remote = DataStore.get_devices_by_remote.__func__
43 _get_devices_by_remote_txn = DataStore._get_devices_by_remote_txn.__func__
44 _get_e2e_device_keys_txn = DataStore._get_e2e_device_keys_txn.__func__
45 mark_as_sent_devices_by_remote = DataStore.mark_as_sent_devices_by_remote.__func__
49 get_device_stream_token = __func__(DataStore.get_device_stream_token)
50 get_user_whose_devices_changed = __func__(DataStore.get_user_whose_devices_changed)
51 get_devices_by_remote = __func__(DataStore.get_devices_by_remote)
52 _get_devices_by_remote_txn = __func__(DataStore._get_devices_by_remote_txn)
53 _get_e2e_device_keys_txn = __func__(DataStore._get_e2e_device_keys_txn)
54 mark_as_sent_devices_by_remote = __func__(DataStore.mark_as_sent_devices_by_remote)
4655 _mark_as_sent_devices_by_remote_txn = (
47 DataStore._mark_as_sent_devices_by_remote_txn.__func__
56 __func__(DataStore._mark_as_sent_devices_by_remote_txn)
4857 )
4958 count_e2e_one_time_keys = EndToEndKeyStore.__dict__["count_e2e_one_time_keys"]
5059
589589 pending_commands = LaterGauge(
590590 "synapse_replication_tcp_protocol_pending_commands",
591591 "",
592 ["name", "conn_id"],
592 ["name"],
593593 lambda: {
594 (p.name, p.conn_id): len(p.pending_commands) for p in connected_connections
594 (p.name,): len(p.pending_commands) for p in connected_connections
595595 },
596596 )
597597
606606 transport_send_buffer = LaterGauge(
607607 "synapse_replication_tcp_protocol_transport_send_buffer",
608608 "",
609 ["name", "conn_id"],
609 ["name"],
610610 lambda: {
611 (p.name, p.conn_id): transport_buffer_size(p) for p in connected_connections
611 (p.name,): transport_buffer_size(p) for p in connected_connections
612612 },
613613 )
614614
631631 tcp_transport_kernel_send_buffer = LaterGauge(
632632 "synapse_replication_tcp_protocol_transport_kernel_send_buffer",
633633 "",
634 ["name", "conn_id"],
634 ["name"],
635635 lambda: {
636 (p.name, p.conn_id): transport_kernel_read_buffer_size(p, False)
636 (p.name,): transport_kernel_read_buffer_size(p, False)
637637 for p in connected_connections
638638 },
639639 )
642642 tcp_transport_kernel_read_buffer = LaterGauge(
643643 "synapse_replication_tcp_protocol_transport_kernel_read_buffer",
644644 "",
645 ["name", "conn_id"],
645 ["name"],
646646 lambda: {
647 (p.name, p.conn_id): transport_kernel_read_buffer_size(p, True)
647 (p.name,): transport_kernel_read_buffer_size(p, True)
648648 for p in connected_connections
649649 },
650650 )
653653 tcp_inbound_commands = LaterGauge(
654654 "synapse_replication_tcp_protocol_inbound_commands",
655655 "",
656 ["command", "name", "conn_id"],
656 ["command", "name"],
657657 lambda: {
658 (k[0], p.name, p.conn_id): count
658 (k[0], p.name,): count
659659 for p in connected_connections
660660 for k, count in iteritems(p.inbound_commands_counter)
661661 },
664664 tcp_outbound_commands = LaterGauge(
665665 "synapse_replication_tcp_protocol_outbound_commands",
666666 "",
667 ["command", "name", "conn_id"],
667 ["command", "name"],
668668 lambda: {
669 (k[0], p.name, p.conn_id): count
669 (k[0], p.name,): count
670670 for p in connected_connections
671671 for k, count in iteritems(p.outbound_commands_counter)
672672 },
195195 )
196196
197197 if len(rows) >= MAX_EVENTS_BEHIND:
198 raise Exception("stream %s has fallen behined" % (self.NAME))
198 raise Exception("stream %s has fallen behind" % (self.NAME))
199199 else:
200200 rows = yield self.update_function(
201201 from_token, current_token,
100100
101101 nonce = self.hs.get_secrets().token_hex(64)
102102 self.nonces[nonce] = int(self.reactor.seconds())
103 return (200, {"nonce": nonce.encode('ascii')})
103 return (200, {"nonce": nonce})
104104
105105 @defer.inlineCallbacks
106106 def on_POST(self, request):
163163 key=self.hs.config.registration_shared_secret.encode(),
164164 digestmod=hashlib.sha1,
165165 )
166 want_mac.update(nonce)
166 want_mac.update(nonce.encode('utf8'))
167167 want_mac.update(b"\x00")
168168 want_mac.update(username)
169169 want_mac.update(b"\x00")
172172 want_mac.update(b"admin" if admin else b"notadmin")
173173 want_mac = want_mac.hexdigest()
174174
175 if not hmac.compare_digest(want_mac, got_mac.encode('ascii')):
175 if not hmac.compare_digest(
176 want_mac.encode('ascii'),
177 got_mac.encode('ascii')
178 ):
176179 raise SynapseError(403, "HMAC incorrect")
177180
178181 # Reuse the parts of RegisterRestServlet to reduce code duplication
4444 is_guest = requester.is_guest
4545 room_id = None
4646 if is_guest:
47 if "room_id" not in request.args:
47 if b"room_id" not in request.args:
4848 raise SynapseError(400, "Guest users must specify room_id param")
49 if "room_id" in request.args:
50 room_id = request.args["room_id"][0]
49 if b"room_id" in request.args:
50 room_id = request.args[b"room_id"][0].decode('ascii')
5151
5252 pagin_config = PaginationConfig.from_request(request)
5353 timeout = EventStreamRestServlet.DEFAULT_LONGPOLL_TIME_MS
54 if "timeout" in request.args:
54 if b"timeout" in request.args:
5555 try:
56 timeout = int(request.args["timeout"][0])
56 timeout = int(request.args[b"timeout"][0])
5757 except ValueError:
5858 raise SynapseError(400, "timeout must be in milliseconds.")
5959
60 as_client_event = "raw" not in request.args
60 as_client_event = b"raw" not in request.args
6161
6262 chunk = yield self.event_stream_handler.get_stream(
6363 requester.user.to_string(),
3131 @defer.inlineCallbacks
3232 def on_GET(self, request):
3333 requester = yield self.auth.get_user_by_req(request)
34 as_client_event = "raw" not in request.args
34 as_client_event = b"raw" not in request.args
3535 pagination_config = PaginationConfig.from_request(request)
3636 include_archived = parse_boolean(request, "archived", default=False)
3737 content = yield self.initial_sync_handler.snapshot_all_rooms(
1313 # limitations under the License.
1414
1515 import logging
16 import urllib
1716 import xml.etree.ElementTree as ET
1817
19 from six.moves.urllib import parse as urlparse
18 from six.moves import urllib
2019
2120 from canonicaljson import json
2221 from saml2 import BINDING_HTTP_POST, config
133132 LoginRestServlet.SAML2_TYPE):
134133 relay_state = ""
135134 if "relay_state" in login_submission:
136 relay_state = "&RelayState=" + urllib.quote(
135 relay_state = "&RelayState=" + urllib.parse.quote(
137136 login_submission["relay_state"])
138137 result = {
139138 "uri": "%s%s" % (self.idp_redirect_url, relay_state)
365364 (user_id, token) = yield handler.register_saml2(username)
366365 # Forward to the RelayState callback along with ava
367366 if 'RelayState' in request.args:
368 request.redirect(urllib.unquote(
367 request.redirect(urllib.parse.unquote(
369368 request.args['RelayState'][0]) +
370369 '?status=authenticated&access_token=' +
371370 token + '&user_id=' + user_id + '&ava=' +
376375 "user_id": user_id, "token": token,
377376 "ava": saml2_auth.ava}))
378377 elif 'RelayState' in request.args:
379 request.redirect(urllib.unquote(
378 request.redirect(urllib.parse.unquote(
380379 request.args['RelayState'][0]) +
381380 '?status=not_authenticated')
382381 finish_request(request)
389388
390389 def __init__(self, hs):
391390 super(CasRedirectServlet, self).__init__(hs)
392 self.cas_server_url = hs.config.cas_server_url
393 self.cas_service_url = hs.config.cas_service_url
391 self.cas_server_url = hs.config.cas_server_url.encode('ascii')
392 self.cas_service_url = hs.config.cas_service_url.encode('ascii')
394393
395394 def on_GET(self, request):
396395 args = request.args
397 if "redirectUrl" not in args:
396 if b"redirectUrl" not in args:
398397 return (400, "Redirect URL not specified for CAS auth")
399 client_redirect_url_param = urllib.urlencode({
400 "redirectUrl": args["redirectUrl"][0]
401 })
402 hs_redirect_url = self.cas_service_url + "/_matrix/client/api/v1/login/cas/ticket"
403 service_param = urllib.urlencode({
404 "service": "%s?%s" % (hs_redirect_url, client_redirect_url_param)
405 })
406 request.redirect("%s/login?%s" % (self.cas_server_url, service_param))
398 client_redirect_url_param = urllib.parse.urlencode({
399 b"redirectUrl": args[b"redirectUrl"][0]
400 }).encode('ascii')
401 hs_redirect_url = (self.cas_service_url +
402 b"/_matrix/client/api/v1/login/cas/ticket")
403 service_param = urllib.parse.urlencode({
404 b"service": b"%s?%s" % (hs_redirect_url, client_redirect_url_param)
405 }).encode('ascii')
406 request.redirect(b"%s/login?%s" % (self.cas_server_url, service_param))
407407 finish_request(request)
408408
409409
421421
422422 @defer.inlineCallbacks
423423 def on_GET(self, request):
424 client_redirect_url = request.args["redirectUrl"][0]
424 client_redirect_url = request.args[b"redirectUrl"][0]
425425 http_client = self.hs.get_simple_http_client()
426426 uri = self.cas_server_url + "/proxyValidate"
427427 args = {
428 "ticket": request.args["ticket"],
428 "ticket": request.args[b"ticket"][0].decode('ascii'),
429429 "service": self.cas_service_url
430430 }
431431 try:
470470 finish_request(request)
471471
472472 def add_login_token_to_redirect_url(self, url, token):
473 url_parts = list(urlparse.urlparse(url))
474 query = dict(urlparse.parse_qsl(url_parts[4]))
473 url_parts = list(urllib.parse.urlparse(url))
474 query = dict(urllib.parse.parse_qsl(url_parts[4]))
475475 query.update({"loginToken": token})
476 url_parts[4] = urllib.urlencode(query)
477 return urlparse.urlunparse(url_parts)
476 url_parts[4] = urllib.parse.urlencode(query).encode('ascii')
477 return urllib.parse.urlunparse(url_parts)
478478
479479 def parse_cas_response(self, cas_response_body):
480480 user = None
4545 try:
4646 priority_class = _priority_class_from_spec(spec)
4747 except InvalidRuleException as e:
48 raise SynapseError(400, e.message)
48 raise SynapseError(400, str(e))
4949
5050 requester = yield self.auth.get_user_by_req(request)
5151
7272 content,
7373 )
7474 except InvalidRuleException as e:
75 raise SynapseError(400, e.message)
75 raise SynapseError(400, str(e))
7676
7777 before = parse_string(request, "before")
7878 if before:
9494 )
9595 self.notify_user(user_id)
9696 except InconsistentRuleException as e:
97 raise SynapseError(400, e.message)
97 raise SynapseError(400, str(e))
9898 except RuleNotFoundException as e:
99 raise SynapseError(400, e.message)
99 raise SynapseError(400, str(e))
100100
101101 defer.returnValue((200, {}))
102102
141141 PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR
142142 )
143143
144 if path[0] == '':
144 if path[0] == b'':
145145 defer.returnValue((200, rules))
146 elif path[0] == 'global':
147 path = path[1:]
146 elif path[0] == b'global':
147 path = [x.decode('ascii') for x in path[1:]]
148148 result = _filter_ruleset_with_path(rules['global'], path)
149149 defer.returnValue((200, result))
150150 else:
191191 def _rule_spec_from_path(path):
192192 if len(path) < 2:
193193 raise UnrecognizedRequestError()
194 if path[0] != 'pushrules':
195 raise UnrecognizedRequestError()
196
197 scope = path[1]
194 if path[0] != b'pushrules':
195 raise UnrecognizedRequestError()
196
197 scope = path[1].decode('ascii')
198198 path = path[2:]
199199 if scope != 'global':
200200 raise UnrecognizedRequestError()
202202 if len(path) == 0:
203203 raise UnrecognizedRequestError()
204204
205 template = path[0]
205 template = path[0].decode('ascii')
206206 path = path[1:]
207207
208208 if len(path) == 0 or len(path[0]) == 0:
209209 raise UnrecognizedRequestError()
210210
211 rule_id = path[0]
211 rule_id = path[0].decode('ascii')
212212
213213 spec = {
214214 'scope': scope,
219219 path = path[1:]
220220
221221 if len(path) > 0 and len(path[0]) > 0:
222 spec['attr'] = path[0]
222 spec['attr'] = path[0].decode('ascii')
223223
224224 return spec
225225
5858 ]
5959
6060 for p in pushers:
61 for k, v in p.items():
61 for k, v in list(p.items()):
6262 if k not in allowed_keys:
6363 del p[k]
6464
125125 profile_tag=content.get('profile_tag', ""),
126126 )
127127 except PusherConfigException as pce:
128 raise SynapseError(400, "Config Error: " + pce.message,
128 raise SynapseError(400, "Config Error: " + str(pce),
129129 errcode=Codes.MISSING_PARAM)
130130
131131 self.notifier.on_new_replication_data()
206206 "sender": requester.user.to_string(),
207207 }
208208
209 if 'ts' in request.args and requester.app_service:
209 if b'ts' in request.args and requester.app_service:
210210 event_dict['origin_server_ts'] = parse_integer(request, "ts", 0)
211211
212212 event = yield self.event_creation_hander.create_and_send_nonmember_event(
254254 if RoomID.is_valid(room_identifier):
255255 room_id = room_identifier
256256 try:
257 remote_room_hosts = request.args["server_name"]
257 remote_room_hosts = [
258 x.decode('ascii') for x in request.args[b"server_name"]
259 ]
258260 except Exception:
259261 remote_room_hosts = None
260262 elif RoomAlias.is_valid(room_identifier):
460462 pagination_config = PaginationConfig.from_request(
461463 request, default_limit=10,
462464 )
463 as_client_event = "raw" not in request.args
464 filter_bytes = parse_string(request, "filter")
465 as_client_event = b"raw" not in request.args
466 filter_bytes = parse_string(request, b"filter", encoding=None)
465467 if filter_bytes:
466 filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
468 filter_json = urlparse.unquote(filter_bytes.decode("UTF-8"))
467469 event_filter = Filter(json.loads(filter_json))
468470 else:
469471 event_filter = None
559561 # picking the API shape for symmetry with /messages
560562 filter_bytes = parse_string(request, "filter")
561563 if filter_bytes:
562 filter_json = urlparse.unquote(filter_bytes).decode("UTF-8")
564 filter_json = urlparse.unquote(filter_bytes)
563565 event_filter = Filter(json.loads(filter_json))
564566 else:
565567 event_filter = None
4141 expiry = (self.hs.get_clock().time_msec() + userLifetime) / 1000
4242 username = "%d:%s" % (expiry, requester.user.to_string())
4343
44 mac = hmac.new(turnSecret, msg=username, digestmod=hashlib.sha1)
44 mac = hmac.new(
45 turnSecret.encode(),
46 msg=username.encode(),
47 digestmod=hashlib.sha1
48 )
4549 # We need to use standard padded base64 encoding here
4650 # encode_base64 because we need to add the standard padding to get the
4751 # same result as the TURN server.
5252
5353 if not check_3pid_allowed(self.hs, "email", body['email']):
5454 raise SynapseError(
55 403, "Third party identifier is not allowed", Codes.THREEPID_DENIED,
55 403,
56 "Your email domain is not authorized on this server",
57 Codes.THREEPID_DENIED,
5658 )
5759
5860 existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
8890
8991 if not check_3pid_allowed(self.hs, "msisdn", msisdn):
9092 raise SynapseError(
91 403, "Third party identifier is not allowed", Codes.THREEPID_DENIED,
93 403,
94 "Account phone numbers are not authorized on this server",
95 Codes.THREEPID_DENIED,
9296 )
9397
9498 existingUid = yield self.datastore.get_user_id_by_threepid(
240244
241245 if not check_3pid_allowed(self.hs, "email", body['email']):
242246 raise SynapseError(
243 403, "Third party identifier is not allowed", Codes.THREEPID_DENIED,
247 403,
248 "Your email domain is not authorized on this server",
249 Codes.THREEPID_DENIED,
244250 )
245251
246252 existingUid = yield self.datastore.get_user_id_by_threepid(
275281
276282 if not check_3pid_allowed(self.hs, "msisdn", msisdn):
277283 raise SynapseError(
278 403, "Third party identifier is not allowed", Codes.THREEPID_DENIED,
284 403,
285 "Account phone numbers are not authorized on this server",
286 Codes.THREEPID_DENIED,
279287 )
280288
281289 existingUid = yield self.datastore.get_user_id_by_threepid(
7474
7575 if not check_3pid_allowed(self.hs, "email", body['email']):
7676 raise SynapseError(
77 403, "Third party identifier is not allowed", Codes.THREEPID_DENIED,
77 403,
78 "Your email domain is not authorized to register on this server",
79 Codes.THREEPID_DENIED,
7880 )
7981
8082 existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
114116
115117 if not check_3pid_allowed(self.hs, "msisdn", msisdn):
116118 raise SynapseError(
117 403, "Third party identifier is not allowed", Codes.THREEPID_DENIED,
119 403,
120 "Phone numbers are not authorized to register on this server",
121 Codes.THREEPID_DENIED,
118122 )
119123
120124 existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
372376
373377 if not check_3pid_allowed(self.hs, medium, address):
374378 raise SynapseError(
375 403, "Third party identifier is not allowed",
379 403,
380 "Third party identifiers (email/phone numbers)" +
381 " are not authorized on this server",
376382 Codes.THREEPID_DENIED,
377383 )
378384
2424 from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection
2525 from synapse.events.utils import (
2626 format_event_for_client_v2_without_room_id,
27 format_event_raw,
2728 serialize_event,
2829 )
2930 from synapse.handlers.presence import format_user_presence_state
8788
8889 @defer.inlineCallbacks
8990 def on_GET(self, request):
90 if "from" in request.args:
91 if b"from" in request.args:
9192 # /events used to use 'from', but /sync uses 'since'.
9293 # Lets be helpful and whine if we see a 'from'.
9394 raise SynapseError(
174175
175176 @staticmethod
176177 def encode_response(time_now, sync_result, access_token_id, filter):
178 if filter.event_format == 'client':
179 event_formatter = format_event_for_client_v2_without_room_id
180 elif filter.event_format == 'federation':
181 event_formatter = format_event_raw
182 else:
183 raise Exception("Unknown event format %s" % (filter.event_format, ))
184
177185 joined = SyncRestServlet.encode_joined(
178 sync_result.joined, time_now, access_token_id, filter.event_fields
186 sync_result.joined, time_now, access_token_id,
187 filter.event_fields,
188 event_formatter,
179189 )
180190
181191 invited = SyncRestServlet.encode_invited(
182192 sync_result.invited, time_now, access_token_id,
193 event_formatter,
183194 )
184195
185196 archived = SyncRestServlet.encode_archived(
186197 sync_result.archived, time_now, access_token_id,
187198 filter.event_fields,
199 event_formatter,
188200 )
189201
190202 return {
227239 }
228240
229241 @staticmethod
230 def encode_joined(rooms, time_now, token_id, event_fields):
242 def encode_joined(rooms, time_now, token_id, event_fields, event_formatter):
231243 """
232244 Encode the joined rooms in a sync result
233245
239251 token_id(int): ID of the user's auth token - used for namespacing
240252 of transaction IDs
241253 event_fields(list<str>): List of event fields to include. If empty,
242 all fields will be returned.
254 all fields will be returned.
255 event_formatter (func[dict]): function to convert from federation format
256 to client format
243257 Returns:
244258 dict[str, dict[str, object]]: the joined rooms list, in our
245259 response format
247261 joined = {}
248262 for room in rooms:
249263 joined[room.room_id] = SyncRestServlet.encode_room(
250 room, time_now, token_id, only_fields=event_fields
264 room, time_now, token_id, joined=True, only_fields=event_fields,
265 event_formatter=event_formatter,
251266 )
252267
253268 return joined
254269
255270 @staticmethod
256 def encode_invited(rooms, time_now, token_id):
271 def encode_invited(rooms, time_now, token_id, event_formatter):
257272 """
258273 Encode the invited rooms in a sync result
259274
263278 time_now(int): current time - used as a baseline for age
264279 calculations
265280 token_id(int): ID of the user's auth token - used for namespacing
266 of transaction IDs
281 of transaction IDs
282 event_formatter (func[dict]): function to convert from federation format
283 to client format
267284
268285 Returns:
269286 dict[str, dict[str, object]]: the invited rooms list, in our
273290 for room in rooms:
274291 invite = serialize_event(
275292 room.invite, time_now, token_id=token_id,
276 event_format=format_event_for_client_v2_without_room_id,
293 event_format=event_formatter,
277294 is_invite=True,
278295 )
279296 unsigned = dict(invite.get("unsigned", {}))
287304 return invited
288305
289306 @staticmethod
290 def encode_archived(rooms, time_now, token_id, event_fields):
307 def encode_archived(rooms, time_now, token_id, event_fields, event_formatter):
291308 """
292309 Encode the archived rooms in a sync result
293310
299316 token_id(int): ID of the user's auth token - used for namespacing
300317 of transaction IDs
301318 event_fields(list<str>): List of event fields to include. If empty,
302 all fields will be returned.
319 all fields will be returned.
320 event_formatter (func[dict]): function to convert from federation format
321 to client format
303322 Returns:
304323 dict[str, dict[str, object]]: The invited rooms list, in our
305324 response format
307326 joined = {}
308327 for room in rooms:
309328 joined[room.room_id] = SyncRestServlet.encode_room(
310 room, time_now, token_id, joined=False, only_fields=event_fields
329 room, time_now, token_id, joined=False,
330 only_fields=event_fields,
331 event_formatter=event_formatter,
311332 )
312333
313334 return joined
314335
315336 @staticmethod
316 def encode_room(room, time_now, token_id, joined=True, only_fields=None):
337 def encode_room(
338 room, time_now, token_id, joined,
339 only_fields, event_formatter,
340 ):
317341 """
318342 Args:
319343 room (JoinedSyncResult|ArchivedSyncResult): sync result for a
325349 joined (bool): True if the user is joined to this room - will mean
326350 we handle ephemeral events
327351 only_fields(list<str>): Optional. The list of event fields to include.
352 event_formatter (func[dict]): function to convert from federation format
353 to client format
328354 Returns:
329355 dict[str, object]: the room, encoded in our response format
330356 """
331357 def serialize(event):
332 # TODO(mjark): Respect formatting requirements in the filter.
333358 return serialize_event(
334359 event, time_now, token_id=token_id,
335 event_format=format_event_for_client_v2_without_room_id,
360 event_format=event_formatter,
336361 only_event_fields=only_fields,
337362 )
338363
7878 yield self.auth.get_user_by_req(request, allow_guest=True)
7979
8080 fields = request.args
81 fields.pop("access_token", None)
81 fields.pop(b"access_token", None)
8282
8383 results = yield self.appservice_handler.query_3pe(
8484 ThirdPartyEntityKind.USER, protocol, fields
101101 yield self.auth.get_user_by_req(request, allow_guest=True)
102102
103103 fields = request.args
104 fields.pop("access_token", None)
104 fields.pop(b"access_token", None)
105105
106106 results = yield self.appservice_handler.query_3pe(
107107 ThirdPartyEntityKind.LOCATION, protocol, fields
8787 )
8888
8989 def getChild(self, name, request):
90 if name == '':
90 if name == b'':
9191 return self
2121 class KeyApiV2Resource(Resource):
2222 def __init__(self, hs):
2323 Resource.__init__(self)
24 self.putChild("server", LocalKey(hs))
25 self.putChild("query", RemoteKey(hs))
24 self.putChild(b"server", LocalKey(hs))
25 self.putChild(b"query", RemoteKey(hs))
102102 def async_render_GET(self, request):
103103 if len(request.postpath) == 1:
104104 server, = request.postpath
105 query = {server: {}}
105 query = {server.decode('ascii'): {}}
106106 elif len(request.postpath) == 2:
107107 server, key_id = request.postpath
108108 minimum_valid_until_ts = parse_integer(
111111 arguments = {}
112112 if minimum_valid_until_ts is not None:
113113 arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
114 query = {server: {key_id: arguments}}
114 query = {server.decode('ascii'): {key_id.decode('ascii'): arguments}}
115115 else:
116116 raise SynapseError(
117117 404, "Not found %r" % request.postpath, Codes.NOT_FOUND
118118 )
119
119120 yield self.query_keys(request, query, query_remote_on_cache_miss=True)
120121
121122 def render_POST(self, request):
134135 @defer.inlineCallbacks
135136 def query_keys(self, request, query, query_remote_on_cache_miss=False):
136137 logger.info("Handling query for keys %r", query)
138
137139 store_queries = []
138140 for server_name, key_ids in query.items():
139141 if (
5555 # servers.
5656
5757 # TODO: A little crude here, we could do this better.
58 filename = request.path.split('/')[-1]
58 filename = request.path.decode('ascii').split('/')[-1]
5959 # be paranoid
6060 filename = re.sub("[^0-9A-z.-_]", "", filename)
6161
7777 # select private. don't bother setting Expires as all our matrix
7878 # clients are smart enough to be happy with Cache-Control (right?)
7979 request.setHeader(
80 "Cache-Control", "public,max-age=86400,s-maxage=86400"
80 b"Cache-Control", b"public,max-age=86400,s-maxage=86400"
8181 )
8282
8383 d = FileSender().beginFileTransfer(f, request)
1414
1515 import logging
1616 import os
17 import urllib
1817
19 from six.moves.urllib import parse as urlparse
18 from six.moves import urllib
2019
2120 from twisted.internet import defer
2221 from twisted.protocols.basic import FileSender
3433 # This allows users to append e.g. /test.png to the URL. Useful for
3534 # clients that parse the URL to see content type.
3635 server_name, media_id = request.postpath[:2]
36
37 if isinstance(server_name, bytes):
38 server_name = server_name.decode('utf-8')
39 media_id = media_id.decode('utf8')
40
3741 file_name = None
3842 if len(request.postpath) > 2:
3943 try:
40 file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
44 file_name = urllib.parse.unquote(request.postpath[-1].decode("utf-8"))
4145 except UnicodeDecodeError:
4246 pass
4347 return server_name, media_id, file_name
9296 file_size (int): Size in bytes of the media, if known.
9397 upload_name (str): The name of the requested file, if any.
9498 """
99 def _quote(x):
100 return urllib.parse.quote(x.encode("utf-8"))
101
95102 request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
96103 if upload_name:
97104 if is_ascii(upload_name):
98 request.setHeader(
99 b"Content-Disposition",
100 b"inline; filename=%s" % (
101 urllib.quote(upload_name.encode("utf-8")),
102 ),
103 )
105 disposition = ("inline; filename=%s" % (_quote(upload_name),)).encode("ascii")
104106 else:
105 request.setHeader(
106 b"Content-Disposition",
107 b"inline; filename*=utf-8''%s" % (
108 urllib.quote(upload_name.encode("utf-8")),
109 ),
110 )
107 disposition = (
108 "inline; filename*=utf-8''%s" % (_quote(upload_name),)).encode("ascii")
109
110 request.setHeader(b"Content-Disposition", disposition)
111111
112112 # cache for at least a day.
113113 # XXX: we might want to turn this off for data we don't want to
4646 def _async_render_GET(self, request):
4747 set_cors_headers(request)
4848 request.setHeader(
49 "Content-Security-Policy",
50 "default-src 'none';"
51 " script-src 'none';"
52 " plugin-types application/pdf;"
53 " style-src 'unsafe-inline';"
54 " object-src 'self';"
49 b"Content-Security-Policy",
50 b"default-src 'none';"
51 b" script-src 'none';"
52 b" plugin-types application/pdf;"
53 b" style-src 'unsafe-inline';"
54 b" object-src 'self';"
5555 )
5656 server_name, media_id, name = parse_media_id(request)
5757 if server_name == self.server_name:
1919 import os
2020 import shutil
2121
22 from six import iteritems
22 from six import PY3, iteritems
2323 from six.moves.urllib import parse as urlparse
2424
2525 import twisted.internet.error
396396
397397 yield finish()
398398
399 media_type = headers["Content-Type"][0]
399 media_type = headers[b"Content-Type"][0].decode('ascii')
400400
401401 time_now_ms = self.clock.time_msec()
402402
403 content_disposition = headers.get("Content-Disposition", None)
403 content_disposition = headers.get(b"Content-Disposition", None)
404404 if content_disposition:
405 _, params = cgi.parse_header(content_disposition[0],)
405 _, params = cgi.parse_header(content_disposition[0].decode('ascii'),)
406406 upload_name = None
407407
408408 # First check if there is a valid UTF-8 filename
418418 upload_name = upload_name_ascii
419419
420420 if upload_name:
421 upload_name = urlparse.unquote(upload_name)
421 if PY3:
422 upload_name = urlparse.unquote(upload_name)
423 else:
424 upload_name = urlparse.unquote(upload_name.encode('ascii'))
422425 try:
423 upload_name = upload_name.decode("utf-8")
426 if isinstance(upload_name, bytes):
427 upload_name = upload_name.decode("utf-8")
424428 except UnicodeDecodeError:
425429 upload_name = None
426430 else:
754758 Resource.__init__(self)
755759
756760 media_repo = hs.get_media_repository()
757 self.putChild("upload", UploadResource(hs, media_repo))
758 self.putChild("download", DownloadResource(hs, media_repo))
759 self.putChild("thumbnail", ThumbnailResource(
761
762 self.putChild(b"upload", UploadResource(hs, media_repo))
763 self.putChild(b"download", DownloadResource(hs, media_repo))
764 self.putChild(b"thumbnail", ThumbnailResource(
760765 hs, media_repo, media_repo.media_storage,
761766 ))
762 self.putChild("identicon", IdenticonResource())
767 self.putChild(b"identicon", IdenticonResource())
763768 if hs.config.url_preview_enabled:
764 self.putChild("preview_url", PreviewUrlResource(
769 self.putChild(b"preview_url", PreviewUrlResource(
765770 hs, media_repo, media_repo.media_storage,
766771 ))
767 self.putChild("config", MediaConfigResource(hs))
772 self.putChild(b"config", MediaConfigResource(hs))
260260
261261 logger.debug("Calculated OG for %s as %s" % (url, og))
262262
263 jsonog = json.dumps(og)
263 jsonog = json.dumps(og).encode('utf8')
264264
265265 # store OG in history-aware DB cache
266266 yield self.store.store_url_cache(
300300 logger.warn("Error downloading %s: %r", url, e)
301301 raise SynapseError(
302302 500, "Failed to download content: %s" % (
303 traceback.format_exception_only(sys.exc_type, e),
303 traceback.format_exception_only(sys.exc_info()[0], e),
304304 ),
305305 Codes.UNKNOWN,
306306 )
307307 yield finish()
308308
309309 try:
310 if "Content-Type" in headers:
311 media_type = headers["Content-Type"][0]
310 if b"Content-Type" in headers:
311 media_type = headers[b"Content-Type"][0].decode('ascii')
312312 else:
313313 media_type = "application/octet-stream"
314314 time_now_ms = self.clock.time_msec()
315315
316 content_disposition = headers.get("Content-Disposition", None)
316 content_disposition = headers.get(b"Content-Disposition", None)
317317 if content_disposition:
318318 _, params = cgi.parse_header(content_disposition[0],)
319319 download_name = None
929929 )
930930
931931 self._invalidate_cache_and_stream(
932 txn, self.get_room_summary, (room_id,)
933 )
934
935 self._invalidate_cache_and_stream(
932936 txn, self.get_current_state_ids, (room_id,)
933937 )
934938
18851889 ")"
18861890 )
18871891
1888 # create an index on should_delete because later we'll be looking for
1889 # the should_delete / shouldn't_delete subsets
1890 txn.execute(
1891 "CREATE INDEX events_to_purge_should_delete"
1892 " ON events_to_purge(should_delete)",
1893 )
1894
1895 # We do joins against events_to_purge for e.g. calculating state
1896 # groups to purge, etc., so lets make an index.
1897 txn.execute(
1898 "CREATE INDEX events_to_purge_id"
1899 " ON events_to_purge(event_id)",
1900 )
1901
19021892 # First ensure that we're not about to delete all the forward extremeties
19031893 txn.execute(
19041894 "SELECT e.event_id, e.depth FROM events as e "
19251915 should_delete_params = ()
19261916 if not delete_local_events:
19271917 should_delete_expr += " AND event_id NOT LIKE ?"
1928 should_delete_params += ("%:" + self.hs.hostname, )
1918
1919 # We include the parameter twice since we use the expression twice
1920 should_delete_params += (
1921 "%:" + self.hs.hostname,
1922 "%:" + self.hs.hostname,
1923 )
19291924
19301925 should_delete_params += (room_id, token.topological)
19311926
1927 # Note that we insert events that are outliers and aren't going to be
1928 # deleted, as nothing will happen to them.
19321929 txn.execute(
19331930 "INSERT INTO events_to_purge"
19341931 " SELECT event_id, %s"
19351932 " FROM events AS e LEFT JOIN state_events USING (event_id)"
1936 " WHERE e.room_id = ? AND topological_ordering < ?" % (
1933 " WHERE (NOT outlier OR (%s)) AND e.room_id = ? AND topological_ordering < ?"
1934 % (
1935 should_delete_expr,
19371936 should_delete_expr,
19381937 ),
19391938 should_delete_params,
19401939 )
1940
1941 # We create the indices *after* insertion as that's a lot faster.
1942
1943 # create an index on should_delete because later we'll be looking for
1944 # the should_delete / shouldn't_delete subsets
1945 txn.execute(
1946 "CREATE INDEX events_to_purge_should_delete"
1947 " ON events_to_purge(should_delete)",
1948 )
1949
1950 # We do joins against events_to_purge for e.g. calculating state
1951 # groups to purge, etc., so lets make an index.
1952 txn.execute(
1953 "CREATE INDEX events_to_purge_id"
1954 " ON events_to_purge(event_id)",
1955 )
1956
19411957 txn.execute(
19421958 "SELECT event_id, should_delete FROM events_to_purge"
19431959 )
133133 """
134134 key_id = "%s:%s" % (verify_key.alg, verify_key.version)
135135
136 # XXX fix this to not need a lock (#3819)
136137 def _txn(txn):
137138 self._simple_upsert_txn(
138139 txn,
146146 return self.runInteraction("count_users", _count_users)
147147
148148 @defer.inlineCallbacks
149 def get_registered_reserved_users_count(self):
150 """Of the reserved threepids defined in config, how many are associated
151 with registered users?
152
153 Returns:
154 Defered[int]: Number of real reserved users
155 """
156 count = 0
157 for tp in self.hs.config.mau_limits_reserved_threepids:
158 user_id = yield self.hs.get_datastore().get_user_id_by_threepid(
159 tp["medium"], tp["address"]
160 )
161 if user_id:
162 count = count + 1
163 defer.returnValue(count)
164
165 @defer.inlineCallbacks
149166 def upsert_monthly_active_user(self, user_id):
150167 """
151168 Updates or inserts monthly active user member
198215 Args:
199216 user_id(str): the user_id to query
200217 """
218
201219 if self.hs.config.limit_usage_by_mau:
220 # Trial users and guests should not be included as part of MAU group
221 is_guest = yield self.is_guest(user_id)
222 if is_guest:
223 return
202224 is_trial = yield self.is_trial_user(user_id)
203225 if is_trial:
204 # we don't track trial users in the MAU table.
205226 return
206227
207228 last_seen_timestamp = yield self.user_last_seen_monthly_active(user_id)
5050 "ProfileInfo", ("avatar_url", "display_name")
5151 )
5252
53 # "members" points to a truncated list of (user_id, event_id) tuples for users of
54 # a given membership type, suitable for use in calculating heroes for a room.
55 # "count" points to the total numberr of users of a given membership type.
56 MemberSummary = namedtuple(
57 "MemberSummary", ("members", "count")
58 )
5359
5460 _MEMBERSHIP_PROFILE_UPDATE_NAME = "room_membership_profile_update"
5561
8086 txn.execute(sql, (room_id, Membership.JOIN,))
8187 return [to_ascii(r[0]) for r in txn]
8288 return self.runInteraction("get_users_in_room", f)
89
90 @cached(max_entries=100000)
91 def get_room_summary(self, room_id):
92 """ Get the details of a room roughly suitable for use by the room
93 summary extension to /sync. Useful when lazy loading room members.
94 Args:
95 room_id (str): The room ID to query
96 Returns:
97 Deferred[dict[str, MemberSummary]:
98 dict of membership states, pointing to a MemberSummary named tuple.
99 """
100
101 def _get_room_summary_txn(txn):
102 # first get counts.
103 # We do this all in one transaction to keep the cache small.
104 # FIXME: get rid of this when we have room_stats
105 sql = """
106 SELECT count(*), m.membership FROM room_memberships as m
107 INNER JOIN current_state_events as c
108 ON m.event_id = c.event_id
109 AND m.room_id = c.room_id
110 AND m.user_id = c.state_key
111 WHERE c.type = 'm.room.member' AND c.room_id = ?
112 GROUP BY m.membership
113 """
114
115 txn.execute(sql, (room_id,))
116 res = {}
117 for count, membership in txn:
118 summary = res.setdefault(to_ascii(membership), MemberSummary([], count))
119
120 # we order by membership and then fairly arbitrarily by event_id so
121 # heroes are consistent
122 sql = """
123 SELECT m.user_id, m.membership, m.event_id
124 FROM room_memberships as m
125 INNER JOIN current_state_events as c
126 ON m.event_id = c.event_id
127 AND m.room_id = c.room_id
128 AND m.user_id = c.state_key
129 WHERE c.type = 'm.room.member' AND c.room_id = ?
130 ORDER BY
131 CASE m.membership WHEN ? THEN 1 WHEN ? THEN 2 ELSE 3 END ASC,
132 m.event_id ASC
133 LIMIT ?
134 """
135
136 # 6 is 5 (number of heroes) plus 1, in case one of them is the calling user.
137 txn.execute(sql, (room_id, Membership.JOIN, Membership.INVITE, 6))
138 for user_id, membership, event_id in txn:
139 summary = res[to_ascii(membership)]
140 # we will always have a summary for this membership type at this
141 # point given the summary currently contains the counts.
142 members = summary.members
143 members.append((to_ascii(user_id), to_ascii(event_id)))
144
145 return res
146
147 return self.runInteraction("get_room_summary", _get_room_summary_txn)
83148
84149 @cached()
85150 def get_invited_rooms_for_user(self, user_id):
437437 value.trap(CancelledError)
438438 raise DeferredTimeoutError(timeout, "Deferred")
439439 return value
440
441
442 def timeout_no_seriously(deferred, timeout, reactor):
443 """The in build twisted deferred addTimeout (and the method above)
444 completely fail to time things out under some unknown circumstances.
445
446 Lets try a different way of timing things out and maybe that will make
447 things work?!
448
449 TODO: Kill this with fire.
450 """
451
452 new_d = defer.Deferred()
453
454 timed_out = [False]
455
456 def time_it_out():
457 timed_out[0] = True
458
459 if not new_d.called:
460 new_d.errback(DeferredTimeoutError(timeout, "Deferred"))
461
462 deferred.cancel()
463
464 delayed_call = reactor.callLater(timeout, time_it_out)
465
466 def convert_cancelled(value):
467 if timed_out[0]:
468 return _cancelled_to_timed_out_error(value, timeout)
469 return value
470
471 deferred.addBoth(convert_cancelled)
472
473 def cancel_timeout(result):
474 # stop the pending call to cancel the deferred if it's been fired
475 if delayed_call.active():
476 delayed_call.cancel()
477 return result
478
479 deferred.addBoth(cancel_timeout)
480
481 def success_cb(val):
482 if not new_d.called:
483 new_d.callback(val)
484
485 def failure_cb(val):
486 if not new_d.called:
487 new_d.errback(val)
488
489 deferred.addCallbacks(success_cb, failure_cb)
490
491 return new_d
1818 from twisted.cred import checkers, portal
1919
2020 PUBLIC_KEY = (
21 "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEArzJx8OYOnJmzf4tfBEvLi8DVPrJ3/c9k2I/Az"
22 "64fxjHf9imyRJbixtQhlH9lfNjUIx+4LmrJH5QNRsFporcHDKOTwTTYLh5KmRpslkYHRivcJS"
23 "kbh/C+BR3utDS555mV"
21 "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDHhGATaW4KhE23+7nrH4jFx3yLq9OjaEs5"
22 "XALqeK+7385NlLja3DE/DO9mGhnd9+bAy39EKT3sTV6+WXQ4yD0TvEEyUEMtjWkSEm6U32+C"
23 "DaS3TW/vPBUMeJQwq+Ydcif1UlnpXrDDTamD0AU9VaEvHq+3HAkipqn0TGpKON6aqk4vauDx"
24 "oXSsV5TXBVrxP/y7HpMOpU4GUWsaaacBTKKNnUaQB4UflvydaPJUuwdaCUJGTMjbhWrjVfK+"
25 "jslseSPxU6XvrkZMyCr4znxvuDxjMk1RGIdO7v+rbBMLEgqtSMNqJbYeVCnj2CFgc3fcTcld"
26 "X2uOJDrJb/WRlHulthCh"
2427 )
2528
2629 PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY-----
27 MIIByAIBAAJhAK8ycfDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW
28 4sbUIZR/ZXzY1CMfuC5qyR+UDUbBaaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fw
29 vgUd7rQ0ueeZlQIBIwJgbh+1VZfr7WftK5lu7MHtqE1S1vPWZQYE3+VUn8yJADyb
30 Z4fsZaCrzW9lkIqXkE3GIY+ojdhZhkO1gbG0118sIgphwSWKRxK0mvh6ERxKqIt1
31 xJEJO74EykXZV4oNJ8sjAjEA3J9r2ZghVhGN6V8DnQrTk24Td0E8hU8AcP0FVP+8
32 PQm/g/aXf2QQkQT+omdHVEJrAjEAy0pL0EBH6EVS98evDCBtQw22OZT52qXlAwZ2
33 gyTriKFVoqjeEjt3SZKKqXHSApP/AjBLpF99zcJJZRq2abgYlf9lv1chkrWqDHUu
34 DZttmYJeEfiFBBavVYIF1dOlZT0G8jMCMBc7sOSZodFnAiryP+Qg9otSBjJ3bQML
35 pSTqy7c3a2AScC/YyOwkDaICHnnD3XyjMwIxALRzl0tQEKMXs6hH8ToUdlLROCrP
36 EhQ0wahUTCk1gKA4uPD6TMTChavbh4K63OvbKg==
30 MIIEpQIBAAKCAQEAx4RgE2luCoRNt/u56x+Ixcd8i6vTo2hLOVwC6nivu9/OTZS4
31 2twxPwzvZhoZ3ffmwMt/RCk97E1evll0OMg9E7xBMlBDLY1pEhJulN9vgg2kt01v
32 7zwVDHiUMKvmHXIn9VJZ6V6ww02pg9AFPVWhLx6vtxwJIqap9ExqSjjemqpOL2rg
33 8aF0rFeU1wVa8T/8ux6TDqVOBlFrGmmnAUyijZ1GkAeFH5b8nWjyVLsHWglCRkzI
34 24Vq41Xyvo7JbHkj8VOl765GTMgq+M58b7g8YzJNURiHTu7/q2wTCxIKrUjDaiW2
35 HlQp49ghYHN33E3JXV9rjiQ6yW/1kZR7pbYQoQIDAQABAoIBAQC8KJ0q8Wzzwh5B
36 esa1dQHZ8+4DEsL/Amae66VcVwD0X3cCN1W2IZ7X5W0Ij2kBqr8V51RYhcR+S+Ek
37 BtzSiBUBvbKGrqcMGKaUgomDIMzai99hd0gvCCyZnEW1OQhFkNkaRNXCfqiZJ27M
38 fqvSUiU2eOwh9fCvmxoA6Of8o3FbzcJ+1GMcobWRllDtLmj6lgVbDzuA+0jC5daB
39 9Tj1pBzu3wn3ufxiS+gBnJ+7NcXH3E73lqCcPa2ufbZ1haxfiGCnRIhFXuQDgxFX
40 vKdEfDgtvas6r1ahGbc+b/q8E8fZT7cABuIU4yfOORK+MhpyWbvoyyzuVGKj3PKt
41 KSPJu5CZAoGBAOkoJfAVyYteqKcmGTanGqQnAY43CaYf6GdSPX/jg+JmKZg0zqMC
42 jWZUtPb93i+jnOInbrnuHOiHAxI8wmhEPed28H2lC/LU8PzlqFkZXKFZ4vLOhhRB
43 /HeHCFIDosPFlohWi3b+GAjD7sXgnIuGmnXWe2ea/TS3yersifDEoKKjAoGBANsQ
44 gJX2cJv1c3jhdgcs8vAt5zIOKcCLTOr/QPmVf/kxjNgndswcKHwsxE/voTO9q+TF
45 v/6yCSTxAdjuKz1oIYWgi/dZo82bBKWxNRpgrGviU3/zwxiHlyIXUhzQu78q3VS/
46 7S1XVbc7qMV++XkYKHPVD+nVG/gGzFxumX7MLXfrAoGBAJit9cn2OnjNj9uFE1W6
47 r7N254ndeLAUjPe73xH0RtTm2a4WRopwjW/JYIetTuYbWgyujc+robqTTuuOZjAp
48 H/CG7o0Ym251CypQqaFO/l2aowclPp/dZhpPjp9GSjuxFBZLtiBB3DNBOwbRQzIK
49 /vLTdRQvZkgzYkI4i0vjNt3JAoGBANP8HSKBLymMlShlrSx2b8TB9tc2Y2riohVJ
50 2ttqs0M2kt/dGJWdrgOz4mikL+983Olt/0P9juHDoxEEMK2kpcPEv40lnmBpYU7h
51 s8yJvnBLvJe2EJYdJ8AipyAhUX1FgpbvfxmASP8eaUxsegeXvBWTGWojAoS6N2o+
52 0KSl+l3vAoGAFqm0gO9f/Q1Se60YQd4l2PZeMnJFv0slpgHHUwegmd6wJhOD7zJ1
53 CkZcXwiv7Nog7AI9qKJEUXLjoqL+vJskBzSOqU3tcd670YQMi1aXSXJqYE202K7o
54 EddTrx3TNpr1D5m/f+6mnXWrc8u9y1+GNx9yz889xMjIBTBI9KqaaOs=
3755 -----END RSA PRIVATE KEY-----"""
3856
3957
1919
2020 from twisted.internet import defer
2121
22 from synapse.metrics import InFlightGauge
2223 from synapse.util.logcontext import LoggingContext
2324
2425 logger = logging.getLogger(__name__)
4344 # seconds spent waiting for a db connection, in this block
4445 block_db_sched_duration = Counter(
4546 "synapse_util_metrics_block_db_sched_duration_seconds", "", ["block_name"])
47
48 # Tracks the number of blocks currently active
49 in_flight = InFlightGauge(
50 "synapse_util_metrics_block_in_flight", "",
51 labels=["block_name"],
52 sub_metrics=["real_time_max", "real_time_sum"],
53 )
4654
4755
4856 def measure_func(name):
8189
8290 self.start_usage = self.start_context.get_resource_usage()
8391
92 in_flight.register((self.name,), self._update_in_flight)
93
8494 def __exit__(self, exc_type, exc_val, exc_tb):
8595 if isinstance(exc_type, Exception) or not self.start_context:
8696 return
97
98 in_flight.unregister((self.name,), self._update_in_flight)
8799
88100 duration = self.clock.time() - self.start
89101
119131
120132 if self.created_context:
121133 self.start_context.__exit__(exc_type, exc_val, exc_tb)
134
135 def _update_in_flight(self, metrics):
136 """Gets called when processing in flight metrics
137 """
138 duration = self.clock.time() - self.start
139
140 metrics.real_time_max = max(metrics.real_time_max, duration)
141 metrics.real_time_sum += duration
142
143 # TODO: Add other in flight metrics.
470470 def test_reserved_threepid(self):
471471 self.hs.config.limit_usage_by_mau = True
472472 self.hs.config.max_mau_value = 1
473 self.store.get_monthly_active_count = lambda: defer.succeed(2)
473474 threepid = {'medium': 'email', 'address': 'reserved@server.com'}
474475 unknown_threepid = {'medium': 'email', 'address': 'unreserved@server.com'}
475476 self.hs.config.mau_limits_reserved_threepids = [threepid]
4646 self.assertEqual(len(self.reactor.tcpServers), 1)
4747 site = self.reactor.tcpServers[0][1]
4848 self.resource = (
49 site.resource.children["_matrix"].children["client"].children["r0"]
49 site.resource.children[b"_matrix"].children[b"client"].children[b"r0"]
5050 )
5151
5252 request, channel = self.make_request("PUT", "presence/a/status")
7676 self.assertEqual(len(self.reactor.tcpServers), 1)
7777 site = self.reactor.tcpServers[0][1]
7878 self.resource = (
79 site.resource.children["_matrix"].children["client"].children["r0"]
79 site.resource.children[b"_matrix"].children[b"client"].children[b"r0"]
8080 )
8181
8282 request, channel = self.make_request("PUT", "presence/a/status")
4242
4343
4444 def _make_edu_transaction_json(edu_type, content):
45 return json.dumps(_expect_edu_transaction(edu_type, content)).encode(
46 'utf8'
47 )
45 return json.dumps(_expect_edu_transaction(edu_type, content)).encode('utf8')
4846
4947
5048 class TypingNotificationsTestCase(unittest.TestCase):
0 # -*- coding: utf-8 -*-
1 # Copyright 2018 New Vector Ltd
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from mock import Mock
16
17 from twisted.internet.defer import TimeoutError
18 from twisted.internet.error import ConnectingCancelledError, DNSLookupError
19 from twisted.web.client import ResponseNeverReceived
20
21 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
22
23 from tests.unittest import HomeserverTestCase
24
25
26 class FederationClientTests(HomeserverTestCase):
27 def make_homeserver(self, reactor, clock):
28
29 hs = self.setup_test_homeserver(reactor=reactor, clock=clock)
30 hs.tls_client_options_factory = None
31 return hs
32
33 def prepare(self, reactor, clock, homeserver):
34
35 self.cl = MatrixFederationHttpClient(self.hs)
36 self.reactor.lookups["testserv"] = "1.2.3.4"
37
38 def test_dns_error(self):
39 """
40 If the DNS raising returns an error, it will bubble up.
41 """
42 d = self.cl._request("testserv2:8008", "GET", "foo/bar", timeout=10000)
43 self.pump()
44
45 f = self.failureResultOf(d)
46 self.assertIsInstance(f.value, DNSLookupError)
47
48 def test_client_never_connect(self):
49 """
50 If the HTTP request is not connected and is timed out, it'll give a
51 ConnectingCancelledError.
52 """
53 d = self.cl._request("testserv:8008", "GET", "foo/bar", timeout=10000)
54
55 self.pump()
56
57 # Nothing happened yet
58 self.assertFalse(d.called)
59
60 # Make sure treq is trying to connect
61 clients = self.reactor.tcpClients
62 self.assertEqual(len(clients), 1)
63 self.assertEqual(clients[0][0], '1.2.3.4')
64 self.assertEqual(clients[0][1], 8008)
65
66 # Deferred is still without a result
67 self.assertFalse(d.called)
68
69 # Push by enough to time it out
70 self.reactor.advance(10.5)
71 f = self.failureResultOf(d)
72
73 self.assertIsInstance(f.value, ConnectingCancelledError)
74
75 def test_client_connect_no_response(self):
76 """
77 If the HTTP request is connected, but gets no response before being
78 timed out, it'll give a ResponseNeverReceived.
79 """
80 d = self.cl._request("testserv:8008", "GET", "foo/bar", timeout=10000)
81
82 self.pump()
83
84 # Nothing happened yet
85 self.assertFalse(d.called)
86
87 # Make sure treq is trying to connect
88 clients = self.reactor.tcpClients
89 self.assertEqual(len(clients), 1)
90 self.assertEqual(clients[0][0], '1.2.3.4')
91 self.assertEqual(clients[0][1], 8008)
92
93 conn = Mock()
94 client = clients[0][2].buildProtocol(None)
95 client.makeConnection(conn)
96
97 # Deferred is still without a result
98 self.assertFalse(d.called)
99
100 # Push by enough to time it out
101 self.reactor.advance(10.5)
102 f = self.failureResultOf(d)
103
104 self.assertIsInstance(f.value, ResponseNeverReceived)
105
106 def test_client_gets_headers(self):
107 """
108 Once the client gets the headers, _request returns successfully.
109 """
110 d = self.cl._request("testserv:8008", "GET", "foo/bar", timeout=10000)
111
112 self.pump()
113
114 conn = Mock()
115 clients = self.reactor.tcpClients
116 client = clients[0][2].buildProtocol(None)
117 client.makeConnection(conn)
118
119 # Deferred does not have a result
120 self.assertFalse(d.called)
121
122 # Send it the HTTP response
123 client.dataReceived(b"HTTP/1.1 200 OK\r\nServer: Fake\r\n\r\n")
124
125 # We should get a successful response
126 r = self.successResultOf(d)
127 self.assertEqual(r.code, 200)
128
129 def test_client_headers_no_body(self):
130 """
131 If the HTTP request is connected, but gets no response before being
132 timed out, it'll give a ResponseNeverReceived.
133 """
134 d = self.cl.post_json("testserv:8008", "foo/bar", timeout=10000)
135
136 self.pump()
137
138 conn = Mock()
139 clients = self.reactor.tcpClients
140 client = clients[0][2].buildProtocol(None)
141 client.makeConnection(conn)
142
143 # Deferred does not have a result
144 self.assertFalse(d.called)
145
146 # Send it the HTTP response
147 client.dataReceived(
148 (b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n"
149 b"Server: Fake\r\n\r\n")
150 )
151
152 # Push by enough to time it out
153 self.reactor.advance(10.5)
154 f = self.failureResultOf(d)
155
156 self.assertIsInstance(f.value, TimeoutError)
2121
2222 from twisted.internet import defer
2323
24 import synapse.rest.client.v1.room
2524 from synapse.api.constants import Membership
26 from synapse.http.server import JsonResource
27 from synapse.types import UserID
28 from synapse.util import Clock
25 from synapse.rest.client.v1 import room
2926
3027 from tests import unittest
31 from tests.server import (
32 ThreadedMemoryReactorClock,
33 make_request,
34 render,
35 setup_test_homeserver,
36 )
37
38 from .utils import RestHelper
3928
4029 PATH_PREFIX = b"/_matrix/client/api/v1"
4130
4231
43 class RoomBase(unittest.TestCase):
32 class RoomBase(unittest.HomeserverTestCase):
4433 rmcreator_id = None
4534
46 def setUp(self):
47
48 self.clock = ThreadedMemoryReactorClock()
49 self.hs_clock = Clock(self.clock)
50
51 self.hs = setup_test_homeserver(
52 self.addCleanup,
35 servlets = [room.register_servlets, room.register_deprecated_servlets]
36
37 def make_homeserver(self, reactor, clock):
38
39 self.hs = self.setup_test_homeserver(
5340 "red",
5441 http_client=None,
55 clock=self.hs_clock,
56 reactor=self.clock,
5742 federation_client=Mock(),
5843 ratelimiter=NonCallableMock(spec_set=["send_message"]),
5944 )
6247
6348 self.hs.get_federation_handler = Mock(return_value=Mock())
6449
65 def get_user_by_access_token(token=None, allow_guest=False):
66 return {
67 "user": UserID.from_string(self.helper.auth_user_id),
68 "token_id": 1,
69 "is_guest": False,
70 }
71
72 def get_user_by_req(request, allow_guest=False, rights="access"):
73 return synapse.types.create_requester(
74 UserID.from_string(self.helper.auth_user_id), 1, False, None
75 )
76
77 self.hs.get_auth().get_user_by_req = get_user_by_req
78 self.hs.get_auth().get_user_by_access_token = get_user_by_access_token
79 self.hs.get_auth().get_access_token_from_request = Mock(return_value=b"1234")
80
8150 def _insert_client_ip(*args, **kwargs):
8251 return defer.succeed(None)
8352
8453 self.hs.get_datastore().insert_client_ip = _insert_client_ip
8554
86 self.resource = JsonResource(self.hs)
87 synapse.rest.client.v1.room.register_servlets(self.hs, self.resource)
88 synapse.rest.client.v1.room.register_deprecated_servlets(self.hs, self.resource)
89 self.helper = RestHelper(self.hs, self.resource, self.user_id)
55 return self.hs
9056
9157
9258 class RoomPermissionsTestCase(RoomBase):
9359 """ Tests room permissions. """
9460
95 user_id = b"@sid1:red"
96 rmcreator_id = b"@notme:red"
97
98 def setUp(self):
99
100 super(RoomPermissionsTestCase, self).setUp()
61 user_id = "@sid1:red"
62 rmcreator_id = "@notme:red"
63
64 def prepare(self, reactor, clock, hs):
10165
10266 self.helper.auth_user_id = self.rmcreator_id
10367 # create some rooms under the name rmcreator_id
11377 self.created_rmid_msg_path = (
11478 "rooms/%s/send/m.room.message/a1" % (self.created_rmid)
11579 ).encode('ascii')
116 request, channel = make_request(
117 b"PUT",
118 self.created_rmid_msg_path,
119 b'{"msgtype":"m.text","body":"test msg"}',
120 )
121 render(request, self.resource, self.clock)
122 self.assertEquals(channel.result["code"], b"200", channel.result)
80 request, channel = self.make_request(
81 "PUT", self.created_rmid_msg_path, b'{"msgtype":"m.text","body":"test msg"}'
82 )
83 self.render(request)
84 self.assertEquals(200, channel.code, channel.result)
12385
12486 # set topic for public room
125 request, channel = make_request(
126 b"PUT",
87 request, channel = self.make_request(
88 "PUT",
12789 ("rooms/%s/state/m.room.topic" % self.created_public_rmid).encode('ascii'),
12890 b'{"topic":"Public Room Topic"}',
12991 )
130 render(request, self.resource, self.clock)
131 self.assertEquals(channel.result["code"], b"200", channel.result)
92 self.render(request)
93 self.assertEquals(200, channel.code, channel.result)
13294
13395 # auth as user_id now
13496 self.helper.auth_user_id = self.user_id
139101 seq = iter(range(100))
140102
141103 def send_msg_path():
142 return b"/rooms/%s/send/m.room.message/mid%s" % (
104 return "/rooms/%s/send/m.room.message/mid%s" % (
143105 self.created_rmid,
144 str(next(seq)).encode('ascii'),
106 str(next(seq)),
145107 )
146108
147109 # send message in uncreated room, expect 403
148 request, channel = make_request(
149 b"PUT",
150 b"/rooms/%s/send/m.room.message/mid2" % (self.uncreated_rmid,),
110 request, channel = self.make_request(
111 "PUT",
112 "/rooms/%s/send/m.room.message/mid2" % (self.uncreated_rmid,),
151113 msg_content,
152114 )
153 render(request, self.resource, self.clock)
154 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
115 self.render(request)
116 self.assertEquals(403, channel.code, msg=channel.result["body"])
155117
156118 # send message in created room not joined (no state), expect 403
157 request, channel = make_request(b"PUT", send_msg_path(), msg_content)
158 render(request, self.resource, self.clock)
159 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
119 request, channel = self.make_request("PUT", send_msg_path(), msg_content)
120 self.render(request)
121 self.assertEquals(403, channel.code, msg=channel.result["body"])
160122
161123 # send message in created room and invited, expect 403
162124 self.helper.invite(
163125 room=self.created_rmid, src=self.rmcreator_id, targ=self.user_id
164126 )
165 request, channel = make_request(b"PUT", send_msg_path(), msg_content)
166 render(request, self.resource, self.clock)
167 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
127 request, channel = self.make_request("PUT", send_msg_path(), msg_content)
128 self.render(request)
129 self.assertEquals(403, channel.code, msg=channel.result["body"])
168130
169131 # send message in created room and joined, expect 200
170132 self.helper.join(room=self.created_rmid, user=self.user_id)
171 request, channel = make_request(b"PUT", send_msg_path(), msg_content)
172 render(request, self.resource, self.clock)
173 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
133 request, channel = self.make_request("PUT", send_msg_path(), msg_content)
134 self.render(request)
135 self.assertEquals(200, channel.code, msg=channel.result["body"])
174136
175137 # send message in created room and left, expect 403
176138 self.helper.leave(room=self.created_rmid, user=self.user_id)
177 request, channel = make_request(b"PUT", send_msg_path(), msg_content)
178 render(request, self.resource, self.clock)
179 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
139 request, channel = self.make_request("PUT", send_msg_path(), msg_content)
140 self.render(request)
141 self.assertEquals(403, channel.code, msg=channel.result["body"])
180142
181143 def test_topic_perms(self):
182144 topic_content = b'{"topic":"My Topic Name"}'
183 topic_path = b"/rooms/%s/state/m.room.topic" % self.created_rmid
145 topic_path = "/rooms/%s/state/m.room.topic" % self.created_rmid
184146
185147 # set/get topic in uncreated room, expect 403
186 request, channel = make_request(
187 b"PUT", b"/rooms/%s/state/m.room.topic" % self.uncreated_rmid, topic_content
188 )
189 render(request, self.resource, self.clock)
190 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
191 request, channel = make_request(
192 b"GET", "/rooms/%s/state/m.room.topic" % self.uncreated_rmid
193 )
194 render(request, self.resource, self.clock)
195 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
148 request, channel = self.make_request(
149 "PUT", "/rooms/%s/state/m.room.topic" % self.uncreated_rmid, topic_content
150 )
151 self.render(request)
152 self.assertEquals(403, channel.code, msg=channel.result["body"])
153 request, channel = self.make_request(
154 "GET", "/rooms/%s/state/m.room.topic" % self.uncreated_rmid
155 )
156 self.render(request)
157 self.assertEquals(403, channel.code, msg=channel.result["body"])
196158
197159 # set/get topic in created PRIVATE room not joined, expect 403
198 request, channel = make_request(b"PUT", topic_path, topic_content)
199 render(request, self.resource, self.clock)
200 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
201 request, channel = make_request(b"GET", topic_path)
202 render(request, self.resource, self.clock)
203 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
160 request, channel = self.make_request("PUT", topic_path, topic_content)
161 self.render(request)
162 self.assertEquals(403, channel.code, msg=channel.result["body"])
163 request, channel = self.make_request("GET", topic_path)
164 self.render(request)
165 self.assertEquals(403, channel.code, msg=channel.result["body"])
204166
205167 # set topic in created PRIVATE room and invited, expect 403
206168 self.helper.invite(
207169 room=self.created_rmid, src=self.rmcreator_id, targ=self.user_id
208170 )
209 request, channel = make_request(b"PUT", topic_path, topic_content)
210 render(request, self.resource, self.clock)
211 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
171 request, channel = self.make_request("PUT", topic_path, topic_content)
172 self.render(request)
173 self.assertEquals(403, channel.code, msg=channel.result["body"])
212174
213175 # get topic in created PRIVATE room and invited, expect 403
214 request, channel = make_request(b"GET", topic_path)
215 render(request, self.resource, self.clock)
216 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
176 request, channel = self.make_request("GET", topic_path)
177 self.render(request)
178 self.assertEquals(403, channel.code, msg=channel.result["body"])
217179
218180 # set/get topic in created PRIVATE room and joined, expect 200
219181 self.helper.join(room=self.created_rmid, user=self.user_id)
220182
221183 # Only room ops can set topic by default
222184 self.helper.auth_user_id = self.rmcreator_id
223 request, channel = make_request(b"PUT", topic_path, topic_content)
224 render(request, self.resource, self.clock)
225 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
185 request, channel = self.make_request("PUT", topic_path, topic_content)
186 self.render(request)
187 self.assertEquals(200, channel.code, msg=channel.result["body"])
226188 self.helper.auth_user_id = self.user_id
227189
228 request, channel = make_request(b"GET", topic_path)
229 render(request, self.resource, self.clock)
230 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
231 self.assert_dict(json.loads(topic_content), channel.json_body)
190 request, channel = self.make_request("GET", topic_path)
191 self.render(request)
192 self.assertEquals(200, channel.code, msg=channel.result["body"])
193 self.assert_dict(json.loads(topic_content.decode('utf8')), channel.json_body)
232194
233195 # set/get topic in created PRIVATE room and left, expect 403
234196 self.helper.leave(room=self.created_rmid, user=self.user_id)
235 request, channel = make_request(b"PUT", topic_path, topic_content)
236 render(request, self.resource, self.clock)
237 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
238 request, channel = make_request(b"GET", topic_path)
239 render(request, self.resource, self.clock)
240 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
197 request, channel = self.make_request("PUT", topic_path, topic_content)
198 self.render(request)
199 self.assertEquals(403, channel.code, msg=channel.result["body"])
200 request, channel = self.make_request("GET", topic_path)
201 self.render(request)
202 self.assertEquals(200, channel.code, msg=channel.result["body"])
241203
242204 # get topic in PUBLIC room, not joined, expect 403
243 request, channel = make_request(
244 b"GET", b"/rooms/%s/state/m.room.topic" % self.created_public_rmid
245 )
246 render(request, self.resource, self.clock)
247 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
205 request, channel = self.make_request(
206 "GET", "/rooms/%s/state/m.room.topic" % self.created_public_rmid
207 )
208 self.render(request)
209 self.assertEquals(403, channel.code, msg=channel.result["body"])
248210
249211 # set topic in PUBLIC room, not joined, expect 403
250 request, channel = make_request(
251 b"PUT",
252 b"/rooms/%s/state/m.room.topic" % self.created_public_rmid,
212 request, channel = self.make_request(
213 "PUT",
214 "/rooms/%s/state/m.room.topic" % self.created_public_rmid,
253215 topic_content,
254216 )
255 render(request, self.resource, self.clock)
256 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
217 self.render(request)
218 self.assertEquals(403, channel.code, msg=channel.result["body"])
257219
258220 def _test_get_membership(self, room=None, members=[], expect_code=None):
259221 for member in members:
260 path = b"/rooms/%s/state/m.room.member/%s" % (room, member)
261 request, channel = make_request(b"GET", path)
262 render(request, self.resource, self.clock)
263 self.assertEquals(expect_code, int(channel.result["code"]))
222 path = "/rooms/%s/state/m.room.member/%s" % (room, member)
223 request, channel = self.make_request("GET", path)
224 self.render(request)
225 self.assertEquals(expect_code, channel.code)
264226
265227 def test_membership_basic_room_perms(self):
266228 # === room does not exist ===
427389 class RoomsMemberListTestCase(RoomBase):
428390 """ Tests /rooms/$room_id/members/list REST events."""
429391
430 user_id = b"@sid1:red"
392 user_id = "@sid1:red"
431393
432394 def test_get_member_list(self):
433395 room_id = self.helper.create_room_as(self.user_id)
434 request, channel = make_request(b"GET", b"/rooms/%s/members" % room_id)
435 render(request, self.resource, self.clock)
436 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
396 request, channel = self.make_request("GET", "/rooms/%s/members" % room_id)
397 self.render(request)
398 self.assertEquals(200, channel.code, msg=channel.result["body"])
437399
438400 def test_get_member_list_no_room(self):
439 request, channel = make_request(b"GET", b"/rooms/roomdoesnotexist/members")
440 render(request, self.resource, self.clock)
441 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
401 request, channel = self.make_request("GET", "/rooms/roomdoesnotexist/members")
402 self.render(request)
403 self.assertEquals(403, channel.code, msg=channel.result["body"])
442404
443405 def test_get_member_list_no_permission(self):
444 room_id = self.helper.create_room_as(b"@some_other_guy:red")
445 request, channel = make_request(b"GET", b"/rooms/%s/members" % room_id)
446 render(request, self.resource, self.clock)
447 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
406 room_id = self.helper.create_room_as("@some_other_guy:red")
407 request, channel = self.make_request("GET", "/rooms/%s/members" % room_id)
408 self.render(request)
409 self.assertEquals(403, channel.code, msg=channel.result["body"])
448410
449411 def test_get_member_list_mixed_memberships(self):
450 room_creator = b"@some_other_guy:red"
412 room_creator = "@some_other_guy:red"
451413 room_id = self.helper.create_room_as(room_creator)
452 room_path = b"/rooms/%s/members" % room_id
414 room_path = "/rooms/%s/members" % room_id
453415 self.helper.invite(room=room_id, src=room_creator, targ=self.user_id)
454416 # can't see list if you're just invited.
455 request, channel = make_request(b"GET", room_path)
456 render(request, self.resource, self.clock)
457 self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"])
417 request, channel = self.make_request("GET", room_path)
418 self.render(request)
419 self.assertEquals(403, channel.code, msg=channel.result["body"])
458420
459421 self.helper.join(room=room_id, user=self.user_id)
460422 # can see list now joined
461 request, channel = make_request(b"GET", room_path)
462 render(request, self.resource, self.clock)
463 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
423 request, channel = self.make_request("GET", room_path)
424 self.render(request)
425 self.assertEquals(200, channel.code, msg=channel.result["body"])
464426
465427 self.helper.leave(room=room_id, user=self.user_id)
466428 # can see old list once left
467 request, channel = make_request(b"GET", room_path)
468 render(request, self.resource, self.clock)
469 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
429 request, channel = self.make_request("GET", room_path)
430 self.render(request)
431 self.assertEquals(200, channel.code, msg=channel.result["body"])
470432
471433
472434 class RoomsCreateTestCase(RoomBase):
473435 """ Tests /rooms and /rooms/$room_id REST events. """
474436
475 user_id = b"@sid1:red"
437 user_id = "@sid1:red"
476438
477439 def test_post_room_no_keys(self):
478440 # POST with no config keys, expect new room id
479 request, channel = make_request(b"POST", b"/createRoom", b"{}")
480
481 render(request, self.resource, self.clock)
482 self.assertEquals(200, int(channel.result["code"]), channel.result)
441 request, channel = self.make_request("POST", "/createRoom", "{}")
442
443 self.render(request)
444 self.assertEquals(200, channel.code, channel.result)
483445 self.assertTrue("room_id" in channel.json_body)
484446
485447 def test_post_room_visibility_key(self):
486448 # POST with visibility config key, expect new room id
487 request, channel = make_request(
488 b"POST", b"/createRoom", b'{"visibility":"private"}'
489 )
490 render(request, self.resource, self.clock)
491 self.assertEquals(200, int(channel.result["code"]))
449 request, channel = self.make_request(
450 "POST", "/createRoom", b'{"visibility":"private"}'
451 )
452 self.render(request)
453 self.assertEquals(200, channel.code)
492454 self.assertTrue("room_id" in channel.json_body)
493455
494456 def test_post_room_custom_key(self):
495457 # POST with custom config keys, expect new room id
496 request, channel = make_request(b"POST", b"/createRoom", b'{"custom":"stuff"}')
497 render(request, self.resource, self.clock)
498 self.assertEquals(200, int(channel.result["code"]))
458 request, channel = self.make_request(
459 "POST", "/createRoom", b'{"custom":"stuff"}'
460 )
461 self.render(request)
462 self.assertEquals(200, channel.code)
499463 self.assertTrue("room_id" in channel.json_body)
500464
501465 def test_post_room_known_and_unknown_keys(self):
502466 # POST with custom + known config keys, expect new room id
503 request, channel = make_request(
504 b"POST", b"/createRoom", b'{"visibility":"private","custom":"things"}'
505 )
506 render(request, self.resource, self.clock)
507 self.assertEquals(200, int(channel.result["code"]))
467 request, channel = self.make_request(
468 "POST", "/createRoom", b'{"visibility":"private","custom":"things"}'
469 )
470 self.render(request)
471 self.assertEquals(200, channel.code)
508472 self.assertTrue("room_id" in channel.json_body)
509473
510474 def test_post_room_invalid_content(self):
511475 # POST with invalid content / paths, expect 400
512 request, channel = make_request(b"POST", b"/createRoom", b'{"visibili')
513 render(request, self.resource, self.clock)
514 self.assertEquals(400, int(channel.result["code"]))
515
516 request, channel = make_request(b"POST", b"/createRoom", b'["hello"]')
517 render(request, self.resource, self.clock)
518 self.assertEquals(400, int(channel.result["code"]))
476 request, channel = self.make_request("POST", "/createRoom", b'{"visibili')
477 self.render(request)
478 self.assertEquals(400, channel.code)
479
480 request, channel = self.make_request("POST", "/createRoom", b'["hello"]')
481 self.render(request)
482 self.assertEquals(400, channel.code)
519483
520484
521485 class RoomTopicTestCase(RoomBase):
522486 """ Tests /rooms/$room_id/topic REST events. """
523487
524 user_id = b"@sid1:red"
525
526 def setUp(self):
527
528 super(RoomTopicTestCase, self).setUp()
529
488 user_id = "@sid1:red"
489
490 def prepare(self, reactor, clock, hs):
530491 # create the room
531492 self.room_id = self.helper.create_room_as(self.user_id)
532 self.path = b"/rooms/%s/state/m.room.topic" % (self.room_id,)
493 self.path = "/rooms/%s/state/m.room.topic" % (self.room_id,)
533494
534495 def test_invalid_puts(self):
535496 # missing keys or invalid json
536 request, channel = make_request(b"PUT", self.path, '{}')
537 render(request, self.resource, self.clock)
538 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
539
540 request, channel = make_request(b"PUT", self.path, '{"_name":"bob"}')
541 render(request, self.resource, self.clock)
542 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
543
544 request, channel = make_request(b"PUT", self.path, '{"nao')
545 render(request, self.resource, self.clock)
546 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
547
548 request, channel = make_request(
549 b"PUT", self.path, '[{"_name":"bob"},{"_name":"jill"}]'
550 )
551 render(request, self.resource, self.clock)
552 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
553
554 request, channel = make_request(b"PUT", self.path, 'text only')
555 render(request, self.resource, self.clock)
556 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
557
558 request, channel = make_request(b"PUT", self.path, '')
559 render(request, self.resource, self.clock)
560 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
497 request, channel = self.make_request("PUT", self.path, '{}')
498 self.render(request)
499 self.assertEquals(400, channel.code, msg=channel.result["body"])
500
501 request, channel = self.make_request("PUT", self.path, '{"_name":"bo"}')
502 self.render(request)
503 self.assertEquals(400, channel.code, msg=channel.result["body"])
504
505 request, channel = self.make_request("PUT", self.path, '{"nao')
506 self.render(request)
507 self.assertEquals(400, channel.code, msg=channel.result["body"])
508
509 request, channel = self.make_request(
510 "PUT", self.path, '[{"_name":"bo"},{"_name":"jill"}]'
511 )
512 self.render(request)
513 self.assertEquals(400, channel.code, msg=channel.result["body"])
514
515 request, channel = self.make_request("PUT", self.path, 'text only')
516 self.render(request)
517 self.assertEquals(400, channel.code, msg=channel.result["body"])
518
519 request, channel = self.make_request("PUT", self.path, '')
520 self.render(request)
521 self.assertEquals(400, channel.code, msg=channel.result["body"])
561522
562523 # valid key, wrong type
563524 content = '{"topic":["Topic name"]}'
564 request, channel = make_request(b"PUT", self.path, content)
565 render(request, self.resource, self.clock)
566 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
525 request, channel = self.make_request("PUT", self.path, content)
526 self.render(request)
527 self.assertEquals(400, channel.code, msg=channel.result["body"])
567528
568529 def test_rooms_topic(self):
569530 # nothing should be there
570 request, channel = make_request(b"GET", self.path)
571 render(request, self.resource, self.clock)
572 self.assertEquals(404, int(channel.result["code"]), msg=channel.result["body"])
531 request, channel = self.make_request("GET", self.path)
532 self.render(request)
533 self.assertEquals(404, channel.code, msg=channel.result["body"])
573534
574535 # valid put
575536 content = '{"topic":"Topic name"}'
576 request, channel = make_request(b"PUT", self.path, content)
577 render(request, self.resource, self.clock)
578 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
537 request, channel = self.make_request("PUT", self.path, content)
538 self.render(request)
539 self.assertEquals(200, channel.code, msg=channel.result["body"])
579540
580541 # valid get
581 request, channel = make_request(b"GET", self.path)
582 render(request, self.resource, self.clock)
583 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
542 request, channel = self.make_request("GET", self.path)
543 self.render(request)
544 self.assertEquals(200, channel.code, msg=channel.result["body"])
584545 self.assert_dict(json.loads(content), channel.json_body)
585546
586547 def test_rooms_topic_with_extra_keys(self):
587548 # valid put with extra keys
588549 content = '{"topic":"Seasons","subtopic":"Summer"}'
589 request, channel = make_request(b"PUT", self.path, content)
590 render(request, self.resource, self.clock)
591 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
550 request, channel = self.make_request("PUT", self.path, content)
551 self.render(request)
552 self.assertEquals(200, channel.code, msg=channel.result["body"])
592553
593554 # valid get
594 request, channel = make_request(b"GET", self.path)
595 render(request, self.resource, self.clock)
596 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
555 request, channel = self.make_request("GET", self.path)
556 self.render(request)
557 self.assertEquals(200, channel.code, msg=channel.result["body"])
597558 self.assert_dict(json.loads(content), channel.json_body)
598559
599560
600561 class RoomMemberStateTestCase(RoomBase):
601562 """ Tests /rooms/$room_id/members/$user_id/state REST events. """
602563
603 user_id = b"@sid1:red"
604
605 def setUp(self):
606
607 super(RoomMemberStateTestCase, self).setUp()
564 user_id = "@sid1:red"
565
566 def prepare(self, reactor, clock, hs):
608567 self.room_id = self.helper.create_room_as(self.user_id)
609
610 def tearDown(self):
611 pass
612568
613569 def test_invalid_puts(self):
614570 path = "/rooms/%s/state/m.room.member/%s" % (self.room_id, self.user_id)
615571 # missing keys or invalid json
616 request, channel = make_request(b"PUT", path, '{}')
617 render(request, self.resource, self.clock)
618 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
619
620 request, channel = make_request(b"PUT", path, '{"_name":"bob"}')
621 render(request, self.resource, self.clock)
622 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
623
624 request, channel = make_request(b"PUT", path, '{"nao')
625 render(request, self.resource, self.clock)
626 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
627
628 request, channel = make_request(
629 b"PUT", path, b'[{"_name":"bob"},{"_name":"jill"}]'
630 )
631 render(request, self.resource, self.clock)
632 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
633
634 request, channel = make_request(b"PUT", path, 'text only')
635 render(request, self.resource, self.clock)
636 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
637
638 request, channel = make_request(b"PUT", path, '')
639 render(request, self.resource, self.clock)
640 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
572 request, channel = self.make_request("PUT", path, '{}')
573 self.render(request)
574 self.assertEquals(400, channel.code, msg=channel.result["body"])
575
576 request, channel = self.make_request("PUT", path, '{"_name":"bo"}')
577 self.render(request)
578 self.assertEquals(400, channel.code, msg=channel.result["body"])
579
580 request, channel = self.make_request("PUT", path, '{"nao')
581 self.render(request)
582 self.assertEquals(400, channel.code, msg=channel.result["body"])
583
584 request, channel = self.make_request(
585 "PUT", path, b'[{"_name":"bo"},{"_name":"jill"}]'
586 )
587 self.render(request)
588 self.assertEquals(400, channel.code, msg=channel.result["body"])
589
590 request, channel = self.make_request("PUT", path, 'text only')
591 self.render(request)
592 self.assertEquals(400, channel.code, msg=channel.result["body"])
593
594 request, channel = self.make_request("PUT", path, '')
595 self.render(request)
596 self.assertEquals(400, channel.code, msg=channel.result["body"])
641597
642598 # valid keys, wrong types
643599 content = '{"membership":["%s","%s","%s"]}' % (
645601 Membership.JOIN,
646602 Membership.LEAVE,
647603 )
648 request, channel = make_request(b"PUT", path, content.encode('ascii'))
649 render(request, self.resource, self.clock)
650 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
604 request, channel = self.make_request("PUT", path, content.encode('ascii'))
605 self.render(request)
606 self.assertEquals(400, channel.code, msg=channel.result["body"])
651607
652608 def test_rooms_members_self(self):
653609 path = "/rooms/%s/state/m.room.member/%s" % (
657613
658614 # valid join message (NOOP since we made the room)
659615 content = '{"membership":"%s"}' % Membership.JOIN
660 request, channel = make_request(b"PUT", path, content.encode('ascii'))
661 render(request, self.resource, self.clock)
662 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
663
664 request, channel = make_request(b"GET", path, None)
665 render(request, self.resource, self.clock)
666 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
616 request, channel = self.make_request("PUT", path, content.encode('ascii'))
617 self.render(request)
618 self.assertEquals(200, channel.code, msg=channel.result["body"])
619
620 request, channel = self.make_request("GET", path, None)
621 self.render(request)
622 self.assertEquals(200, channel.code, msg=channel.result["body"])
667623
668624 expected_response = {"membership": Membership.JOIN}
669625 self.assertEquals(expected_response, channel.json_body)
677633
678634 # valid invite message
679635 content = '{"membership":"%s"}' % Membership.INVITE
680 request, channel = make_request(b"PUT", path, content)
681 render(request, self.resource, self.clock)
682 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
683
684 request, channel = make_request(b"GET", path, None)
685 render(request, self.resource, self.clock)
686 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
636 request, channel = self.make_request("PUT", path, content)
637 self.render(request)
638 self.assertEquals(200, channel.code, msg=channel.result["body"])
639
640 request, channel = self.make_request("GET", path, None)
641 self.render(request)
642 self.assertEquals(200, channel.code, msg=channel.result["body"])
687643 self.assertEquals(json.loads(content), channel.json_body)
688644
689645 def test_rooms_members_other_custom_keys(self):
698654 Membership.INVITE,
699655 "Join us!",
700656 )
701 request, channel = make_request(b"PUT", path, content)
702 render(request, self.resource, self.clock)
703 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
704
705 request, channel = make_request(b"GET", path, None)
706 render(request, self.resource, self.clock)
707 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
657 request, channel = self.make_request("PUT", path, content)
658 self.render(request)
659 self.assertEquals(200, channel.code, msg=channel.result["body"])
660
661 request, channel = self.make_request("GET", path, None)
662 self.render(request)
663 self.assertEquals(200, channel.code, msg=channel.result["body"])
708664 self.assertEquals(json.loads(content), channel.json_body)
709665
710666
713669
714670 user_id = "@sid1:red"
715671
716 def setUp(self):
717 super(RoomMessagesTestCase, self).setUp()
718
672 def prepare(self, reactor, clock, hs):
719673 self.room_id = self.helper.create_room_as(self.user_id)
720674
721675 def test_invalid_puts(self):
722676 path = "/rooms/%s/send/m.room.message/mid1" % (urlparse.quote(self.room_id))
723677 # missing keys or invalid json
724 request, channel = make_request(b"PUT", path, '{}')
725 render(request, self.resource, self.clock)
726 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
727
728 request, channel = make_request(b"PUT", path, '{"_name":"bob"}')
729 render(request, self.resource, self.clock)
730 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
731
732 request, channel = make_request(b"PUT", path, '{"nao')
733 render(request, self.resource, self.clock)
734 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
735
736 request, channel = make_request(
737 b"PUT", path, '[{"_name":"bob"},{"_name":"jill"}]'
738 )
739 render(request, self.resource, self.clock)
740 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
741
742 request, channel = make_request(b"PUT", path, 'text only')
743 render(request, self.resource, self.clock)
744 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
745
746 request, channel = make_request(b"PUT", path, '')
747 render(request, self.resource, self.clock)
748 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
678 request, channel = self.make_request("PUT", path, b'{}')
679 self.render(request)
680 self.assertEquals(400, channel.code, msg=channel.result["body"])
681
682 request, channel = self.make_request("PUT", path, b'{"_name":"bo"}')
683 self.render(request)
684 self.assertEquals(400, channel.code, msg=channel.result["body"])
685
686 request, channel = self.make_request("PUT", path, b'{"nao')
687 self.render(request)
688 self.assertEquals(400, channel.code, msg=channel.result["body"])
689
690 request, channel = self.make_request(
691 "PUT", path, b'[{"_name":"bo"},{"_name":"jill"}]'
692 )
693 self.render(request)
694 self.assertEquals(400, channel.code, msg=channel.result["body"])
695
696 request, channel = self.make_request("PUT", path, b'text only')
697 self.render(request)
698 self.assertEquals(400, channel.code, msg=channel.result["body"])
699
700 request, channel = self.make_request("PUT", path, b'')
701 self.render(request)
702 self.assertEquals(400, channel.code, msg=channel.result["body"])
749703
750704 def test_rooms_messages_sent(self):
751705 path = "/rooms/%s/send/m.room.message/mid1" % (urlparse.quote(self.room_id))
752706
753 content = '{"body":"test","msgtype":{"type":"a"}}'
754 request, channel = make_request(b"PUT", path, content)
755 render(request, self.resource, self.clock)
756 self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"])
707 content = b'{"body":"test","msgtype":{"type":"a"}}'
708 request, channel = self.make_request("PUT", path, content)
709 self.render(request)
710 self.assertEquals(400, channel.code, msg=channel.result["body"])
757711
758712 # custom message types
759 content = '{"body":"test","msgtype":"test.custom.text"}'
760 request, channel = make_request(b"PUT", path, content)
761 render(request, self.resource, self.clock)
762 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
713 content = b'{"body":"test","msgtype":"test.custom.text"}'
714 request, channel = self.make_request("PUT", path, content)
715 self.render(request)
716 self.assertEquals(200, channel.code, msg=channel.result["body"])
763717
764718 # m.text message type
765719 path = "/rooms/%s/send/m.room.message/mid2" % (urlparse.quote(self.room_id))
766 content = '{"body":"test2","msgtype":"m.text"}'
767 request, channel = make_request(b"PUT", path, content)
768 render(request, self.resource, self.clock)
769 self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"])
720 content = b'{"body":"test2","msgtype":"m.text"}'
721 request, channel = self.make_request("PUT", path, content)
722 self.render(request)
723 self.assertEquals(200, channel.code, msg=channel.result["body"])
770724
771725
772726 class RoomInitialSyncTestCase(RoomBase):
774728
775729 user_id = "@sid1:red"
776730
777 def setUp(self):
778 super(RoomInitialSyncTestCase, self).setUp()
779
731 def prepare(self, reactor, clock, hs):
780732 # create the room
781733 self.room_id = self.helper.create_room_as(self.user_id)
782734
783735 def test_initial_sync(self):
784 request, channel = make_request(b"GET", "/rooms/%s/initialSync" % self.room_id)
785 render(request, self.resource, self.clock)
786 self.assertEquals(200, int(channel.result["code"]))
736 request, channel = self.make_request(
737 "GET", "/rooms/%s/initialSync" % self.room_id
738 )
739 self.render(request)
740 self.assertEquals(200, channel.code)
787741
788742 self.assertEquals(self.room_id, channel.json_body["room_id"])
789743 self.assertEquals("join", channel.json_body["membership"])
818772
819773 user_id = "@sid1:red"
820774
821 def setUp(self):
822 super(RoomMessageListTestCase, self).setUp()
775 def prepare(self, reactor, clock, hs):
823776 self.room_id = self.helper.create_room_as(self.user_id)
824777
825778 def test_topo_token_is_accepted(self):
826779 token = "t1-0_0_0_0_0_0_0_0_0"
827 request, channel = make_request(
828 b"GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)
829 )
830 render(request, self.resource, self.clock)
831 self.assertEquals(200, int(channel.result["code"]))
780 request, channel = self.make_request(
781 "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)
782 )
783 self.render(request)
784 self.assertEquals(200, channel.code)
832785 self.assertTrue("start" in channel.json_body)
833786 self.assertEquals(token, channel.json_body['start'])
834787 self.assertTrue("chunk" in channel.json_body)
836789
837790 def test_stream_token_is_accepted_for_fwd_pagianation(self):
838791 token = "s0_0_0_0_0_0_0_0_0"
839 request, channel = make_request(
840 b"GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)
841 )
842 render(request, self.resource, self.clock)
843 self.assertEquals(200, int(channel.result["code"]))
792 request, channel = self.make_request(
793 "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token)
794 )
795 self.render(request)
796 self.assertEquals(200, channel.code)
844797 self.assertTrue("start" in channel.json_body)
845798 self.assertEquals(token, channel.json_body['start'])
846799 self.assertTrue("chunk" in channel.json_body)
6161 self.assertEqual(channel.code, 200)
6262 self.assertTrue(
6363 set(
64 [
65 "next_batch",
66 "rooms",
67 "account_data",
68 "to_device",
69 "device_lists",
70 ]
64 ["next_batch", "rooms", "account_data", "to_device", "device_lists"]
7165 ).issubset(set(channel.json_body.keys()))
7266 )
33 from six import text_type
44
55 import attr
6
7 from twisted.internet import address, threads
6 from zope.interface import implementer
7
8 from twisted.internet import address, threads, udp
9 from twisted.internet._resolver import HostResolution
10 from twisted.internet.address import IPv4Address
811 from twisted.internet.defer import Deferred
12 from twisted.internet.error import DNSLookupError
13 from twisted.internet.interfaces import IReactorPluggableNameResolver
914 from twisted.python.failure import Failure
1015 from twisted.test.proto_helpers import MemoryReactorClock
1116
6469 def getPeer(self):
6570 # We give an address so that getClientIP returns a non null entry,
6671 # causing us to record the MAU
67 return address.IPv4Address(b"TCP", "127.0.0.1", 3423)
72 return address.IPv4Address("TCP", "127.0.0.1", 3423)
6873
6974 def getHost(self):
7075 return None
153158 wait_until_result(clock, request)
154159
155160
161 @implementer(IReactorPluggableNameResolver)
156162 class ThreadedMemoryReactorClock(MemoryReactorClock):
157163 """
158164 A MemoryReactorClock that supports callFromThread.
159165 """
166
167 def __init__(self):
168 self._udp = []
169 self.lookups = {}
170
171 class Resolver(object):
172 def resolveHostName(
173 _self,
174 resolutionReceiver,
175 hostName,
176 portNumber=0,
177 addressTypes=None,
178 transportSemantics='TCP',
179 ):
180
181 resolution = HostResolution(hostName)
182 resolutionReceiver.resolutionBegan(resolution)
183 if hostName not in self.lookups:
184 raise DNSLookupError("OH NO")
185
186 resolutionReceiver.addressResolved(
187 IPv4Address('TCP', self.lookups[hostName], portNumber)
188 )
189 resolutionReceiver.resolutionComplete()
190 return resolution
191
192 self.nameResolver = Resolver()
193 super(ThreadedMemoryReactorClock, self).__init__()
194
195 def listenUDP(self, port, protocol, interface='', maxPacketSize=8196):
196 p = udp.Port(port, protocol, interface, maxPacketSize, self)
197 p.startListening()
198 self._udp.append(p)
199 return p
160200
161201 def callFromThread(self, callback, *args, **kwargs):
162202 """
7979
8080 self._rlsn._auth.check_auth_blocking = Mock()
8181 mock_event = Mock(
82 type=EventTypes.Message,
83 content={"msgtype": ServerNoticeMsgType},
84 )
85 self._rlsn._store.get_events = Mock(return_value=defer.succeed(
86 {"123": mock_event}
87 ))
82 type=EventTypes.Message, content={"msgtype": ServerNoticeMsgType}
83 )
84 self._rlsn._store.get_events = Mock(
85 return_value=defer.succeed({"123": mock_event})
86 )
8887
8988 yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
9089 # Would be better to check the content, but once == remove blocking event
9897 )
9998
10099 mock_event = Mock(
101 type=EventTypes.Message,
102 content={"msgtype": ServerNoticeMsgType},
103 )
104 self._rlsn._store.get_events = Mock(return_value=defer.succeed(
105 {"123": mock_event}
106 ))
100 type=EventTypes.Message, content={"msgtype": ServerNoticeMsgType}
101 )
102 self._rlsn._store.get_events = Mock(
103 return_value=defer.succeed({"123": mock_event})
104 )
107105 yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
108106
109107 self._send_notice.assert_not_called()
176174
177175 @defer.inlineCallbacks
178176 def test_server_notice_only_sent_once(self):
179 self.store.get_monthly_active_count = Mock(
180 return_value=1000,
181 )
182
183 self.store.user_last_seen_monthly_active = Mock(
184 return_value=1000,
185 )
177 self.store.get_monthly_active_count = Mock(return_value=1000)
178
179 self.store.user_last_seen_monthly_active = Mock(return_value=1000)
186180
187181 # Call the function multiple times to ensure we only send the notice once
188182 yield self._rlsn.maybe_send_server_notice_to_user(self.user_id)
192186 # Now lets get the last load of messages in the service notice room and
193187 # check that there is only one server notice
194188 room_id = yield self.server_notices_manager.get_notice_room_for_user(
195 self.user_id,
189 self.user_id
196190 )
197191
198192 token = yield self.event_source.get_current_token()
199193 events, _ = yield self.store.get_recent_events_for_room(
200 room_id, limit=100, end_token=token.room_key,
194 room_id, limit=100, end_token=token.room_key
201195 )
202196
203197 count = 0
100100 self.hs.config.limit_usage_by_mau = True
101101 self.hs.config.max_mau_value = 50
102102 user_id = "@user:server"
103 yield self.store.register(user_id=user_id, token="123", password_hash=None)
103104
104105 active = yield self.store.user_last_seen_monthly_active(user_id)
105106 self.assertFalse(active)
107108 yield self.store.insert_client_ip(
108109 user_id, "access_token", "ip", "user_agent", "device_id"
109110 )
110 yield self.store.insert_client_ip(
111 user_id, "access_token", "ip", "user_agent", "device_id"
112 )
113111 active = yield self.store.user_last_seen_monthly_active(user_id)
114112 self.assertTrue(active)
1111 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212 # See the License for the specific language governing permissions and
1313 # limitations under the License.
14 from mock import Mock
15
16 from twisted.internet import defer
1417
1518 from tests.unittest import HomeserverTestCase
1619
2225
2326 hs = self.setup_test_homeserver()
2427 self.store = hs.get_datastore()
25
28 hs.config.limit_usage_by_mau = True
29 hs.config.max_mau_value = 50
2630 # Advance the clock a bit
2731 reactor.advance(FORTY_DAYS)
2832
7276 active_count = self.store.get_monthly_active_count()
7377 self.assertEquals(self.get_success(active_count), user_num)
7478
75 # Test that regalar users are removed from the db
79 # Test that regular users are removed from the db
7680 ru_count = 2
7781 self.store.upsert_monthly_active_user("@ru1:server")
7882 self.store.upsert_monthly_active_user("@ru2:server")
138142
139143 count = self.store.get_monthly_active_count()
140144 self.assertEquals(self.get_success(count), 0)
145
146 def test_populate_monthly_users_is_guest(self):
147 # Test that guest users are not added to mau list
148 user_id = "user_id"
149 self.store.register(
150 user_id=user_id, token="123", password_hash=None, make_guest=True
151 )
152 self.store.upsert_monthly_active_user = Mock()
153 self.store.populate_monthly_active_users(user_id)
154 self.pump()
155 self.store.upsert_monthly_active_user.assert_not_called()
156
157 def test_populate_monthly_users_should_update(self):
158 self.store.upsert_monthly_active_user = Mock()
159
160 self.store.is_trial_user = Mock(
161 return_value=defer.succeed(False)
162 )
163
164 self.store.user_last_seen_monthly_active = Mock(
165 return_value=defer.succeed(None)
166 )
167 self.store.populate_monthly_active_users('user_id')
168 self.pump()
169 self.store.upsert_monthly_active_user.assert_called_once()
170
171 def test_populate_monthly_users_should_not_update(self):
172 self.store.upsert_monthly_active_user = Mock()
173
174 self.store.is_trial_user = Mock(
175 return_value=defer.succeed(False)
176 )
177 self.store.user_last_seen_monthly_active = Mock(
178 return_value=defer.succeed(
179 self.hs.get_clock().time_msec()
180 )
181 )
182 self.store.populate_monthly_active_users('user_id')
183 self.pump()
184 self.store.upsert_monthly_active_user.assert_not_called()
185
186 def test_get_reserved_real_user_account(self):
187 # Test no reserved users, or reserved threepids
188 count = self.store.get_registered_reserved_users_count()
189 self.assertEquals(self.get_success(count), 0)
190 # Test reserved users but no registered users
191
192 user1 = '@user1:example.com'
193 user2 = '@user2:example.com'
194 user1_email = 'user1@example.com'
195 user2_email = 'user2@example.com'
196 threepids = [
197 {'medium': 'email', 'address': user1_email},
198 {'medium': 'email', 'address': user2_email},
199 ]
200 self.hs.config.mau_limits_reserved_threepids = threepids
201 self.store.initialise_reserved_users(threepids)
202 self.pump()
203 count = self.store.get_registered_reserved_users_count()
204 self.assertEquals(self.get_success(count), 0)
205
206 # Test reserved registed users
207 self.store.register(user_id=user1, token="123", password_hash=None)
208 self.store.register(user_id=user2, token="456", password_hash=None)
209 self.pump()
210
211 now = int(self.hs.get_clock().time_msec())
212 self.store.user_add_threepid(user1, "email", user1_email, now, now)
213 self.store.user_add_threepid(user2, "email", user2_email, now, now)
214 count = self.store.get_registered_reserved_users_count()
215 self.assertEquals(self.get_success(count), len(threepids))
184184
185185 # test _get_some_state_from_cache correctly filters out members with types=[]
186186 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
187 self.store._state_group_cache,
188 group, [], filtered_types=[EventTypes.Member]
187 self.store._state_group_cache, group, [], filtered_types=[EventTypes.Member]
189188 )
190189
191190 self.assertEqual(is_all, True)
199198
200199 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
201200 self.store._state_group_members_cache,
202 group, [], filtered_types=[EventTypes.Member]
203 )
204
205 self.assertEqual(is_all, True)
206 self.assertDictEqual(
207 {},
208 state_dict,
209 )
201 group,
202 [],
203 filtered_types=[EventTypes.Member],
204 )
205
206 self.assertEqual(is_all, True)
207 self.assertDictEqual({}, state_dict)
210208
211209 # test _get_some_state_from_cache correctly filters in members with wildcard types
212210 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
213211 self.store._state_group_cache,
214 group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
212 group,
213 [(EventTypes.Member, None)],
214 filtered_types=[EventTypes.Member],
215215 )
216216
217217 self.assertEqual(is_all, True)
225225
226226 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
227227 self.store._state_group_members_cache,
228 group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
228 group,
229 [(EventTypes.Member, None)],
230 filtered_types=[EventTypes.Member],
229231 )
230232
231233 self.assertEqual(is_all, True)
263265 )
264266
265267 self.assertEqual(is_all, True)
266 self.assertDictEqual(
267 {
268 (e5.type, e5.state_key): e5.event_id,
269 },
270 state_dict,
271 )
268 self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)
272269
273270 # test _get_some_state_from_cache correctly filters in members with specific types
274271 # and no filtered_types
275272 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
276273 self.store._state_group_members_cache,
277 group, [(EventTypes.Member, e5.state_key)], filtered_types=None
274 group,
275 [(EventTypes.Member, e5.state_key)],
276 filtered_types=None,
278277 )
279278
280279 self.assertEqual(is_all, True)
304303 key=group,
305304 value=state_dict_ids,
306305 # list fetched keys so it knows it's partial
307 fetched_keys=(
308 (e1.type, e1.state_key),
309 ),
306 fetched_keys=((e1.type, e1.state_key),),
310307 )
311308
312309 (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(
314311 )
315312
316313 self.assertEqual(is_all, False)
317 self.assertEqual(
318 known_absent,
319 set(
320 [
321 (e1.type, e1.state_key),
322 ]
323 ),
324 )
325 self.assertDictEqual(
326 state_dict_ids,
327 {
328 (e1.type, e1.state_key): e1.event_id,
329 },
330 )
314 self.assertEqual(known_absent, set([(e1.type, e1.state_key)]))
315 self.assertDictEqual(state_dict_ids, {(e1.type, e1.state_key): e1.event_id})
331316
332317 ############################################
333318 # test that things work with a partial cache
335320 # test _get_some_state_from_cache correctly filters out members with types=[]
336321 room_id = self.room.to_string()
337322 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
338 self.store._state_group_cache,
339 group, [], filtered_types=[EventTypes.Member]
323 self.store._state_group_cache, group, [], filtered_types=[EventTypes.Member]
340324 )
341325
342326 self.assertEqual(is_all, False)
345329 room_id = self.room.to_string()
346330 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
347331 self.store._state_group_members_cache,
348 group, [], filtered_types=[EventTypes.Member]
332 group,
333 [],
334 filtered_types=[EventTypes.Member],
349335 )
350336
351337 self.assertEqual(is_all, True)
354340 # test _get_some_state_from_cache correctly filters in members wildcard types
355341 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
356342 self.store._state_group_cache,
357 group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
343 group,
344 [(EventTypes.Member, None)],
345 filtered_types=[EventTypes.Member],
358346 )
359347
360348 self.assertEqual(is_all, False)
361 self.assertDictEqual(
362 {
363 (e1.type, e1.state_key): e1.event_id,
364 },
365 state_dict,
366 )
367
368 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
369 self.store._state_group_members_cache,
370 group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member]
349 self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict)
350
351 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
352 self.store._state_group_members_cache,
353 group,
354 [(EventTypes.Member, None)],
355 filtered_types=[EventTypes.Member],
371356 )
372357
373358 self.assertEqual(is_all, True)
388373 )
389374
390375 self.assertEqual(is_all, False)
391 self.assertDictEqual(
392 {
393 (e1.type, e1.state_key): e1.event_id,
394 },
395 state_dict,
396 )
397
398 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
399 self.store._state_group_members_cache,
400 group,
401 [(EventTypes.Member, e5.state_key)],
402 filtered_types=[EventTypes.Member],
403 )
404
405 self.assertEqual(is_all, True)
406 self.assertDictEqual(
407 {
408 (e5.type, e5.state_key): e5.event_id,
409 },
410 state_dict,
411 )
376 self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict)
377
378 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
379 self.store._state_group_members_cache,
380 group,
381 [(EventTypes.Member, e5.state_key)],
382 filtered_types=[EventTypes.Member],
383 )
384
385 self.assertEqual(is_all, True)
386 self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)
412387
413388 # test _get_some_state_from_cache correctly filters in members with specific types
414389 # and no filtered_types
415390 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
416391 self.store._state_group_cache,
417 group, [(EventTypes.Member, e5.state_key)], filtered_types=None
392 group,
393 [(EventTypes.Member, e5.state_key)],
394 filtered_types=None,
418395 )
419396
420397 self.assertEqual(is_all, False)
422399
423400 (state_dict, is_all) = yield self.store._get_some_state_from_cache(
424401 self.store._state_group_members_cache,
425 group, [(EventTypes.Member, e5.state_key)], filtered_types=None
426 )
427
428 self.assertEqual(is_all, True)
429 self.assertDictEqual(
430 {
431 (e5.type, e5.state_key): e5.event_id,
432 },
433 state_dict,
434 )
402 group,
403 [(EventTypes.Member, e5.state_key)],
404 filtered_types=None,
405 )
406
407 self.assertEqual(is_all, True)
408 self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)
184184 self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
185185
186186 def create_user(self, localpart):
187 request_data = json.dumps({
188 "username": localpart,
189 "password": "monkey",
190 "auth": {"type": LoginType.DUMMY},
191 })
192
193 request, channel = make_request(b"POST", b"/register", request_data)
187 request_data = json.dumps(
188 {
189 "username": localpart,
190 "password": "monkey",
191 "auth": {"type": LoginType.DUMMY},
192 }
193 )
194
195 request, channel = make_request("POST", "/register", request_data)
194196 render(request, self.resource, self.reactor)
195197
196 if channel.result["code"] != b"200":
198 if channel.code != 200:
197199 raise HttpResponseException(
198 int(channel.result["code"]),
199 channel.result["reason"],
200 channel.result["body"],
200 channel.code, channel.result["reason"], channel.result["body"]
201201 ).to_synapse_error()
202202
203203 access_token = channel.json_body["access_token"]
205205 return access_token
206206
207207 def do_sync_for_user(self, token):
208 request, channel = make_request(b"GET", b"/sync", access_token=token)
208 request, channel = make_request(
209 "GET", "/sync", access_token=token.encode('ascii')
210 )
209211 render(request, self.resource, self.reactor)
210212
211 if channel.result["code"] != b"200":
213 if channel.code != 200:
212214 raise HttpResponseException(
213 int(channel.result["code"]),
214 channel.result["reason"],
215 channel.result["body"],
215 channel.code, channel.result["reason"], channel.result["body"]
216216 ).to_synapse_error()
0 # -*- coding: utf-8 -*-
1 # Copyright 2018 New Vector Ltd
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15
16 from synapse.metrics import InFlightGauge
17
18 from tests import unittest
19
20
21 class TestMauLimit(unittest.TestCase):
22 def test_basic(self):
23 gauge = InFlightGauge(
24 "test1", "",
25 labels=["test_label"],
26 sub_metrics=["foo", "bar"],
27 )
28
29 def handle1(metrics):
30 metrics.foo += 2
31 metrics.bar = max(metrics.bar, 5)
32
33 def handle2(metrics):
34 metrics.foo += 3
35 metrics.bar = max(metrics.bar, 7)
36
37 gauge.register(("key1",), handle1)
38
39 self.assert_dict({
40 "test1_total": {("key1",): 1},
41 "test1_foo": {("key1",): 2},
42 "test1_bar": {("key1",): 5},
43 }, self.get_metrics_from_gauge(gauge))
44
45 gauge.unregister(("key1",), handle1)
46
47 self.assert_dict({
48 "test1_total": {("key1",): 0},
49 "test1_foo": {("key1",): 0},
50 "test1_bar": {("key1",): 0},
51 }, self.get_metrics_from_gauge(gauge))
52
53 gauge.register(("key1",), handle1)
54 gauge.register(("key2",), handle2)
55
56 self.assert_dict({
57 "test1_total": {("key1",): 1, ("key2",): 1},
58 "test1_foo": {("key1",): 2, ("key2",): 3},
59 "test1_bar": {("key1",): 5, ("key2",): 7},
60 }, self.get_metrics_from_gauge(gauge))
61
62 gauge.unregister(("key2",), handle2)
63 gauge.register(("key1",), handle2)
64
65 self.assert_dict({
66 "test1_total": {("key1",): 2, ("key2",): 0},
67 "test1_foo": {("key1",): 5, ("key2",): 0},
68 "test1_bar": {("key1",): 7, ("key2",): 0},
69 }, self.get_metrics_from_gauge(gauge))
70
71 def get_metrics_from_gauge(self, gauge):
72 results = {}
73
74 for r in gauge.collect():
75 results[r.name] = {
76 tuple(labels[x] for x in gauge.labels): value
77 for _, labels, value in r.samples
78 }
79
80 return results
179179 graph = Graph(
180180 nodes={
181181 "START": DictObj(
182 type=EventTypes.Create, state_key="", content={}, depth=1,
182 type=EventTypes.Create, state_key="", content={}, depth=1
183183 ),
184184 "A": DictObj(type=EventTypes.Message, depth=2),
185185 "B": DictObj(type=EventTypes.Message, depth=3),
9999
100100 @defer.inlineCallbacks
101101 def setup_test_homeserver(
102 cleanup_func, name="test", datastore=None, config=None, reactor=None,
103 homeserverToUse=TestHomeServer, **kargs
102 cleanup_func,
103 name="test",
104 datastore=None,
105 config=None,
106 reactor=None,
107 homeserverToUse=TestHomeServer,
108 **kargs
104109 ):
105110 """
106111 Setup a homeserver suitable for running tests against. Keyword arguments
146151 config.hs_disabled_message = ""
147152 config.hs_disabled_limit_type = ""
148153 config.max_mau_value = 50
154 config.mau_trial_days = 0
149155 config.mau_limits_reserved_threepids = []
150156 config.admin_contact = None
151157 config.rc_messages_per_second = 10000
321327 @patch('twisted.web.http.Request')
322328 @defer.inlineCallbacks
323329 def trigger(
324 self, http_method, path, content, mock_request,
325 federation_auth_origin=None,
330 self, http_method, path, content, mock_request, federation_auth_origin=None
326331 ):
327332 """ Fire an HTTP event.
328333
355360 headers = {}
356361 if federation_auth_origin is not None:
357362 headers[b"Authorization"] = [
358 b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin, )
363 b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,)
359364 ]
360365 mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers)
361366
575580 event_builder_factory = hs.get_event_builder_factory()
576581 event_creation_handler = hs.get_event_creation_handler()
577582
578 builder = event_builder_factory.new({
579 "type": EventTypes.Create,
580 "state_key": "",
581 "sender": creator_id,
582 "room_id": room_id,
583 "content": {},
584 })
585
586 event, context = yield event_creation_handler.create_new_client_event(
587 builder
583 builder = event_builder_factory.new(
584 {
585 "type": EventTypes.Create,
586 "state_key": "",
587 "sender": creator_id,
588 "room_id": room_id,
589 "content": {},
590 }
588591 )
589592
593 event, context = yield event_creation_handler.create_new_client_event(builder)
594
590595 yield store.persist_event(event, context)
6363 {[base]setenv}
6464 SYNAPSE_POSTGRES = 1
6565
66 [testenv:py35]
67 usedevelop=true
68
6669 [testenv:py36]
6770 usedevelop=true
68 commands =
69 /usr/bin/find "{toxinidir}" -name '*.pyc' -delete
70 coverage run {env:COVERAGE_OPTS:} --source="{toxinidir}/synapse" \
71 "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests/config \
72 tests/api/test_filtering.py \
73 tests/api/test_ratelimiting.py \
74 tests/appservice \
75 tests/crypto \
76 tests/events \
77 tests/handlers/test_appservice.py \
78 tests/handlers/test_auth.py \
79 tests/handlers/test_device.py \
80 tests/handlers/test_directory.py \
81 tests/handlers/test_e2e_keys.py \
82 tests/handlers/test_presence.py \
83 tests/handlers/test_profile.py \
84 tests/handlers/test_register.py \
85 tests/replication/slave/storage/test_account_data.py \
86 tests/replication/slave/storage/test_receipts.py \
87 tests/storage/test_appservice.py \
88 tests/storage/test_background_update.py \
89 tests/storage/test_base.py \
90 tests/storage/test__base.py \
91 tests/storage/test_client_ips.py \
92 tests/storage/test_devices.py \
93 tests/storage/test_end_to_end_keys.py \
94 tests/storage/test_event_push_actions.py \
95 tests/storage/test_keys.py \
96 tests/storage/test_presence.py \
97 tests/storage/test_profile.py \
98 tests/storage/test_registration.py \
99 tests/storage/test_room.py \
100 tests/storage/test_user_directory.py \
101 tests/test_distributor.py \
102 tests/test_dns.py \
103 tests/test_preview.py \
104 tests/test_test_utils.py \
105 tests/test_types.py \
106 tests/util} \
107 {env:TOXSUFFIX:}
108 {env:DUMP_COVERAGE_COMMAND:coverage report -m}
10971
11072 [testenv:packaging]
11173 deps =