matrix-synapse / bef77b1
New upstream version 1.12.4 (Andrej Shadura)
13 changed file(s) with 362 addition(s) and 61 deletion(s).
0 Synapse 1.12.4 (2020-04-23)
1 ===========================
2
3 No significant changes.
4
5
6 Synapse 1.12.4rc1 (2020-04-22)
7 ==============================
8
9 Features
10 --------
11
12 - Always send users their own device updates. ([\#7160](https://github.com/matrix-org/synapse/issues/7160))
13 - Add support for handling GET requests for `account_data` on a worker. ([\#7311](https://github.com/matrix-org/synapse/issues/7311))
14
15
16 Bugfixes
17 --------
18
19 - Fix a bug that prevented cross-signing with users on worker-mode synapses. ([\#7255](https://github.com/matrix-org/synapse/issues/7255))
20 - Do not treat display names as globs in push rules. ([\#7271](https://github.com/matrix-org/synapse/issues/7271))
21 - Fix a bug with cross-signing devices belonging to remote users who did not share a room with any user on the local homeserver. ([\#7289](https://github.com/matrix-org/synapse/issues/7289))
22
23
024 Synapse 1.12.3 (2020-04-03)
125 ===========================
226
0 matrix-synapse-py3 (1.12.4) stable; urgency=medium
1
2 * New synapse release 1.12.4.
3
4 -- Synapse Packaging team <packages@matrix.org> Thu, 23 Apr 2020 10:58:14 -0400
5
06 matrix-synapse-py3 (1.12.3) stable; urgency=medium
17
28 [ Richard van der Hoff ]
267267
268268 ^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$
269269 ^/_matrix/client/(api/v1|r0|unstable)/groups/.*$
270 ^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/account_data/
271 ^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/rooms/[^/]*/account_data/
270272
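As an illustration (not part of the upstream change), a minimal Python sketch that checks sample request paths against the two newly listed `account_data` patterns; the user ID, room ID and account data types are made up:

```python
# Hypothetical check: which of these client paths match the worker-routable
# account_data patterns documented above?
import re

worker_patterns = [
    r"^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/account_data/",
    r"^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/rooms/[^/]*/account_data/",
]

sample_paths = [
    "/_matrix/client/r0/user/@alice:example.com/account_data/m.direct",
    "/_matrix/client/r0/user/@alice:example.com/rooms/!abc:example.com/account_data/m.fully_read",
    "/_matrix/client/r0/account/whoami",
]

for path in sample_paths:
    routable = any(re.match(p, path) for p in worker_patterns)
    print(f"{path} -> {'worker' if routable else 'main process'}")
```

The first two paths match the new patterns; per the servlet change further down in this diff, only read requests are meant to be answered this way, while PUTs still belong on the main process.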
271273 Additionally, the following REST endpoints can be handled, but all requests must
272274 be routed to the same instance:
3535 except ImportError:
3636 pass
3737
38 __version__ = "1.12.3"
38 __version__ = "1.12.4"
3939
4040 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
4141 # We import here so that we don't have to install a bunch of deps when
9797 from synapse.rest.client.v2_alpha import groups, sync, user_directory
9898 from synapse.rest.client.v2_alpha._base import client_patterns
9999 from synapse.rest.client.v2_alpha.account import ThreepidRestServlet
100 from synapse.rest.client.v2_alpha.account_data import (
101 AccountDataServlet,
102 RoomAccountDataServlet,
103 )
100104 from synapse.rest.client.v2_alpha.keys import KeyChangesServlet, KeyQueryServlet
101105 from synapse.rest.client.v2_alpha.register import RegisterRestServlet
102106 from synapse.rest.client.versions import VersionsRestServlet
474478 ProfileDisplaynameRestServlet(self).register(resource)
475479 ProfileRestServlet(self).register(resource)
476480 KeyUploadServlet(self).register(resource)
481 AccountDataServlet(self).register(resource)
482 RoomAccountDataServlet(self).register(resource)
477483
478484 sync.register_servlets(self, resource)
479485 events.register_servlets(self, resource)
398398 {
399399 "device_keys": {
400400 "<user_id>": ["<device_id>"]
401 } }
401 }
402 }
402403
403404 Response:
404405 {
405406 "device_keys": {
406407 "<user_id>": {
407408 "<device_id>": {...}
408 } } }
409 }
410 },
411 "master_key": {
412 "<user_id>": {...}
413 }
414 },
415 "self_signing_key": {
416 "<user_id>": {...}
417 }
418 }
409419
410420 Args:
411421 destination(str): The server to query.
412422 query_content(dict): The user ids to query.
413423 Returns:
414 A dict containg the device keys.
424 A dict containing device and cross-signing keys.
415425 """
416426 path = _create_v1_path("/user/keys/query")
417427
428438 Response:
429439 {
430440 "stream_id": "...",
431 "devices": [ { ... } ]
441 "devices": [ { ... } ],
442 "master_key": {
443 "user_id": "<user_id>",
444 "usage": [...],
445 "keys": {...},
446 "signatures": {
447 "<user_id>": {...}
448 }
449 },
450 "self_signing_key": {
451 "user_id": "<user_id>",
452 "usage": [...],
453 "keys": {...},
454 "signatures": {
455 "<user_id>": {...}
456 }
457 }
432458 }
433459
434460 Args:
435461 destination(str): The server to query.
436462 query_content(dict): The user ids to query.
437463 Returns:
438 A dict containg the device keys.
464 A dict containing device and cross-signing keys.
439465 """
440466 path = _create_v1_path("/user/devices/%s", user_id)
441467
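As a reading aid only (not part of the change), a sketch of the extended response shape described in the docstring above, with placeholder values rather than real keys:

```python
# Placeholder illustration of a /user/devices response that now also carries
# cross-signing keys alongside the device list.
example_response = {
    "stream_id": "12345",
    "devices": [{"device_id": "ABCDEFG", "keys": {}}],
    "master_key": {
        "user_id": "@bob:remote.example",
        "usage": ["master"],
        "keys": {"ed25519:base64+master+public+key": "base64+master+public+key"},
        "signatures": {"@bob:remote.example": {}},
    },
    "self_signing_key": {
        "user_id": "@bob:remote.example",
        "usage": ["self_signing"],
        "keys": {"ed25519:base64+self+signing+key": "base64+self+signing+key"},
        "signatures": {"@bob:remote.example": {}},
    },
}
```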
453479 {
454480 "one_time_keys": {
455481 "<user_id>": {
456 "<device_id>": "<algorithm>"
457 } } }
482 "<device_id>": "<algorithm>"
483 }
484 }
485 }
458486
459487 Response:
460488 {
462490 "<user_id>": {
463491 "<device_id>": {
464492 "<algorithm>:<key_id>": "<key_base64>"
465 } } } }
493 }
494 }
495 }
496 }
466497
467498 Args:
468499 destination(str): The server to query.
469500 query_content(dict): The user ids to query.
470501 Returns:
471 A dict containg the one-time keys.
502 A dict containing the one-time keys.
472503 """
473504
474505 path = _create_v1_path("/user/keys/claim")
124124 users_who_share_room = yield self.store.get_users_who_share_room_with_user(
125125 user_id
126126 )
127
128 tracked_users = set(users_who_share_room)
129
130 # Always tell the user about their own devices
131 tracked_users.add(user_id)
132
127133 changed = yield self.store.get_users_whose_devices_changed(
128 from_token.device_list_key, users_who_share_room
134 from_token.device_list_key, tracked_users
129135 )
130136
131137 # Then work out if any users have since joined
455461
456462 room_ids = yield self.store.get_rooms_for_user(user_id)
457463
458 yield self.notifier.on_new_event("device_list_key", position, rooms=room_ids)
464 # specify the user ID too since the user should always get their own device list
465 # updates, even if they aren't in any rooms.
466 yield self.notifier.on_new_event(
467 "device_list_key", position, users=[user_id], rooms=room_ids
468 )
459469
460470 if hosts:
461471 logger.info(
5353
5454 self._edu_updater = SigningKeyEduUpdater(hs, self)
5555
56 federation_registry = hs.get_federation_registry()
57
5658 self._is_master = hs.config.worker_app is None
5759 if not self._is_master:
5860 self._user_device_resync_client = ReplicationUserDevicesResyncRestServlet.make_client(
5961 hs
6062 )
61
62 federation_registry = hs.get_federation_registry()
63
64 # FIXME: switch to m.signing_key_update when MSC1756 is merged into the spec
65 federation_registry.register_edu_handler(
66 "org.matrix.signing_key_update",
67 self._edu_updater.incoming_signing_key_update,
68 )
63 else:
64 # Only register this edu handler on master as it requires writing
65 # device updates to the db
66 #
67 # FIXME: switch to m.signing_key_update when MSC1756 is merged into the spec
68 federation_registry.register_edu_handler(
69 "org.matrix.signing_key_update",
70 self._edu_updater.incoming_signing_key_update,
71 )
72
6973 # doesn't really work as part of the generic query API, because the
7074 # query request requires an object POST, but we abuse the
7175 # "query handler" interface.
169173 """This is called when we are querying the device list of a user on
170174 a remote homeserver and their device list is not in the device list
171175 cache. If we share a room with this user and we're not querying for
172 specific user we will update the cache
173 with their device list."""
176 specific user we will update the cache with their device list.
177 """
174178
175179 destination_query = remote_queries_not_in_cache[destination]
176180
956960 return signature_list, failures
957961
958962 @defer.inlineCallbacks
959 def _get_e2e_cross_signing_verify_key(self, user_id, key_type, from_user_id=None):
960 """Fetch the cross-signing public key from storage and interpret it.
963 def _get_e2e_cross_signing_verify_key(
964 self, user_id: str, key_type: str, from_user_id: str = None
965 ):
966 """Fetch locally or remotely query for a cross-signing public key.
967
968 First, attempt to fetch the cross-signing public key from storage.
969 If that fails, query the keys from the homeserver they belong to
970 and update our local copy.
961971
962972 Args:
963 user_id (str): the user whose key should be fetched
964 key_type (str): the type of key to fetch
965 from_user_id (str): the user that we are fetching the keys for.
973 user_id: the user whose key should be fetched
974 key_type: the type of key to fetch
975 from_user_id: the user that we are fetching the keys for.
966976 This affects what signatures are fetched.
967977
968978 Returns:
971981
972982 Raises:
973983 NotFoundError: if the key is not found
984 SynapseError: if `user_id` is invalid
974985 """
986 user = UserID.from_string(user_id)
975987 key = yield self.store.get_e2e_cross_signing_key(
976988 user_id, key_type, from_user_id
977989 )
990
991 if key:
992 # We found a copy of this key in our database. Decode and return it
993 key_id, verify_key = get_verify_key_from_cross_signing_key(key)
994 return key, key_id, verify_key
995
996 # If we couldn't find the key locally, and we're looking for keys of
997 # another user then attempt to fetch the missing key from the remote
998 # user's server.
999 #
1000 # We may run into this in possible edge cases where a user tries to
1001 # cross-sign a remote user, but does not share any rooms with them yet.
1002 # Thus, we would not have their key list yet. We instead fetch the key,
1003 # store it and notify clients of new, associated device IDs.
1004 if self.is_mine(user) or key_type not in ["master", "self_signing"]:
1005 # Note that master and self_signing keys are the only cross-signing keys we
1006 # can request over federation
1007 raise NotFoundError("No %s key found for %s" % (key_type, user_id))
1008
1009 (
1010 key,
1011 key_id,
1012 verify_key,
1013 ) = yield self._retrieve_cross_signing_keys_for_remote_user(user, key_type)
1014
9781015 if key is None:
979 logger.debug("no %s key found for %s", key_type, user_id)
9801016 raise NotFoundError("No %s key found for %s" % (key_type, user_id))
981 key_id, verify_key = get_verify_key_from_cross_signing_key(key)
1017
9821018 return key, key_id, verify_key
1019
1020 @defer.inlineCallbacks
1021 def _retrieve_cross_signing_keys_for_remote_user(
1022 self, user: UserID, desired_key_type: str,
1023 ):
1024 """Queries cross-signing keys for a remote user and saves them to the database
1025
1026 Only the key specified by `key_type` will be returned, while all retrieved keys
1027 will be saved regardless
1028
1029 Args:
1030 user: The user to query remote keys for
1031 desired_key_type: The type of key to receive. One of "master", "self_signing"
1032
1033 Returns:
1034 Deferred[Tuple[Optional[Dict], Optional[str], Optional[VerifyKey]]]: A tuple
1035 of the retrieved key content, the key's ID and the matching VerifyKey.
1036 If the key cannot be retrieved, all values in the tuple will instead be None.
1037 """
1038 try:
1039 remote_result = yield self.federation.query_user_devices(
1040 user.domain, user.to_string()
1041 )
1042 except Exception as e:
1043 logger.warning(
1044 "Unable to query %s for cross-signing keys of user %s: %s %s",
1045 user.domain,
1046 user.to_string(),
1047 type(e),
1048 e,
1049 )
1050 return None, None, None
1051
1052 # Process each of the retrieved cross-signing keys
1053 desired_key = None
1054 desired_key_id = None
1055 desired_verify_key = None
1056 retrieved_device_ids = []
1057 for key_type in ["master", "self_signing"]:
1058 key_content = remote_result.get(key_type + "_key")
1059 if not key_content:
1060 continue
1061
1062 # Ensure these keys belong to the correct user
1063 if "user_id" not in key_content:
1064 logger.warning(
1065 "Invalid %s key retrieved, missing user_id field: %s",
1066 key_type,
1067 key_content,
1068 )
1069 continue
1070 if user.to_string() != key_content["user_id"]:
1071 logger.warning(
1072 "Found %s key of user %s when querying for keys of user %s",
1073 key_type,
1074 key_content["user_id"],
1075 user.to_string(),
1076 )
1077 continue
1078
1079 # Validate the key contents
1080 try:
1081 # verify_key is a VerifyKey from signedjson, which uses
1082 # .version to denote the portion of the key ID after the
1083 # algorithm and colon, which is the device ID
1084 key_id, verify_key = get_verify_key_from_cross_signing_key(key_content)
1085 except ValueError as e:
1086 logger.warning(
1087 "Invalid %s key retrieved: %s - %s %s",
1088 key_type,
1089 key_content,
1090 type(e),
1091 e,
1092 )
1093 continue
1094
1095 # Note down the device ID attached to this key
1096 retrieved_device_ids.append(verify_key.version)
1097
1098 # If this is the desired key type, save it and its ID/VerifyKey
1099 if key_type == desired_key_type:
1100 desired_key = key_content
1101 desired_verify_key = verify_key
1102 desired_key_id = key_id
1103
1104 # At the same time, store this key in the db for subsequent queries
1105 yield self.store.set_e2e_cross_signing_key(
1106 user.to_string(), key_type, key_content
1107 )
1108
1109 # Notify clients that new devices for this user have been discovered
1110 if retrieved_device_ids:
1111 # XXX is this necessary?
1112 yield self.device_handler.notify_device_update(
1113 user.to_string(), retrieved_device_ids
1114 )
1115
1116 return desired_key, desired_key_id, desired_verify_key
9831117
9841118
9851119 def _check_cross_signing_key(key, user_id, key_type, signing_key=None):
11421142 user_id
11431143 )
11441144
1145 tracked_users = set(users_who_share_room)
1146
1147 # Always tell the user about their own devices
1148 tracked_users.add(user_id)
1149
11451150 # Step 1a, check for changes in devices of users we share a room with
11461151 users_that_have_changed = await self.store.get_users_whose_devices_changed(
1147 since_token.device_list_key, users_who_share_room
1152 since_token.device_list_key, tracked_users
11481153 )
11491154
11501155 # Step 1b, check for newly joined rooms
1515
1616 import logging
1717 import re
18 from typing import Pattern
1819
1920 from six import string_types
2021
22 from synapse.events import EventBase
2123 from synapse.types import UserID
2224 from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
2325 from synapse.util.caches.lrucache import LruCache
5557 rhs = m.group(2)
5658 if not rhs.isdigit():
5759 return False
58 rhs = int(rhs)
60 rhs_int = int(rhs)
5961
6062 if ineq == "" or ineq == "==":
61 return number == rhs
63 return number == rhs_int
6264 elif ineq == "<":
63 return number < rhs
65 return number < rhs_int
6466 elif ineq == ">":
65 return number > rhs
67 return number > rhs_int
6668 elif ineq == ">=":
67 return number >= rhs
69 return number >= rhs_int
6870 elif ineq == "<=":
69 return number <= rhs
71 return number <= rhs_int
7072 else:
7173 return False
7274
8284
8385
8486 class PushRuleEvaluatorForEvent(object):
85 def __init__(self, event, room_member_count, sender_power_level, power_levels):
87 def __init__(
88 self,
89 event: EventBase,
90 room_member_count: int,
91 sender_power_level: int,
92 power_levels: dict,
93 ):
8694 self._event = event
8795 self._room_member_count = room_member_count
8896 self._sender_power_level = sender_power_level
9199 # Maps strings of e.g. 'content.body' -> event["content"]["body"]
92100 self._value_cache = _flatten_dict(event)
93101
94 def matches(self, condition, user_id, display_name):
102 def matches(self, condition: dict, user_id: str, display_name: str) -> bool:
95103 if condition["kind"] == "event_match":
96104 return self._event_match(condition, user_id)
97105 elif condition["kind"] == "contains_display_name":
105113 else:
106114 return True
107115
108 def _event_match(self, condition, user_id):
116 def _event_match(self, condition: dict, user_id: str) -> bool:
109117 pattern = condition.get("pattern", None)
110118
111119 if not pattern:
133141
134142 return _glob_matches(pattern, haystack)
135143
136 def _contains_display_name(self, display_name):
144 def _contains_display_name(self, display_name: str) -> bool:
137145 if not display_name:
138146 return False
139147
141149 if not body:
142150 return False
143151
144 return _glob_matches(display_name, body, word_boundary=True)
145
146 def _get_value(self, dotted_key):
152 # Similar to _glob_matches, but do not treat display_name as a glob.
153 r = regex_cache.get((display_name, False, True), None)
154 if not r:
155 r = re.escape(display_name)
156 r = _re_word_boundary(r)
157 r = re.compile(r, flags=re.IGNORECASE)
158 regex_cache[(display_name, False, True)] = r
159
160 return r.search(body)
161
162 def _get_value(self, dotted_key: str) -> str:
147163 return self._value_cache.get(dotted_key, None)
148164
149165
150 # Caches (glob, word_boundary) -> regex for push. See _glob_matches
166 # Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches
151167 regex_cache = LruCache(50000 * CACHE_SIZE_FACTOR)
152168 register_cache("cache", "regex_push_cache", regex_cache)
153169
154170
155 def _glob_matches(glob, value, word_boundary=False):
171 def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
156172 """Tests if value matches glob.
157173
158174 Args:
159 glob (string)
160 value (string): String to test against glob.
161 word_boundary (bool): Whether to match against word boundaries or entire
175 glob
176 value: String to test against glob.
177 word_boundary: Whether to match against word boundaries or entire
162178 string. Defaults to False.
163
164 Returns:
165 bool
166179 """
167180
168181 try:
169 r = regex_cache.get((glob, word_boundary), None)
182 r = regex_cache.get((glob, True, word_boundary), None)
170183 if not r:
171184 r = _glob_to_re(glob, word_boundary)
172 regex_cache[(glob, word_boundary)] = r
185 regex_cache[(glob, True, word_boundary)] = r
173186 return r.search(value)
174187 except re.error:
175188 logger.warning("Failed to parse glob to regex: %r", glob)
176189 return False
177190
178191
179 def _glob_to_re(glob, word_boundary):
192 def _glob_to_re(glob: str, word_boundary: bool) -> Pattern:
180193 """Generates regex for a given glob.
181194
182195 Args:
183 glob (string)
184 word_boundary (bool): Whether to match against word boundaries or entire
185 string. Defaults to False.
186
187 Returns:
188 regex object
196 glob
197 word_boundary: Whether to match against word boundaries or entire string.
189198 """
190199 if IS_GLOB.search(glob):
191200 r = re.escape(glob)
218227 return re.compile(r, flags=re.IGNORECASE)
219228
220229
221 def _re_word_boundary(r):
230 def _re_word_boundary(r: str) -> str:
222231 """
223232 Adds word boundary characters to the start and end of an
224233 expression to require that the match occur as a whole word,
3737 self.auth = hs.get_auth()
3838 self.store = hs.get_datastore()
3939 self.notifier = hs.get_notifier()
40 self._is_worker = hs.config.worker_app is not None
4041
4142 async def on_PUT(self, request, user_id, account_data_type):
43 if self._is_worker:
44 raise Exception("Cannot handle PUT /account_data on worker")
45
4246 requester = await self.auth.get_user_by_req(request)
4347 if user_id != requester.user.to_string():
4448 raise AuthError(403, "Cannot add account data for other users.")
8589 self.auth = hs.get_auth()
8690 self.store = hs.get_datastore()
8791 self.notifier = hs.get_notifier()
92 self._is_worker = hs.config.worker_app is not None
8893
8994 async def on_PUT(self, request, user_id, room_id, account_data_type):
95 if self._is_worker:
96 raise Exception("Cannot handle PUT /account_data on worker")
97
9098 requester = await self.auth.get_user_by_req(request)
9199 if user_id != requester.user.to_string():
92100 raise AuthError(403, "Cannot add account data for other users.")
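For context (not part of the diff), a minimal sketch of the read path this change lets a worker serve, using the standard client-server `account_data` endpoint; the homeserver URL, user ID, access token and event type are placeholders, and the `requests` library is assumed to be available:

```python
# Hypothetical client-side read of account data. With the routing change above,
# this GET can be answered by a worker, while the matching PUT still has to be
# handled by the main process (the worker servlet raises on PUT).
import requests

homeserver = "https://matrix.example.com"  # placeholder
user_id = "@alice:example.com"             # placeholder
access_token = "<access_token>"            # placeholder

resp = requests.get(
    f"{homeserver}/_matrix/client/r0/user/{user_id}/account_data/m.direct",
    headers={"Authorization": f"Bearer {access_token}"},
)
resp.raise_for_status()
print(resp.json())  # the stored m.direct account data for this user
```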
0 # -*- coding: utf-8 -*-
1 # Copyright 2020 The Matrix.org Foundation C.I.C.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from synapse.api.room_versions import RoomVersions
16 from synapse.events import FrozenEvent
17 from synapse.push.push_rule_evaluator import PushRuleEvaluatorForEvent
18
19 from tests import unittest
20
21
22 class PushRuleEvaluatorTestCase(unittest.TestCase):
23 def setUp(self):
24 event = FrozenEvent(
25 {
26 "event_id": "$event_id",
27 "type": "m.room.history_visibility",
28 "sender": "@user:test",
29 "state_key": "",
30 "room_id": "@room:test",
31 "content": {"body": "foo bar baz"},
32 },
33 RoomVersions.V1,
34 )
35 room_member_count = 0
36 sender_power_level = 0
37 power_levels = {}
38 self.evaluator = PushRuleEvaluatorForEvent(
39 event, room_member_count, sender_power_level, power_levels
40 )
41
42 def test_display_name(self):
43 """Check for a matching display name in the body of the event."""
44 condition = {
45 "kind": "contains_display_name",
46 }
47
48 # Blank names are skipped.
49 self.assertFalse(self.evaluator.matches(condition, "@user:test", ""))
50
51 # Check a display name that doesn't match.
52 self.assertFalse(self.evaluator.matches(condition, "@user:test", "not found"))
53
54 # Check a display name which matches.
55 self.assertTrue(self.evaluator.matches(condition, "@user:test", "foo"))
56
57 # A display name that matches, but not a full word does not result in a match.
58 self.assertFalse(self.evaluator.matches(condition, "@user:test", "ba"))
59
60 # A display name should not be interpreted as a regular expression.
61 self.assertFalse(self.evaluator.matches(condition, "@user:test", "ba[rz]"))
62
63 # A display name with spaces should work fine.
64 self.assertTrue(self.evaluator.matches(condition, "@user:test", "foo bar"))
193193 synapse/metrics \
194194 synapse/module_api \
195195 synapse/push/pusherpool.py \
196 synapse/push/push_rule_evaluator.py \
196197 synapse/replication \
197198 synapse/rest \
198199 synapse/spam_checker_api \