New upstream version 1.12.4
Andrej Shadura
3 years ago
0 | Synapse 1.12.4 (2020-04-23) | |
1 | =========================== | |
2 | ||
3 | No significant changes. | |
4 | ||
5 | ||
6 | Synapse 1.12.4rc1 (2020-04-22) | |
7 | ============================== | |
8 | ||
9 | Features | |
10 | -------- | |
11 | ||
12 | - Always send users their own device updates. ([\#7160](https://github.com/matrix-org/synapse/issues/7160)) | |
13 | - Add support for handling GET requests for `account_data` on a worker. ([\#7311](https://github.com/matrix-org/synapse/issues/7311)) | |
14 | ||
15 | ||
16 | Bugfixes | |
17 | -------- | |
18 | ||
19 | - Fix a bug that prevented cross-signing with users on worker-mode synapses. ([\#7255](https://github.com/matrix-org/synapse/issues/7255)) | |
20 | - Do not treat display names as globs in push rules. ([\#7271](https://github.com/matrix-org/synapse/issues/7271)) | |
21 | - Fix a bug with cross-signing devices belonging to remote users who did not share a room with any user on the local homeserver. ([\#7289](https://github.com/matrix-org/synapse/issues/7289)) | |
22 | ||
23 | ||
0 | 24 | Synapse 1.12.3 (2020-04-03) |
1 | 25 | =========================== |
2 | 26 |
0 | matrix-synapse-py3 (1.12.4) stable; urgency=medium | |
1 | ||
2 | * New synapse release 1.12.4. | |
3 | ||
4 | -- Synapse Packaging team <packages@matrix.org> Thu, 23 Apr 2020 10:58:14 -0400 | |
5 | ||
0 | 6 | matrix-synapse-py3 (1.12.3) stable; urgency=medium |
1 | 7 | |
2 | 8 | [ Richard van der Hoff ] |
267 | 267 | |
268 | 268 | ^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$ |
269 | 269 | ^/_matrix/client/(api/v1|r0|unstable)/groups/.*$ |
270 | ^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/account_data/ | |
271 | ^/_matrix/client/(api/v1|r0|unstable)/user/[^/]*/rooms/[^/]*/account_data/ | |
270 | 272 | |
271 | 273 | Additionally, the following REST endpoints can be handled, but all requests must |
272 | 274 | be routed to the same instance: |
35 | 35 | except ImportError: |
36 | 36 | pass |
37 | 37 | |
38 | __version__ = "1.12.3" | |
38 | __version__ = "1.12.4" | |
39 | 39 | |
40 | 40 | if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): |
41 | 41 | # We import here so that we don't have to install a bunch of deps when |
97 | 97 | from synapse.rest.client.v2_alpha import groups, sync, user_directory |
98 | 98 | from synapse.rest.client.v2_alpha._base import client_patterns |
99 | 99 | from synapse.rest.client.v2_alpha.account import ThreepidRestServlet |
100 | from synapse.rest.client.v2_alpha.account_data import ( | |
101 | AccountDataServlet, | |
102 | RoomAccountDataServlet, | |
103 | ) | |
100 | 104 | from synapse.rest.client.v2_alpha.keys import KeyChangesServlet, KeyQueryServlet |
101 | 105 | from synapse.rest.client.v2_alpha.register import RegisterRestServlet |
102 | 106 | from synapse.rest.client.versions import VersionsRestServlet |
474 | 478 | ProfileDisplaynameRestServlet(self).register(resource) |
475 | 479 | ProfileRestServlet(self).register(resource) |
476 | 480 | KeyUploadServlet(self).register(resource) |
481 | AccountDataServlet(self).register(resource) | |
482 | RoomAccountDataServlet(self).register(resource) | |
477 | 483 | |
478 | 484 | sync.register_servlets(self, resource) |
479 | 485 | events.register_servlets(self, resource) |
398 | 398 | { |
399 | 399 | "device_keys": { |
400 | 400 | "<user_id>": ["<device_id>"] |
401 | } } | |
401 | } | |
402 | } | |
402 | 403 | |
403 | 404 | Response: |
404 | 405 | { |
405 | 406 | "device_keys": { |
406 | 407 | "<user_id>": { |
407 | 408 | "<device_id>": {...} |
408 | } } } | |
409 | } | |
410 | }, | |
411 | "master_key": { | |
412 | "<user_id>": {...} | |
413 | } | |
414 | }, | |
415 | "self_signing_key": { | |
416 | "<user_id>": {...} | |
417 | } | |
418 | } | |
409 | 419 | |
410 | 420 | Args: |
411 | 421 | destination(str): The server to query. |
412 | 422 | query_content(dict): The user ids to query. |
413 | 423 | Returns: |
414 | A dict containg the device keys. | |
424 | A dict containing device and cross-signing keys. | |
415 | 425 | """ |
416 | 426 | path = _create_v1_path("/user/keys/query") |
417 | 427 | |
428 | 438 | Response: |
429 | 439 | { |
430 | 440 | "stream_id": "...", |
431 | "devices": [ { ... } ] | |
441 | "devices": [ { ... } ], | |
442 | "master_key": { | |
443 | "user_id": "<user_id>", | |
444 | "usage": [...], | |
445 | "keys": {...}, | |
446 | "signatures": { | |
447 | "<user_id>": {...} | |
448 | } | |
449 | }, | |
450 | "self_signing_key": { | |
451 | "user_id": "<user_id>", | |
452 | "usage": [...], | |
453 | "keys": {...}, | |
454 | "signatures": { | |
455 | "<user_id>": {...} | |
456 | } | |
457 | } | |
432 | 458 | } |
433 | 459 | |
434 | 460 | Args: |
435 | 461 | destination(str): The server to query. |
436 | 462 | query_content(dict): The user ids to query. |
437 | 463 | Returns: |
438 | A dict containg the device keys. | |
464 | A dict containing device and cross-signing keys. | |
439 | 465 | """ |
440 | 466 | path = _create_v1_path("/user/devices/%s", user_id) |
441 | 467 | |
453 | 479 | { |
454 | 480 | "one_time_keys": { |
455 | 481 | "<user_id>": { |
456 | "<device_id>": "<algorithm>" | |
457 | } } } | |
482 | "<device_id>": "<algorithm>" | |
483 | } | |
484 | } | |
485 | } | |
458 | 486 | |
459 | 487 | Response: |
460 | 488 | { |
462 | 490 | "<user_id>": { |
463 | 491 | "<device_id>": { |
464 | 492 | "<algorithm>:<key_id>": "<key_base64>" |
465 | } } } } | |
493 | } | |
494 | } | |
495 | } | |
496 | } | |
466 | 497 | |
467 | 498 | Args: |
468 | 499 | destination(str): The server to query. |
469 | 500 | query_content(dict): The user ids to query. |
470 | 501 | Returns: |
471 | A dict containg the one-time keys. | |
502 | A dict containing the one-time keys. | |
472 | 503 | """ |
473 | 504 | |
474 | 505 | path = _create_v1_path("/user/keys/claim") |
124 | 124 | users_who_share_room = yield self.store.get_users_who_share_room_with_user( |
125 | 125 | user_id |
126 | 126 | ) |
127 | ||
128 | tracked_users = set(users_who_share_room) | |
129 | ||
130 | # Always tell the user about their own devices | |
131 | tracked_users.add(user_id) | |
132 | ||
127 | 133 | changed = yield self.store.get_users_whose_devices_changed( |
128 | from_token.device_list_key, users_who_share_room | |
134 | from_token.device_list_key, tracked_users | |
129 | 135 | ) |
130 | 136 | |
131 | 137 | # Then work out if any users have since joined |
455 | 461 | |
456 | 462 | room_ids = yield self.store.get_rooms_for_user(user_id) |
457 | 463 | |
458 | yield self.notifier.on_new_event("device_list_key", position, rooms=room_ids) | |
464 | # specify the user ID too since the user should always get their own device list | |
465 | # updates, even if they aren't in any rooms. | |
466 | yield self.notifier.on_new_event( | |
467 | "device_list_key", position, users=[user_id], rooms=room_ids | |
468 | ) | |
459 | 469 | |
460 | 470 | if hosts: |
461 | 471 | logger.info( |
53 | 53 | |
54 | 54 | self._edu_updater = SigningKeyEduUpdater(hs, self) |
55 | 55 | |
56 | federation_registry = hs.get_federation_registry() | |
57 | ||
56 | 58 | self._is_master = hs.config.worker_app is None |
57 | 59 | if not self._is_master: |
58 | 60 | self._user_device_resync_client = ReplicationUserDevicesResyncRestServlet.make_client( |
59 | 61 | hs |
60 | 62 | ) |
61 | ||
62 | federation_registry = hs.get_federation_registry() | |
63 | ||
64 | # FIXME: switch to m.signing_key_update when MSC1756 is merged into the spec | |
65 | federation_registry.register_edu_handler( | |
66 | "org.matrix.signing_key_update", | |
67 | self._edu_updater.incoming_signing_key_update, | |
68 | ) | |
63 | else: | |
64 | # Only register this edu handler on master as it requires writing | |
65 | # device updates to the db | |
66 | # | |
67 | # FIXME: switch to m.signing_key_update when MSC1756 is merged into the spec | |
68 | federation_registry.register_edu_handler( | |
69 | "org.matrix.signing_key_update", | |
70 | self._edu_updater.incoming_signing_key_update, | |
71 | ) | |
72 | ||
69 | 73 | # doesn't really work as part of the generic query API, because the |
70 | 74 | # query request requires an object POST, but we abuse the |
71 | 75 | # "query handler" interface. |
169 | 173 | """This is called when we are querying the device list of a user on |
170 | 174 | a remote homeserver and their device list is not in the device list |
171 | 175 | cache. If we share a room with this user and we're not querying for |
172 | specific user we will update the cache | |
173 | with their device list.""" | |
176 | specific user we will update the cache with their device list. | |
177 | """ | |
174 | 178 | |
175 | 179 | destination_query = remote_queries_not_in_cache[destination] |
176 | 180 | |
956 | 960 | return signature_list, failures |
957 | 961 | |
958 | 962 | @defer.inlineCallbacks |
959 | def _get_e2e_cross_signing_verify_key(self, user_id, key_type, from_user_id=None): | |
960 | """Fetch the cross-signing public key from storage and interpret it. | |
963 | def _get_e2e_cross_signing_verify_key( | |
964 | self, user_id: str, key_type: str, from_user_id: str = None | |
965 | ): | |
966 | """Fetch locally or remotely query for a cross-signing public key. | |
967 | ||
968 | First, attempt to fetch the cross-signing public key from storage. | |
969 | If that fails, query the keys from the homeserver they belong to | |
970 | and update our local copy. | |
961 | 971 | |
962 | 972 | Args: |
963 | user_id (str): the user whose key should be fetched | |
964 | key_type (str): the type of key to fetch | |
965 | from_user_id (str): the user that we are fetching the keys for. | |
973 | user_id: the user whose key should be fetched | |
974 | key_type: the type of key to fetch | |
975 | from_user_id: the user that we are fetching the keys for. | |
966 | 976 | This affects what signatures are fetched. |
967 | 977 | |
968 | 978 | Returns: |
971 | 981 | |
972 | 982 | Raises: |
973 | 983 | NotFoundError: if the key is not found |
984 | SynapseError: if `user_id` is invalid | |
974 | 985 | """ |
986 | user = UserID.from_string(user_id) | |
975 | 987 | key = yield self.store.get_e2e_cross_signing_key( |
976 | 988 | user_id, key_type, from_user_id |
977 | 989 | ) |
990 | ||
991 | if key: | |
992 | # We found a copy of this key in our database. Decode and return it | |
993 | key_id, verify_key = get_verify_key_from_cross_signing_key(key) | |
994 | return key, key_id, verify_key | |
995 | ||
996 | # If we couldn't find the key locally, and we're looking for keys of | |
997 | # another user then attempt to fetch the missing key from the remote | |
998 | # user's server. | |
999 | # | |
1000 | # We may run into this in possible edge cases where a user tries to | |
1001 | # cross-sign a remote user, but does not share any rooms with them yet. | |
1002 | # Thus, we would not have their key list yet. We instead fetch the key, | |
1003 | # store it and notify clients of new, associated device IDs. | |
1004 | if self.is_mine(user) or key_type not in ["master", "self_signing"]: | |
1005 | # Note that master and self_signing keys are the only cross-signing keys we | |
1006 | # can request over federation | |
1007 | raise NotFoundError("No %s key found for %s" % (key_type, user_id)) | |
1008 | ||
1009 | ( | |
1010 | key, | |
1011 | key_id, | |
1012 | verify_key, | |
1013 | ) = yield self._retrieve_cross_signing_keys_for_remote_user(user, key_type) | |
1014 | ||
978 | 1015 | if key is None: |
979 | logger.debug("no %s key found for %s", key_type, user_id) | |
980 | 1016 | raise NotFoundError("No %s key found for %s" % (key_type, user_id)) |
981 | key_id, verify_key = get_verify_key_from_cross_signing_key(key) | |
1017 | ||
982 | 1018 | return key, key_id, verify_key |
1019 | ||
    @defer.inlineCallbacks
    def _retrieve_cross_signing_keys_for_remote_user(
        self, user: UserID, desired_key_type: str,
    ):
        """Queries cross-signing keys for a remote user and saves them to the database.

        Only the key specified by `desired_key_type` is returned, but every valid key
        retrieved from the remote server is saved locally regardless.

        Args:
            user: The user to query remote keys for.
            desired_key_type: The type of key to receive. One of "master",
                "self_signing".

        Returns:
            Deferred[Tuple[Optional[Dict], Optional[str], Optional[VerifyKey]]]: A tuple
            of the retrieved key content, the key's ID and the matching VerifyKey.
            If the key cannot be retrieved, all values in the tuple will instead be None.
        """
        try:
            remote_result = yield self.federation.query_user_devices(
                user.domain, user.to_string()
            )
        except Exception as e:
            # Best-effort: a dead or misbehaving remote server is reported to the
            # caller as "no key found" rather than propagating the failure.
            logger.warning(
                "Unable to query %s for cross-signing keys of user %s: %s %s",
                user.domain,
                user.to_string(),
                type(e),
                e,
            )
            return None, None, None

        # Process each of the retrieved cross-signing keys
        desired_key = None
        desired_key_id = None
        desired_verify_key = None
        retrieved_device_ids = []
        for key_type in ["master", "self_signing"]:
            key_content = remote_result.get(key_type + "_key")
            if not key_content:
                continue

            # Ensure these keys belong to the correct user
            if "user_id" not in key_content:
                logger.warning(
                    "Invalid %s key retrieved, missing user_id field: %s",
                    key_type,
                    key_content,
                )
                continue
            if user.to_string() != key_content["user_id"]:
                logger.warning(
                    "Found %s key of user %s when querying for keys of user %s",
                    key_type,
                    key_content["user_id"],
                    user.to_string(),
                )
                continue

            # Validate the key contents
            try:
                # verify_key is a VerifyKey from signedjson, which uses
                # .version to denote the portion of the key ID after the
                # algorithm and colon, which is the device ID
                key_id, verify_key = get_verify_key_from_cross_signing_key(key_content)
            except ValueError as e:
                logger.warning(
                    "Invalid %s key retrieved: %s - %s %s",
                    key_type,
                    key_content,
                    type(e),
                    e,
                )
                continue

            # Note down the device ID attached to this key
            retrieved_device_ids.append(verify_key.version)

            # If this is the desired key type, save it and its ID/VerifyKey
            if key_type == desired_key_type:
                desired_key = key_content
                desired_verify_key = verify_key
                desired_key_id = key_id

            # At the same time, store this key in the db for subsequent queries
            yield self.store.set_e2e_cross_signing_key(
                user.to_string(), key_type, key_content
            )

        # Notify clients that new devices for this user have been discovered
        if retrieved_device_ids:
            # XXX is this necessary?
            yield self.device_handler.notify_device_update(
                user.to_string(), retrieved_device_ids
            )

        return desired_key, desired_key_id, desired_verify_key
983 | 1117 | |
984 | 1118 | |
985 | 1119 | def _check_cross_signing_key(key, user_id, key_type, signing_key=None): |
1142 | 1142 | user_id |
1143 | 1143 | ) |
1144 | 1144 | |
1145 | tracked_users = set(users_who_share_room) | |
1146 | ||
1147 | # Always tell the user about their own devices | |
1148 | tracked_users.add(user_id) | |
1149 | ||
1145 | 1150 | # Step 1a, check for changes in devices of users we share a room with |
1146 | 1151 | users_that_have_changed = await self.store.get_users_whose_devices_changed( |
1147 | since_token.device_list_key, users_who_share_room | |
1152 | since_token.device_list_key, tracked_users | |
1148 | 1153 | ) |
1149 | 1154 | |
1150 | 1155 | # Step 1b, check for newly joined rooms |
15 | 15 | |
16 | 16 | import logging |
17 | 17 | import re |
18 | from typing import Pattern | |
18 | 19 | |
19 | 20 | from six import string_types |
20 | 21 | |
22 | from synapse.events import EventBase | |
21 | 23 | from synapse.types import UserID |
22 | 24 | from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache |
23 | 25 | from synapse.util.caches.lrucache import LruCache |
55 | 57 | rhs = m.group(2) |
56 | 58 | if not rhs.isdigit(): |
57 | 59 | return False |
58 | rhs = int(rhs) | |
60 | rhs_int = int(rhs) | |
59 | 61 | |
60 | 62 | if ineq == "" or ineq == "==": |
61 | return number == rhs | |
63 | return number == rhs_int | |
62 | 64 | elif ineq == "<": |
63 | return number < rhs | |
65 | return number < rhs_int | |
64 | 66 | elif ineq == ">": |
65 | return number > rhs | |
67 | return number > rhs_int | |
66 | 68 | elif ineq == ">=": |
67 | return number >= rhs | |
69 | return number >= rhs_int | |
68 | 70 | elif ineq == "<=": |
69 | return number <= rhs | |
71 | return number <= rhs_int | |
70 | 72 | else: |
71 | 73 | return False |
72 | 74 | |
82 | 84 | |
83 | 85 | |
84 | 86 | class PushRuleEvaluatorForEvent(object): |
85 | def __init__(self, event, room_member_count, sender_power_level, power_levels): | |
87 | def __init__( | |
88 | self, | |
89 | event: EventBase, | |
90 | room_member_count: int, | |
91 | sender_power_level: int, | |
92 | power_levels: dict, | |
93 | ): | |
86 | 94 | self._event = event |
87 | 95 | self._room_member_count = room_member_count |
88 | 96 | self._sender_power_level = sender_power_level |
91 | 99 | # Maps strings of e.g. 'content.body' -> event["content"]["body"] |
92 | 100 | self._value_cache = _flatten_dict(event) |
93 | 101 | |
94 | def matches(self, condition, user_id, display_name): | |
102 | def matches(self, condition: dict, user_id: str, display_name: str) -> bool: | |
95 | 103 | if condition["kind"] == "event_match": |
96 | 104 | return self._event_match(condition, user_id) |
97 | 105 | elif condition["kind"] == "contains_display_name": |
105 | 113 | else: |
106 | 114 | return True |
107 | 115 | |
108 | def _event_match(self, condition, user_id): | |
116 | def _event_match(self, condition: dict, user_id: str) -> bool: | |
109 | 117 | pattern = condition.get("pattern", None) |
110 | 118 | |
111 | 119 | if not pattern: |
133 | 141 | |
134 | 142 | return _glob_matches(pattern, haystack) |
135 | 143 | |
136 | def _contains_display_name(self, display_name): | |
144 | def _contains_display_name(self, display_name: str) -> bool: | |
137 | 145 | if not display_name: |
138 | 146 | return False |
139 | 147 | |
141 | 149 | if not body: |
142 | 150 | return False |
143 | 151 | |
144 | return _glob_matches(display_name, body, word_boundary=True) | |
145 | ||
146 | def _get_value(self, dotted_key): | |
152 | # Similar to _glob_matches, but do not treat display_name as a glob. | |
153 | r = regex_cache.get((display_name, False, True), None) | |
154 | if not r: | |
155 | r = re.escape(display_name) | |
156 | r = _re_word_boundary(r) | |
157 | r = re.compile(r, flags=re.IGNORECASE) | |
158 | regex_cache[(display_name, False, True)] = r | |
159 | ||
160 | return r.search(body) | |
161 | ||
162 | def _get_value(self, dotted_key: str) -> str: | |
147 | 163 | return self._value_cache.get(dotted_key, None) |
148 | 164 | |
149 | 165 | |
150 | # Caches (glob, word_boundary) -> regex for push. See _glob_matches | |
166 | # Caches (string, is_glob, word_boundary) -> regex for push. See _glob_matches | |
151 | 167 | regex_cache = LruCache(50000 * CACHE_SIZE_FACTOR) |
152 | 168 | register_cache("cache", "regex_push_cache", regex_cache) |
153 | 169 | |
154 | 170 | |
def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
    """Tests if value matches glob.

    Args:
        glob: The glob pattern to test against.
        value: String to test against glob.
        word_boundary: Whether to match against word boundaries or entire
            string. Defaults to False.

    Returns:
        True if value matches glob, False otherwise (including when the
        glob cannot be compiled to a regex).
    """

    try:
        # The True in the cache key marks this entry as a real glob, so it
        # cannot collide with the escaped-literal entries cached under False.
        r = regex_cache.get((glob, True, word_boundary), None)
        if not r:
            r = _glob_to_re(glob, word_boundary)
            regex_cache[(glob, True, word_boundary)] = r
        # Coerce to bool: re.search returns Optional[Match], and this function
        # is annotated (and documented) as returning a bool.
        return bool(r.search(value))
    except re.error:
        logger.warning("Failed to parse glob to regex: %r", glob)
        return False
177 | 190 | |
178 | 191 | |
179 | def _glob_to_re(glob, word_boundary): | |
192 | def _glob_to_re(glob: str, word_boundary: bool) -> Pattern: | |
180 | 193 | """Generates regex for a given glob. |
181 | 194 | |
182 | 195 | Args: |
183 | glob (string) | |
184 | word_boundary (bool): Whether to match against word boundaries or entire | |
185 | string. Defaults to False. | |
186 | ||
187 | Returns: | |
188 | regex object | |
196 | glob | |
197 | word_boundary: Whether to match against word boundaries or entire string. | |
189 | 198 | """ |
190 | 199 | if IS_GLOB.search(glob): |
191 | 200 | r = re.escape(glob) |
218 | 227 | return re.compile(r, flags=re.IGNORECASE) |
219 | 228 | |
220 | 229 | |
221 | def _re_word_boundary(r): | |
230 | def _re_word_boundary(r: str) -> str: | |
222 | 231 | """ |
223 | 232 | Adds word boundary characters to the start and end of an |
224 | 233 | expression to require that the match occur as a whole word, |
37 | 37 | self.auth = hs.get_auth() |
38 | 38 | self.store = hs.get_datastore() |
39 | 39 | self.notifier = hs.get_notifier() |
40 | self._is_worker = hs.config.worker_app is not None | |
40 | 41 | |
41 | 42 | async def on_PUT(self, request, user_id, account_data_type): |
43 | if self._is_worker: | |
44 | raise Exception("Cannot handle PUT /account_data on worker") | |
45 | ||
42 | 46 | requester = await self.auth.get_user_by_req(request) |
43 | 47 | if user_id != requester.user.to_string(): |
44 | 48 | raise AuthError(403, "Cannot add account data for other users.") |
85 | 89 | self.auth = hs.get_auth() |
86 | 90 | self.store = hs.get_datastore() |
87 | 91 | self.notifier = hs.get_notifier() |
92 | self._is_worker = hs.config.worker_app is not None | |
88 | 93 | |
89 | 94 | async def on_PUT(self, request, user_id, room_id, account_data_type): |
95 | if self._is_worker: | |
96 | raise Exception("Cannot handle PUT /account_data on worker") | |
97 | ||
90 | 98 | requester = await self.auth.get_user_by_req(request) |
91 | 99 | if user_id != requester.user.to_string(): |
92 | 100 | raise AuthError(403, "Cannot add account data for other users.") |
0 | # -*- coding: utf-8 -*- | |
1 | # Copyright 2020 The Matrix.org Foundation C.I.C. | |
2 | # | |
3 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
4 | # you may not use this file except in compliance with the License. | |
5 | # You may obtain a copy of the License at | |
6 | # | |
7 | # http://www.apache.org/licenses/LICENSE-2.0 | |
8 | # | |
9 | # Unless required by applicable law or agreed to in writing, software | |
10 | # distributed under the License is distributed on an "AS IS" BASIS, | |
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
12 | # See the License for the specific language governing permissions and | |
13 | # limitations under the License. | |
14 | ||
15 | from synapse.api.room_versions import RoomVersions | |
16 | from synapse.events import FrozenEvent | |
17 | from synapse.push.push_rule_evaluator import PushRuleEvaluatorForEvent | |
18 | ||
19 | from tests import unittest | |
20 | ||
21 | ||
class PushRuleEvaluatorTestCase(unittest.TestCase):
    def setUp(self):
        # A minimal state event whose body is searched for display-name mentions.
        source = {
            "event_id": "$event_id",
            "type": "m.room.history_visibility",
            "sender": "@user:test",
            "state_key": "",
            "room_id": "@room:test",
            "content": {"body": "foo bar baz"},
        }
        self.evaluator = PushRuleEvaluatorForEvent(
            FrozenEvent(source, RoomVersions.V1),
            room_member_count=0,
            sender_power_level=0,
            power_levels={},
        )

    def test_display_name(self):
        """Check for a matching display name in the body of the event."""
        condition = {"kind": "contains_display_name"}

        cases = [
            # (display name, should it match "foo bar baz"?)
            ("", False),  # blank names are skipped
            ("not found", False),  # absent from the body
            ("foo", True),  # whole-word occurrence
            ("ba", False),  # partial words do not count
            ("ba[rz]", False),  # names are literals, not regular expressions
            ("foo bar", True),  # names containing spaces still match
        ]
        for display_name, should_match in cases:
            matched = self.evaluator.matches(condition, "@user:test", display_name)
            if should_match:
                self.assertTrue(matched)
            else:
                self.assertFalse(matched)